repo_name stringlengths 6 97 | path stringlengths 3 341 | text stringlengths 8 1.02M |
|---|---|---|
jovahe/Mask_RCNN_RS | my/shp2json.py | <reponame>jovahe/Mask_RCNN_RS
# -*- coding: utf-8 -*-
from osgeo import ogr
import gdal
import sys
import os
import fire
def ChangeToJson(vector, output):
    """Inspect a vector dataset and print its extent and per-feature envelopes.

    NOTE: despite the name, the actual GeoJSON conversion was disabled
    (commented out) in this version — the function only prints diagnostics,
    and ``output`` is accepted but unused.

    :param vector: path to the input vector file (e.g. a shapefile)
    :param output: intended path of the GeoJSON result (currently unused)
    """
    print("Starting........")
    # Configure GDAL for UTF-8 file names and GBK attribute encoding
    # (common for Chinese shapefiles), then open the first layer.
    gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
    gdal.SetConfigOption("SHAPE_ENCODING", "GBK")
    src_ds = ogr.Open(vector)  # keep a reference so the layer stays valid
    layer = src_ds.GetLayer(0)
    feature_count = layer.GetFeatureCount()
    print("Features number:{}".format(feature_count))
    # Layer-wide extent is (minX, maxX, minY, maxY).
    full_extent = layer.GetExtent()
    print("Extent:", full_extent)
    print("UL:", full_extent[0], full_extent[3])
    print("LR:", full_extent[1], full_extent[2])
    # Walk every feature and report its bounding envelope and geometry.
    for _ in range(feature_count):
        feat = layer.GetNextFeature()
        geom = feat.GetGeometryRef()
        envelope = geom.GetEnvelope()
        print(envelope)
        print("UL:", envelope[0], envelope[3])
        print("LR:", envelope[1], envelope[2])
        print("segmentation:", geom)
    print("Success........")
def getPolygonEnvelope(vector):
    """Print the layer extent and every feature's envelope for a shapefile.

    :param vector: path to an ESRI shapefile
    :return: 0 on success, -1 if the file could not be opened
    """
    shp_driver = ogr.GetDriverByName('ESRI Shapefile')
    dataset = shp_driver.Open(vector, 0)  # 0 = read-only
    if dataset is None:
        print("Could not open {}".format(vector))
        return -1
    layer = dataset.GetLayer()
    count = layer.GetFeatureCount()
    print("Features number:{}".format(count))
    # Layer extent is (minX, maxX, minY, maxY).
    bounds = layer.GetExtent()
    print("Extent:", bounds)
    print("UL:", bounds[0], bounds[3])
    print("LR:", bounds[1], bounds[2])
    # Report the bounding envelope of each feature in turn.
    for _ in range(count):
        feat = layer.GetNextFeature()
        geom = feat.GetGeometryRef()
        env = geom.GetEnvelope()
        print(env)
        print("UL:", env[0], env[3])
        print("LR:", env[1], env[2])
    return 0
if __name__ == '__main__':
    # Hard-coded demo inputs; adjust these paths before running.
    shapefile = '/home/omnisky/PycharmProjects/data/maskRcnn/mytest/onepolygon/262985539_1709e54576_z.shp'
    out = '/home/omnisky/PycharmProjects/data/maskRcnn/mytest/onepolygon/262985539_1709e54576_z.json'
    ChangeToJson(shapefile, out)
    # Alternative entry points, kept disabled:
    # getPolygonEnvelope(shapefile)
    # fire.Fire()
jovahe/Mask_RCNN_RS | my/shp2COCO.py | <filename>my/shp2COCO.py<gh_stars>0
import os,sys
import numpy as np
import glob
import json
import PIL.Image
from shapely.geometry import Polygon
from osgeo import ogr, gdal
gdal.UseExceptions()
from labelme import utils # necessary
def get_file(file_dir, file_type=(".jpg", ".png", ".tif", ".tiff", ".img", ".pix")):
    """Recursively collect files under ``file_dir`` with a matching extension.

    :param file_dir: root directory to search (walked recursively)
    :param file_type: one extension string or an iterable of extension
        strings, each including the leading dot; matching is case-insensitive.
        The default is a tuple (not a list) to avoid the mutable-default
        pitfall of the original.
    :return: tuple ``(files, count)`` — a list of matching paths and its
        length.  Returns ``("", 0)`` when ``file_dir`` is not a directory,
        and ``("", 0)``-style empty-string list when nothing matches;
        callers depend on these sentinel values, so they are preserved.
    """
    # Normalize to a tuple of lowercase extensions.  The original matched a
    # bare-string file_type by substring test, which accidentally accepted
    # partial extensions; a string now means exactly one extension.
    if isinstance(file_type, str):
        extensions = (file_type.lower(),)
    else:
        extensions = tuple(ext.lower() for ext in file_type)
    if not os.path.isdir(file_dir):
        print("Error:input dir is not existed")
        return "", 0
    matches = []
    for root, _dirs, files in os.walk(file_dir):
        for name in files:
            if os.path.splitext(name)[1].lower() in extensions:
                matches.append(os.path.join(root, name))
    num = len(matches)
    if num == 0:
        matches = ""  # sentinel preserved for existing callers
    return matches, num
def get_img_from_shp(imgdir, shp_file):
    """Find the image in ``imgdir`` whose name contains the shapefile's stem.

    :param imgdir: directory holding candidate images; when empty, the
        directory of ``shp_file`` is used instead.
    :param shp_file: path to the shapefile whose companion image is wanted
    :return: the matching image path (the last match wins, as before),
        '' when no image matches, or -1 when ``imgdir`` is non-empty but
        not an existing directory (callers check for the int sentinel).
    """
    if not os.path.isdir(imgdir):
        if len(imgdir) == 0:
            # BUG FIX: the original referenced an undefined global
            # ``shpfile`` here (NameError); use the ``shp_file`` parameter.
            imgdir = os.path.dirname(shp_file)
            print("imdir is empty, now using the dir of shpfile:\n {}".format(imgdir))
        else:
            print("imgdir is illegal")
            return -1
    # Stem = file name up to the first dot (behavior kept from the original,
    # which used str.split('.') rather than os.path.splitext).
    stem = os.path.split(shp_file)[1].split('.')[0]
    files, _count = get_file(imgdir)
    match = ''
    for candidate in files:
        if stem in candidate:
            match = candidate  # keep scanning: last match wins
    return match
class shp2coco(object):
    """Convert a set of shapefiles (plus their companion images) into a
    COCO-format annotation JSON file.

    The whole conversion runs as a side effect of instantiation (the
    constructor calls ``save_json``).

    NOTE(review): ``shpfile=[]`` is a mutable default argument; harmless
    here because it is only read, but a tuple would be safer.
    """

    def __init__(self, shpfile=[], save_json_path='./new.json', imgdir=''):
        """
        :param shpfile: list of shapefile paths to convert
        :param save_json_path: where to write the resulting COCO json
        :param imgdir: directory containing the images matching the shapefiles
        """
        self.shpfile = shpfile
        self.imgdir = imgdir
        self.save_json_path = save_json_path
        self.images = []
        # Single hard-coded category; extend this list for more classes.
        self.categories = [{'supercategory': 'A', 'id': 1, 'name': 'building'}]
        self.annotations = []
        self.label = []
        # Running annotation id, incremented once per exported feature.
        self.annID = 1
        self.height = 0
        self.width = 0
        self.save_json()

    def data_transfer(self):
        """Populate ``self.images`` and ``self.annotations`` from the shapefiles."""
        for num, shp_file in enumerate(self.shpfile):
            # Locate the companion image; skip this shapefile when missing.
            ret = 0
            ret = get_img_from_shp(self.imgdir, shp_file)
            if isinstance(ret, int) or len(ret) == 0:
                print("Warning: can not find the corresponding image:\n {}".format(shp_file))
                continue
            print("image:", ret)
            self.images.append(self.image(ret, num))
            print(self.images)
            # Open the vector layer (GBK encoding is common for Chinese shapefiles).
            gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
            gdal.SetConfigOption("SHAPE_ENCODING", "GBK")
            fp = ogr.Open(shp_file)
            shp_lyr = fp.GetLayer(0)
            numFeatures = shp_lyr.GetFeatureCount()
            # Iterate every feature (polygon) of the layer.
            for i in range(numFeatures):
                feature = shp_lyr.GetNextFeature()
                # Read the 'label' attribute; fall back to 'ship' when absent.
                try:
                    label = feature.GetField('label')
                except:
                    print("can not get the label")
                    label = 'ship'
                print("the {}th feature is :", i + 1, label)
                if label == None:
                    print("Warning: feature is background")
                    continue
                # Extract the exterior ring's vertices.  The last point
                # repeats the first, hence GetPointCount() - 1.
                geometry = feature.GetGeometryRef()
                feat = geometry.GetGeometryRef(0)
                points = []
                # NOTE(review): this inner loop reuses ``i`` from the outer
                # feature loop; harmless (range reassigns it), but confusing.
                for i in range(feat.GetPointCount() - 1):
                    x = abs(feat.GetX(point=i))
                    y = abs(feat.GetY(point=i))
                    points.append([x, y])
                print(points)
                self.annotations.append(self.annotation(points, label, num))
                self.annID += 1
        print(self.categories)

    def image(self, file, num):
        """Build one COCO image record.

        :param file: path to the raster image
        :param num: zero-based image index (COCO id is num + 1)
        :return: the image dict, or -1 when the file cannot be opened —
            NOTE(review): callers append the return value unchecked, so a
            failure would put -1 into ``self.images``; verify upstream.
        """
        image = {}
        try:
            dataset = gdal.Open(file, gdal.GF_Read)
        except:
            print("Warning: can not open file:\n {}".format(file))
            return -1
        height = dataset.RasterYSize
        width = dataset.RasterXSize
        del dataset
        img = None
        image['height'] = height
        image['width'] = width
        image['id'] = num + 1
        # NOTE(review): '/'-split keeps directory prefixes on Windows paths;
        # os.path.basename would be portable.
        image['file_name'] = file.split('/')[-1]
        # Remember the raster size for polygon rasterization in getbbox().
        self.height = height
        self.width = width
        return image

    def annotation(self, points, label, num):
        """Build one COCO annotation record for a polygon.

        :param points: list of [x, y] vertices in pixel coordinates
        :param label: category name string
        :param num: zero-based image index (image_id is num + 1)
        """
        annotation = {}
        annotation['segmentation'] = [list(np.asarray(points).flatten())]
        poly = Polygon(points)
        area_ = round(poly.area, 6)
        annotation['area'] = area_
        annotation['iscrowd'] = 0
        annotation['image_id'] = num + 1
        # bbox values are cast to float so json serialization is stable.
        annotation['bbox'] = list(map(float, self.getbbox(points)))
        annotation['category_id'] = self.getcatid(label)
        annotation['id'] = self.annID
        return annotation

    def getcatid(self, label):
        """Return the category id for ``label``; -1 when unknown."""
        for categorie in self.categories:
            if label == categorie['name']:
                return categorie['id']
        return -1

    def getbbox(self, points):
        """Rasterize the polygon and derive its COCO bbox [x, y, w, h]."""
        polygons = points
        mask = self.polygons_to_mask([self.height, self.width], polygons)
        return self.mask2box(mask)

    def mask2box(self, mask):
        """Compute the bounding box of a binary mask.

        mask: [h, w] array of 0/1 values; 1 marks the object.  The min/max
        row and column indices of the 1-pixels give the box corners.
        """
        index = np.argwhere(mask == 1)
        rows = index[:, 0]
        clos = index[:, 1]
        # Upper-left corner (min row / min col) ...
        left_top_r = np.min(rows)  # y
        left_top_c = np.min(clos)  # x
        # ... and lower-right corner (max row / max col).
        right_bottom_r = np.max(rows)
        right_bottom_c = np.max(clos)
        return [left_top_c, left_top_r, right_bottom_c - left_top_c,
                right_bottom_r - left_top_r]  # [x, y, w, h] — COCO bbox format

    def polygons_to_mask(self, img_shape, polygons):
        """Rasterize polygon vertices into a boolean mask of ``img_shape``."""
        mask = np.zeros(img_shape, dtype=np.uint8)
        mask = PIL.Image.fromarray(mask)
        xy = list(map(tuple, polygons))
        # NOTE(review): only ``PIL.Image`` is imported at module top;
        # ``PIL.ImageDraw`` is not explicitly imported, so this line may
        # raise AttributeError depending on the Pillow version — verify.
        PIL.ImageDraw.Draw(mask).polygon(xy=xy, outline=1, fill=1)
        mask = np.array(mask, dtype=bool)
        return mask

    def data2coco(self):
        """Assemble the final COCO dictionary from the accumulated lists."""
        data_coco = {}
        data_coco['images'] = self.images
        data_coco['categories'] = self.categories
        data_coco['annotations'] = self.annotations
        return data_coco

    def save_json(self):
        """Run the conversion and write the COCO json to disk."""
        self.data_transfer()
        self.data_coco = self.data2coco()
        # indent=4 for human-readable output
        json.dump(self.data_coco, open(self.save_json_path, 'w'), indent=4)
if __name__ == '__main__':
    # Hard-coded demo paths; adjust before running.
    shp_path = "/home/omnisky/PycharmProjects/data/maskRcnn/cocotest/whubuilding/ori_from_win10/test/label_pixel_unit/"
    image_path = "/home/omnisky/PycharmProjects/data/maskRcnn/cocotest/whubuilding/test/"
    shpfile = glob.glob(shp_path+'/*.shp')
    # Instantiation runs the full conversion and writes the json.
    shp2coco(shpfile,
             '/home/omnisky/PycharmProjects/data/maskRcnn/cocotest/whubuilding/annotations/instances_test.json',
             image_path)
jovahe/Mask_RCNN_RS | my/shp2mask.py | # -*- coding: utf-8 -*-
from osgeo import gdal, ogr,osr
from osgeo.gdalconst import *
import numpy as np
import os
from os import path
import cv2
import matplotlib.pyplot as plt
# Rasterize a shapefile into a byte mask aligned with a reference image,
# then display image and mask side by side.

# Configure GDAL for non-UTF-8 file names and register all drivers.
gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8","NO")
gdal.AllRegister()  # register raster drivers
ogr.RegisterAll()   # register vector drivers

# Reference image whose size and georeference the mask will copy.
img_path="/home/omnisky/PycharmProjects/data/maskRcnn/cocotest/whubuilding/ori_from_win10/val/image/val_123.tif"
# img_path = input('enter image path:')  # interactive alternative, disabled
srcDS=gdal.Open(img_path,GA_ReadOnly)  # open the image read-only
geoTrans = srcDS.GetGeoTransform()  # six-parameter geotransform
srcPro=srcDS.GetProjection()  # projection / coordinate reference
srcXSize=srcDS.RasterXSize  # width in pixels
srcYSize=srcDS.RasterYSize  # height in pixels
nbands=srcDS.RasterCount  # band count

# Shapefile with the polygons to rasterize, and the output mask path.
vector_fn_sample="/home/omnisky/PycharmProjects/data/maskRcnn/cocotest/whubuilding/ori_from_win10/val/label_pixel_unit/val_123.shp"
raster_fn_sample ="/home/omnisky/PycharmProjects/data/maskRcnn/mytest/04197314_mask.tif"  # output mask image
if os.path.exists(raster_fn_sample):
    gdal.GetDriverByName('GTiff').Delete(raster_fn_sample)  # remove a stale mask

source_ds = ogr.Open(vector_fn_sample)  # open the vector file
source_layer = source_ds.GetLayer()  # layer containing all features/geometries

# Create a single-band byte mask the same size as the reference image.
mark_ds = gdal.GetDriverByName('GTiff').Create(raster_fn_sample, srcXSize, srcYSize, 1, gdal.GDT_Byte)
mark_ds.SetGeoTransform(geoTrans)  # copy the georeference to the mask
mark_ds.SetProjection(srcPro)  # copy the projection to the mask
band = mark_ds.GetRasterBand(1)  # the mask has only this one band
band.SetNoDataValue(0)  # background / no-data value

# Rasterize: burn value 125 into pixels covered by the polygons.
gdal.RasterizeLayer(mark_ds, [1], source_layer, burn_values=[125])
mark_ds.FlushCache()  # flush the mask to disk

img=srcDS.GetRasterBand(1).ReadAsArray(0, 0, srcXSize, srcYSize, srcXSize, srcYSize)
mask = mark_ds.GetRasterBand(1).ReadAsArray(0, 0, srcXSize, srcYSize, srcXSize, srcYSize)
# Show first band of the image and the mask side by side.
plt.subplot(121)
plt.imshow(img,'gray')
plt.subplot(122)
plt.imshow(mask,'gray')
plt.show()

# Dereference datasets so GDAL flushes and closes them.
srcDS=None
mark_ds=None
source_ds=None
jovahe/Mask_RCNN_RS | my/tools/geo_convert.py | """
将mask rcnn转成shapefile的一些函数
"""
from osgeo import ogr, gdal, osr
import cv2
import numpy as np
def create_geom():
    """Build a sample two-polygon multipolygon (test fixture for WKT export)."""
    # Two closed triangles; the last vertex of each repeats the first.
    triangles = [
        [(1179091.1646903288, 712782.8838459781),
         (1161053.0218226474, 667456.2684348812),
         (1218405.0658121984, 721108.1805541387),
         (1179091.1646903288, 712782.8838459781)],
        [(1214704.933941905, 641092.8288590391),
         (1228580.428455506, 682719.3123998424),
         (1218405.0658121984, 721108.1805541387),
         (1214704.933941905, 641092.8288590391)],
    ]
    multi = ogr.Geometry(ogr.wkbMultiPolygon)
    for coords in triangles:
        ring = ogr.Geometry(ogr.wkbLinearRing)
        for x, y in coords:
            ring.AddPoint(x, y)
        poly = ogr.Geometry(ogr.wkbPolygon)
        poly.AddGeometry(ring)
        multi.AddGeometry(poly)
    return multi
def reference_of_tiff(input_tiff_path):
    """Return the projected and geographic coordinate systems of a GeoTIFF.

    :param input_tiff_path: path to the tiff image
    :return: (projected SRS, its geographic base SRS)
    """
    dataset = gdal.Open(input_tiff_path)
    projected = osr.SpatialReference()
    projected.ImportFromWkt(dataset.GetProjection())
    geographic = projected.CloneGeogCS()
    return projected, geographic
def convert_geom_to_shp(input_geo, outputfile_name='untitled.shp', geo_type='multipolygon', spatialref=None):
    """
    Write a multipolygon geometry out as an ESRI shapefile.

    Only ``geo_type == 'multipolygon'`` is supported; any other value is a
    no-op.  Each sub-polygon of the input becomes one feature.

    :param input_geo: OGR multipolygon geometry to export
    :param outputfile_name: path of the shapefile to create
    :param geo_type: geometry kind; only 'multipolygon' is handled
    :param spatialref: osr.SpatialReference of the layer; defaults to EPSG:4326
    """
    if spatialref is None:
        spatialref = osr.SpatialReference()
        spatialref.ImportFromEPSG(4326)
    gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
    gdal.SetConfigOption("SHAPE_ENCODING", "GBK")
    driver = ogr.GetDriverByName("ESRI Shapefile")
    if geo_type == 'multipolygon':
        geometry_count = input_geo.GetGeometryCount()
        output_shpfile = driver.CreateDataSource(outputfile_name)
        dstlayer = output_shpfile.CreateLayer("layer", spatialref, geom_type=ogr.wkbMultiPolygon)
        for i in range(geometry_count):
            polygon = input_geo.GetGeometryRef(i)
            feature = ogr.Feature(dstlayer.GetLayerDefn())
            feature.SetGeometry(polygon)
            dstlayer.CreateFeature(feature)
            feature = None  # release the feature after it is written
        # BUG FIX: the original left Destroy() commented out and never
        # dereferenced the data source, so the shapefile could remain
        # unflushed on disk.  Dereferencing forces GDAL to flush and close.
        dstlayer = None
        output_shpfile = None
def convert_xy_from_img_to_geo(x, y, reference=None):
    """
    Map pixel coordinates to geographic/projected coordinates.

    :param x: pixel column
    :param y: pixel row
    :param reference: GDAL six-element geotransform, as returned by
        ``dataset.GetGeoTransform()`` on a georeferenced tiff
    :return: (px, py) in the reference's coordinate system
    """
    # Standard GDAL affine transform:
    #   px = origin_x + col * pixel_width  + row * row_rotation
    #   py = origin_y + col * col_rotation + row * pixel_height
    px = reference[0] + x * reference[1] + y * reference[2]
    py = reference[3] + x * reference[4] + y * reference[5]
    return px, py
def create_geom_from_rcnnmask(masks, reference=None):
    """Convert Mask R-CNN instance masks into a single OGR multipolygon.

    :param masks: per-instance binary mask array indexed on the last axis
        (``masks[:, :, i]`` is one instance) — assumed (H, W, N); TODO confirm
    :param reference: GDAL six-element geotransform used to map pixel
        coordinates to geographic coordinates
    :return: ogr multipolygon with one polygon per detected contour
    """
    num_masks = masks.shape[-1]
    multipolygon = ogr.Geometry(ogr.wkbMultiPolygon)
    polygon = ogr.Geometry(ogr.wkbPolygon)
    ring = ogr.Geometry(ogr.wkbLinearRing)
    for i_mask in range(num_masks):
        # Scale the 0/1 mask to 0/255 and re-threshold to a clean binary image.
        mask = 255 * masks[:, :, i_mask]
        gray = mask.astype(np.uint8)
        ret, binary = cv2.threshold(gray, 127, 255, cv2.THRESH_BINARY)
        # NOTE(review): the 3-value unpacking matches OpenCV 3.x;
        # OpenCV 4.x returns only (contours, hierarchy) — verify cv2 version.
        _, contours, hierarchy = cv2.findContours(binary, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
        for index in range(0, len(contours)):
            # The same ring/polygon objects are cleared and reused each pass;
            # presumably AddGeometry copies its argument, otherwise earlier
            # polygons would be corrupted — verify against the OGR API.
            polygon.Empty()
            ring.Empty()
            contour = contours[index]
            for i in range(contour.shape[0]):
                # contour points are [[x, y]]; scale to float and georeference.
                x, y = 1.0 * contour[i, 0, 0], 1.0 * contour[i, 0, 1]
                x, y = convert_xy_from_img_to_geo(x, y, reference)
                ring.AddPoint(x, y)
            ring.CloseRings()
            if ring.GetPointCount() > 4:  # 这个消除了一个bug -> skip degenerate rings (original bug workaround)
                polygon.AddGeometry(ring)
            multipolygon.AddGeometry(polygon)
    return multipolygon
"""if __name__ == '__main__':
poly = create_geom()
print(poly.ExportToWkt())
print(poly.GetGeometryCount())
print(poly.GetGeometryRef(1))
tiff_path = 'D:/codes/python/RS_Test/test.tif'
prosrs, geosrs = reference_of_tiff(tiff_path)
convert_geom_to_shp(poly, spatialref=geosrs)
"""
|
Newlance/ha-nicehash | custom_components/nicehash/payout_sensors.py | """
NiceHash Rig Payout Sensors
"""
from datetime import datetime
import logging
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.helpers.entity import Entity
from .const import (
CURRENCY_BTC,
DEFAULT_NAME,
FORMAT_DATETIME,
ICON_CURRENCY_BTC,
ICON_PULSE,
ICON_THERMOMETER,
NICEHASH_ATTRIBUTION,
PAYOUT_USER,
)
from .coordinators import (
MiningPayoutsDataUpdateCoordinator,
MiningRigsDataUpdateCoordinator,
)
from .nicehash import MiningRig, Payout
_LOGGER = logging.getLogger(__name__)
class RecentMiningPayoutSensor(Entity):
    """
    Displays the most recent mining payout (net amount = amount - fee).

    The coordinator delivers payouts sorted oldest-first, so the last
    matching USER payout encountered is the most recent one.
    """

    def __init__(
        self, coordinator: MiningPayoutsDataUpdateCoordinator, organization_id: str
    ):
        """Initialize the sensor"""
        self.coordinator = coordinator
        self.organization_id = organization_id
        self._id = None
        self._created = None
        self._currency = None
        self._amount = 0.00
        self._fee = 0.00

    @property
    def name(self):
        """Sensor name"""
        return f"{DEFAULT_NAME} Recent Mining Payout"

    @property
    def unique_id(self):
        """Unique entity id"""
        return f"{self.organization_id}:payouts:recent"

    @property
    def should_poll(self):
        """No need to poll, Coordinator notifies entity of updates"""
        return False

    @property
    def available(self):
        """Whether sensor is available"""
        return self.coordinator.last_update_success

    @property
    def state(self):
        """Net amount (amount minus fee) of the most recent USER payout"""
        try:
            for raw_payout in self.coordinator.data:
                payout = Payout(raw_payout)
                if payout.account_type == PAYOUT_USER:
                    self._id = payout.id
                    self._amount = payout.amount
                    self._currency = payout.currency
                    # API timestamps are in milliseconds.
                    self._created = datetime.fromtimestamp(payout.created / 1000.0)
                    self._fee = payout.fee
        except Exception as e:
            # BUG FIX: the original log message was truncated
            # ("Unable to get most recent ").
            _LOGGER.error(f"Unable to get most recent payout\n{e}")
            self._id = None
            self._created = None
            self._currency = None
            self._amount = 0.00
            self._fee = 0.00
        return self._amount - self._fee

    @property
    def icon(self):
        """Sensor icon"""
        return ICON_CURRENCY_BTC

    @property
    def unit_of_measurement(self):
        """Sensor unit of measurement"""
        return CURRENCY_BTC

    @property
    def device_state_attributes(self):
        """Sensor device state attributes"""
        created = None
        if self._created:
            created = self._created.strftime(FORMAT_DATETIME)
        return {
            ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
            "amount": self._amount,
            "created": created,
            "fee": self._fee,
        }

    async def async_added_to_hass(self):
        """Connect to dispatcher listening for entity data notifications"""
        self.async_on_remove(
            self.coordinator.async_add_listener(self.async_write_ha_state)
        )

    async def async_update(self):
        """Update entity"""
        await self.coordinator.async_request_refresh()
|
Newlance/ha-nicehash | custom_components/nicehash/switch.py | """
Sensor platform for NiceHash
"""
import logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.core import Config, HomeAssistant
from .const import (
BALANCE_TYPE_AVAILABLE,
BALANCE_TYPE_PENDING,
BALANCE_TYPE_TOTAL,
CURRENCY_BTC,
CURRENCY_EUR,
CURRENCY_USD,
DOMAIN,
DEVICE_LOAD,
DEVICE_RPM,
DEVICE_SPEED_RATE,
DEVICE_SPEED_ALGORITHM,
)
from .nicehash import (
MiningRig,
MiningRigDevice,
NiceHashPrivateClient,
NiceHashPublicClient,
)
from .control_switches import (
GPUSwitch
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(
    hass: HomeAssistant, config: Config, async_add_entities, discovery_info=None
):
    """Set up the NiceHash switch platform.

    Reads the shared component data from hass.data[DOMAIN] and, when device
    entities are enabled, creates one GPU switch per rig device.
    """
    _LOGGER.debug("Creating new NiceHash switch components")
    data = hass.data[DOMAIN]
    # Configuration
    organization_id = data.get("organization_id")  # NOTE(review): unused here
    client = data.get("client")
    # Options — NOTE(review): currency/balances/payouts are read but unused
    # in this switch platform; presumably copied from the sensor platform.
    currency = data.get("currency")
    balances_enabled = data.get("balances_enabled")
    payouts_enabled = data.get("payouts_enabled")
    rigs_enabled = data.get("rigs_enabled")
    devices_enabled = data.get("devices_enabled")
    # Mining rig device switches
    if rigs_enabled or devices_enabled:
        rigs_coordinator = data.get("rigs_coordinator")
        # Fetch the current rig list once to enumerate devices.
        rig_data = await client.get_mining_rigs()
        mining_rigs = rig_data.get("miningRigs")
        _LOGGER.debug(f"Found {len(mining_rigs)} rigs")
        if devices_enabled:
            _LOGGER.debug("Device sensors enabled")
            # One switch per device across all rigs; True = update before add.
            device_switches = create_device_switches(mining_rigs, rigs_coordinator,client)
            async_add_entities(device_switches, True)
def create_device_switches(mining_rigs, coordinator, client):
    """Build one GPUSwitch for every device of every mining rig.

    :param mining_rigs: list of raw rig dicts from the NiceHash API
    :param coordinator: rigs data update coordinator shared by the switches
    :param client: authenticated NiceHash client used to start/stop devices
    :return: list of GPUSwitch entities
    """
    switches = []
    for raw_rig in mining_rigs:
        rig = MiningRig(raw_rig)
        rig_devices = rig.devices.values()
        _LOGGER.debug(
            f"Found {len(rig_devices)} device switches(s) for {rig.name} ({rig.id})"
        )
        for dev in rig_devices:
            _LOGGER.debug(f"Creating {dev.name} ({dev.id}) switches")
            switches.append(GPUSwitch(coordinator, rig, dev, client))
    return switches
|
Newlance/ha-nicehash | custom_components/nicehash/control_switches.py | """
NiceHash Rig controls
"""
from datetime import datetime
import logging
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.helpers.entity import Entity
from homeassistant.components.switch import SwitchEntity
from .coordinators import MiningRigsDataUpdateCoordinator
from .nicehash import MiningRig, MiningRigDevice, NiceHashPrivateClient
from .const import DOMAIN, NICEHASH_ATTRIBUTION
import asyncio
class DeviceSwitch(SwitchEntity):
    """Base switch bound to a single NiceHash mining rig device."""

    def __init__(
        self,
        coordinator: MiningRigsDataUpdateCoordinator,
        rig: MiningRig,
        device: MiningRigDevice,
        client: NiceHashPrivateClient,
    ):
        """Initialize the switch

        :param coordinator: rigs data update coordinator
        :param rig: rig that owns the device
        :param device: the device this switch controls
        :param client: authenticated client used to issue start/stop commands
        """
        self.coordinator = coordinator
        self._rig_id = rig.id
        self._rig_name = rig.name
        self._device_id = device.id
        self._device_name = device.name
        self._client = client
        self._status = device.status

    def _get_device(self):
        """Return this switch's device from the latest coordinator data.

        Returns None (implicitly) when the rig or device cannot be found.
        """
        try:
            mining_rigs = self.coordinator.data.get("miningRigs")
            rig = MiningRig(mining_rigs.get(self._rig_id))
            return rig.devices.get(self._device_id)
        except Exception as e:
            # BUG FIX: this module never defined a module-level _LOGGER, so
            # the original raised NameError on this path; create the logger
            # on demand instead (``import logging`` is already at file top).
            logging.getLogger(__name__).error(
                f"Unable to get mining device ({self._device_id})\n{e}"
            )
class GPUSwitch(DeviceSwitch):
    """Switch that starts/stops mining on a single GPU device."""

    _is_on = False
    _last_response = "N/A"

    @property
    def name(self):
        """switch name"""
        return f"{self._device_name} Switch"

    @property
    def unique_id(self):
        """Unique entity id"""
        return f"{self._device_id}:switch"

    @property
    def is_on(self):
        """Whether the device is currently mining.

        BUG FIX: the original read ``device.status`` before checking that
        the lookup succeeded, raising AttributeError when the device was
        missing; the None case is now handled first.
        """
        device = self._get_device()
        if device is None:
            self._is_on = "unavailable"
        elif device.status == "Mining":
            self._is_on = True
        else:
            self._is_on = False
        return self._is_on

    @property
    def device_state_attributes(self):
        """Sensor device state attributes"""
        return {
            ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
            "status": self._status,
            "device": self._device_name,
            "last_response": self._last_response,
        }

    def turn_on(self, **kwargs):
        """Turn the switch on (start mining on the device)."""
        self._is_on = True
        # NOTE(review): asyncio.run inside a sync HA handler can conflict
        # with the running event loop — consider async_turn_on; left as-is.
        response = asyncio.run(self._client.toggle_device(self._device_id, "START", self._rig_id))
        self._last_response = "Success!" if response["success"] else response["message"]

    def turn_off(self, **kwargs):
        """Turn the switch off (stop mining on the device)."""
        self._is_on = False
        response = asyncio.run(self._client.toggle_device(self._device_id, "STOP", self._rig_id))
        self._last_response = "Success!" if response["success"] else response["message"]
Newlance/ha-nicehash | custom_components/nicehash/account_sensors.py | """
NiceHash Account Sensors
"""
import logging
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.helpers.entity import Entity
from .const import (
BALANCE_TYPE_AVAILABLE,
BALANCE_TYPE_PENDING,
BALANCE_TYPE_TOTAL,
CURRENCY_BTC,
CURRENCY_EUR,
CURRENCY_USD,
DEFAULT_NAME,
ICON_CURRENCY_BTC,
ICON_CURRENCY_EUR,
ICON_CURRENCY_USD,
NICEHASH_ATTRIBUTION,
)
from .coordinators import AccountsDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
class BalanceSensor(Entity):
    """
    Displays [available|pending|total] balance of an account for a currency.

    Non-BTC currencies are derived from the BTC balances using the exchange
    rates supplied by the coordinator.
    """

    def __init__(
        self,
        coordinator: AccountsDataUpdateCoordinator,
        organization_id: str,
        currency: str,
        balance_type=BALANCE_TYPE_AVAILABLE,
    ):
        """Initialize the sensor

        :param coordinator: accounts coordinator providing balances and rates
        :param organization_id: NiceHash organization id (used in unique_id)
        :param currency: display currency (BTC, EUR or USD)
        :param balance_type: which balance to expose as the state
        """
        self.coordinator = coordinator
        self.currency = currency
        self.organization_id = organization_id
        self.balance_type = balance_type
        self._available = 0.00
        self._pending = 0.00
        self._total_balance = 0.00
        self._exchange_rate = 0.00

    @property
    def name(self):
        """Sensor name"""
        # Capitalize the first letter of the balance type for display.
        balance_type = self.balance_type[0].upper() + self.balance_type[1:]
        return f"{DEFAULT_NAME} {balance_type} Account Balance {self.currency}"

    @property
    def unique_id(self):
        """Unique entity id"""
        return f"{self.organization_id}:{self.currency}:{self.balance_type}"

    @property
    def should_poll(self):
        """No need to poll, Coordinator notifies entity of updates"""
        return False

    @property
    def available(self):
        """Whether sensor is available"""
        return self.coordinator.last_update_success

    @property
    def state(self):
        """Selected balance, converted to the display currency if needed"""
        accounts = self.coordinator.data.get("accounts")
        total = accounts.get("total")
        pending = float(total.get("pending"))
        available = float(total.get("available"))
        total_balance = float(total.get("totalBalance"))
        if self.currency == CURRENCY_BTC:
            # Native BTC values need no conversion.
            self._pending = pending
            self._available = available
            self._total_balance = total_balance
        else:
            # Convert BTC amounts to the display currency using the
            # coordinator's "BTC-<currency>" rate.
            exchange_rates = self.coordinator.data.get("exchange_rates")
            exchange_rate = exchange_rates.get(f"{CURRENCY_BTC}-{self.currency}")
            self._pending = round(pending * exchange_rate, 2)
            self._available = round(available * exchange_rate, 2)
            self._total_balance = round(total_balance * exchange_rate, 2)
            self._exchange_rate = exchange_rate
        if self.balance_type == BALANCE_TYPE_TOTAL:
            return self._total_balance
        elif self.balance_type == BALANCE_TYPE_PENDING:
            return self._pending
        return self._available

    @property
    def icon(self):
        """Sensor icon"""
        if self.currency == CURRENCY_EUR:
            return ICON_CURRENCY_EUR
        elif self.currency == CURRENCY_USD:
            return ICON_CURRENCY_USD
        return ICON_CURRENCY_BTC

    @property
    def unit_of_measurement(self):
        """Sensor unit of measurement"""
        return self.currency

    @property
    def device_state_attributes(self):
        """Sensor device state attributes"""
        return {
            ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
            "total": self._total_balance,
            "available": self._available,
            "pending": self._pending,
            "exchange_rate": self._exchange_rate,
        }

    async def async_added_to_hass(self):
        """Connect to dispatcher listening for entity data notifications"""
        self.async_on_remove(
            self.coordinator.async_add_listener(self.async_write_ha_state)
        )

    async def async_update(self):
        """Update entity"""
        await self.coordinator.async_request_refresh()
|
Newlance/ha-nicehash | custom_components/nicehash/coordinators.py | <filename>custom_components/nicehash/coordinators.py
"""
NiceHash Data Update Coordinators
"""
from datetime import timedelta
import logging
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import (
DataUpdateCoordinator,
UpdateFailed,
)
from .const import (
CURRENCY_BTC,
DOMAIN,
)
from .nicehash import NiceHashPrivateClient, NiceHashPublicClient
SCAN_INTERVAL_RIGS = timedelta(minutes=1)
SCAN_INTERVAL_ACCOUNTS = timedelta(minutes=60)
SCAN_INTERVAL_PAYOUTS = timedelta(minutes=60)
_LOGGER = logging.getLogger(__name__)
class AccountsDataUpdateCoordinator(DataUpdateCoordinator):
    """Manages fetching accounts data from NiceHash API"""

    def __init__(self, hass: HomeAssistant, client: NiceHashPrivateClient):
        """Initialize

        :param hass: Home Assistant instance
        :param client: authenticated NiceHash API client
        """
        self.name = f"{DOMAIN}_accounts_coordinator"
        self._client = client
        super().__init__(
            hass, _LOGGER, name=self.name, update_interval=SCAN_INTERVAL_ACCOUNTS
        )

    async def _async_update_data(self):
        """Fetch account balances plus BTC exchange rates.

        :return: dict with "accounts" (raw API response) and
            "exchange_rates" (mapping "BTC-<currency>" -> float rate)
        :raises UpdateFailed: when any API call fails
        """
        try:
            accounts = await self._client.get_accounts()
            exchange_rates = await NiceHashPublicClient().get_exchange_rates()
            # Only keep rates converting *from* Bitcoin.
            rates_dict = {
                f"{rate.get('fromCurrency')}-{rate.get('toCurrency')}":
                    float(rate.get("exchangeRate"))
                for rate in exchange_rates
                if rate.get("fromCurrency") == CURRENCY_BTC
            }
            return {
                "accounts": accounts,
                "exchange_rates": rates_dict,
            }
        except Exception as e:
            # Chain the cause so the HA log shows the original traceback.
            raise UpdateFailed(e) from e
class MiningRigsDataUpdateCoordinator(DataUpdateCoordinator):
    """Manages fetching mining rigs data from NiceHash API"""

    def __init__(self, hass: HomeAssistant, client: NiceHashPrivateClient):
        """Initialize

        :param hass: Home Assistant instance
        :param client: authenticated NiceHash API client
        """
        self.name = f"{DOMAIN}_mining_rigs_coordinator"
        self._client = client
        super().__init__(
            hass, _LOGGER, name=self.name, update_interval=SCAN_INTERVAL_RIGS
        )

    async def _async_update_data(self):
        """Fetch mining rigs and re-key the rig list by rigId.

        Consumers look rigs up by id, so the API's list is converted to a
        dict keyed by the stringified rigId.

        :raises UpdateFailed: when the API call fails
        """
        try:
            data = await self._client.get_mining_rigs()
            mining_rigs = data.get("miningRigs")
            data["miningRigs"] = {f"{rig.get('rigId')}": rig for rig in mining_rigs}
            return data
        except Exception as e:
            # Chain the cause so the HA log shows the original traceback.
            raise UpdateFailed(e) from e
class MiningPayoutsDataUpdateCoordinator(DataUpdateCoordinator):
    """Manages fetching mining rig payout data from NiceHash API"""

    def __init__(self, hass: HomeAssistant, client: NiceHashPrivateClient):
        """Initialize

        :param hass: Home Assistant instance
        :param client: authenticated NiceHash API client
        """
        self.name = f"{DOMAIN}_mining_payouts_coordinator"
        self._client = client
        super().__init__(
            hass, _LOGGER, name=self.name, update_interval=SCAN_INTERVAL_PAYOUTS
        )

    async def _async_update_data(self):
        """Fetch roughly a week of payouts, sorted oldest-first.

        42 = 6 payouts per day * 7 days.

        :return: list of raw payout dicts sorted ascending by "created"
        :raises UpdateFailed: when the API call fails
        """
        try:
            data = await self._client.get_rig_payouts(42)
            payouts = data.get("list")
            # Ascending sort: consumers treat the last matching entry as
            # the most recent payout.
            payouts.sort(key=lambda payout: payout.get("created"))
            return payouts
        except Exception as e:
            # Chain the cause so the HA log shows the original traceback.
            raise UpdateFailed(e) from e
|
Newlance/ha-nicehash | custom_components/nicehash/device_sensors.py | """
NiceHash Rig Device Sensors
"""
from datetime import datetime
import logging
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.helpers.entity import Entity
from .const import (
DEVICE_STATUS_UNKNOWN,
DEVICE_LOAD,
DEVICE_RPM,
DEVICE_SPEED_ALGORITHM,
DEVICE_SPEED_RATE,
ICON_PICKAXE,
ICON_PULSE,
ICON_THERMOMETER,
ICON_SPEEDOMETER,
NICEHASH_ATTRIBUTION,
)
from .coordinators import MiningRigsDataUpdateCoordinator
from .nicehash import MiningRig, MiningRigDevice
_LOGGER = logging.getLogger(__name__)
class DeviceSensor(Entity):
    """
    Base class for mining rig device sensors.

    Subclasses override ``state`` (and usually ``name``/``unique_id``)
    and call ``_get_device`` to read the latest device snapshot.
    """

    def __init__(
        self,
        coordinator: MiningRigsDataUpdateCoordinator,
        rig: MiningRig,
        device: MiningRigDevice,
    ):
        """Initialize the sensor

        :param coordinator: rigs data update coordinator
        :param rig: the rig that owns the device
        :param device: the device this sensor reports on
        """
        self.coordinator = coordinator
        self._rig_id = rig.id
        self._rig_name = rig.name
        self._device_id = device.id
        self._device_name = device.name

    @property
    def name(self):
        """Sensor name"""
        return f"{self._device_name}"

    @property
    def should_poll(self):
        """No need to poll, Coordinator notifies entity of updates"""
        return False

    @property
    def available(self):
        """Whether sensor is available"""
        return self.coordinator.last_update_success

    @property
    def icon(self):
        """Sensor icon"""
        return ICON_PICKAXE

    @property
    def unit_of_measurement(self):
        """Sensor unit of measurement"""
        return None

    async def async_added_to_hass(self):
        """Connect to dispatcher listening for entity data notifications"""
        self.async_on_remove(
            self.coordinator.async_add_listener(self.async_write_ha_state)
        )

    async def async_update(self):
        """Update entity"""
        await self.coordinator.async_request_refresh()

    def _get_device(self):
        """Return this sensor's device from the latest coordinator data.

        Returns None (implicitly) when the rig or device cannot be found —
        subclasses guard against that.
        """
        try:
            mining_rigs = self.coordinator.data.get("miningRigs")
            rig = MiningRig(mining_rigs.get(self._rig_id))
            return rig.devices.get(self._device_id)
        except Exception as e:
            _LOGGER.error(f"Unable to get mining device ({self._device_id})\n{e}")
class DeviceStatusSensor(DeviceSensor):
    """
    Reports the current status of a mining rig device.
    """

    _status = "Unknown"

    @property
    def name(self):
        """Sensor name"""
        return f"{self._device_name} Status"

    @property
    def unique_id(self):
        """Unique entity id"""
        return f"{self._device_id}:status"

    @property
    def state(self):
        """Device status string, or "Unknown" when the device is missing"""
        device = self._get_device()
        self._status = device.status if device else "Unknown"
        return self._status

    @property
    def icon(self):
        """Sensor icon"""
        return ICON_PULSE

    @property
    def device_state_attributes(self):
        """Sensor device state attributes"""
        attributes = {
            ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
            "status": self._status,
            "rig": self._rig_name,
        }
        return attributes
class DeviceSpeedSensor(DeviceSensor):
    """
    Reports the hash speed of a mining rig device (first algorithm entry).
    """

    _algorithm = None
    _speed = 0.00
    _speed_unit = "MH"

    @property
    def name(self):
        """Sensor name"""
        return f"{self._device_name} Speed"

    @property
    def unique_id(self):
        """Unique entity id"""
        return f"{self._device_id}:speed"

    @property
    def state(self):
        """Current speed; resets to defaults when no speed data exists"""
        device = self._get_device()
        speeds = device.speeds if device else []
        if speeds:
            primary = speeds[0]
            self._algorithm = primary.get("title")
            self._speed = primary.get("speed")
            self._speed_unit = primary.get("displaySuffix")
        else:
            self._algorithm = "Unknown"
            self._speed = 0.00
            self._speed_unit = "MH"
        return self._speed

    @property
    def icon(self):
        """Sensor icon"""
        return ICON_SPEEDOMETER

    @property
    def unit_of_measurement(self):
        """Sensor unit of measurement"""
        return f"{self._speed_unit}/s"

    @property
    def device_state_attributes(self):
        """Sensor device state attributes"""
        attributes = {
            ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
            "algorithm": self._algorithm,
            "speed": self._speed,
            "speed_unit": self._speed_unit,
            "rig": self._rig_name,
        }
        return attributes
class DeviceAlgorithmSensor(DeviceSensor):
"""
Displays algorithm of a mining rig device
"""
_algorithm = None
_speed = 0.00
_speed_unit = "MH"
@property
def name(self):
"""Sensor name"""
return f"{self._device_name} Algorithm"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._device_id}:algorithm"
@property
def state(self):
"""Sensor state"""
device = self._get_device()
if device and len(device.speeds) > 0:
algorithm = device.speeds[0]
self._algorithm = algorithm.get("title")
self._speed = algorithm.get("speed")
self._speed_unit = algorithm.get("displaySuffix")
else:
self._algorithm = "Unknown"
self._speed = 0.00
self._speed_unit = "MH"
return self._algorithm
@property
def icon(self):
"""Sensor icon"""
return ICON_PICKAXE
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"algorithm": self._algorithm,
"speed": self._speed,
"speed_unit": self._speed_unit,
"rig": self._rig_name,
}
class DeviceTemperatureSensor(DeviceSensor):
"""
Displays temperature of a mining rig device
"""
_temperature = 0
@property
def name(self):
"""Sensor name"""
return f"{self._device_name} Temperature"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._device_id}:temperature"
@property
def state(self):
"""Sensor state"""
device = self._get_device()
if device:
self._temperature = device.temperature
else:
self._temperature = 0
return self._temperature
@property
def icon(self):
"""Sensor icon"""
return ICON_THERMOMETER
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
# Not Celsius because then HA might convert to Fahrenheit
return "C"
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"temperature": self._temperature,
"rig": self._rig_name,
}
class DeviceLoadSensor(DeviceSensor):
"""
Displays load of a mining rig device
"""
_load = 0
@property
def name(self):
"""Sensor name"""
return f"{self._device_name} Load"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._device_id}:load"
@property
def state(self):
"""Sensor state"""
device = self._get_device()
if device:
self._load = device.load
else:
self._load = 0
return self._load
@property
def icon(self):
"""Sensor icon"""
return ICON_SPEEDOMETER
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
return "%"
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"load": self._load,
"rig": self._rig_name,
}
class DeviceRPMSensor(DeviceSensor):
"""
Displays RPM of a mining rig device
"""
@property
def name(self):
"""Sensor name"""
return f"{self._device_name} RPM"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._device_id}:rpm"
@property
def state(self):
"""Sensor state"""
device = self._get_device()
if device:
self._rpm = device.rpm
else:
self._rpm = 0
return self._rpm
@property
def icon(self):
"""Sensor icon"""
return ICON_SPEEDOMETER
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
return "RPM"
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"rpm": self._rpm,
"rig": self._rig_name,
}
|
Newlance/ha-nicehash | custom_components/nicehash/rig_sensors.py | <reponame>Newlance/ha-nicehash
"""
NiceHash Rig Sensors
"""
from datetime import datetime
import logging
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.helpers.entity import Entity
from .const import (
CURRENCY_BTC,
DEVICE_STATUS_UNKNOWN,
FORMAT_DATETIME,
ICON_CURRENCY_BTC,
ICON_EXCAVATOR,
ICON_PULSE,
ICON_PICKAXE,
ICON_SPEEDOMETER,
ICON_THERMOMETER,
NICEHASH_ATTRIBUTION,
)
from .coordinators import MiningRigsDataUpdateCoordinator
from .nicehash import MiningRig
_LOGGER = logging.getLogger(__name__)
class RigSensor(Entity):
"""
Mining rig sensor
"""
def __init__(self, coordinator: MiningRigsDataUpdateCoordinator, rig: MiningRig):
"""Initialize the sensor"""
self.coordinator = coordinator
self._rig_id = rig.id
self._rig_name = rig.name
@property
def name(self):
"""Sensor name"""
return self._rig_name
@property
def icon(self):
"""Sensor icon"""
return ICON_EXCAVATOR
@property
def should_poll(self):
"""No need to poll, Coordinator notifies entity of updates"""
return False
@property
def available(self):
"""Whether sensor is available"""
return self.coordinator.last_update_success
async def async_added_to_hass(self):
"""Connect to dispatcher listening for entity data notifications"""
self.async_on_remove(
self.coordinator.async_add_listener(self.async_write_ha_state)
)
async def async_update(self):
"""Update entity"""
await self.coordinator.async_request_refresh()
def _get_rig(self):
try:
mining_rigs = self.coordinator.data.get("miningRigs")
return MiningRig(mining_rigs.get(self._rig_id))
except Exception as e:
_LOGGER.error(f"Unable to get mining rig ({self._rig_id})\n{e}")
class RigHighTemperatureSensor(RigSensor):
"""
Displays highest temperature of active mining rig devices
"""
_temps = []
_num_devices = 0
_highest_temp = 0
@property
def name(self):
"""Sensor name"""
return f"{self._rig_name} Temperature"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._rig_id}:high_temperature"
@property
def state(self):
"""Sensor state"""
self._highest_temp = 0
rig = self._get_rig()
if rig:
self._num_devices = rig.num_devices
self._temps = []
for device in rig.devices.values():
if device.temperature > -1:
self._temps.append(device.temperature)
if len(self._temps) > 0:
self._highest_temp = max(self._temps)
return self._highest_temp
@property
def icon(self):
"""Sensor icon"""
return ICON_THERMOMETER
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
# Not Celsius because then HA might convert to Fahrenheit
return "C"
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"highest_temperature": self._highest_temp,
"temperatures": self._temps,
"total_devices": self._num_devices,
}
class RigLowTemperatureSensor(RigSensor):
"""
Displays lowest temperature of active mining rig devices
"""
_temps = []
_num_devices = 0
_lowest_temp = 0
@property
def name(self):
"""Sensor name"""
return f"{self._rig_name} Low Temperature"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._rig_id}:low_temperature"
@property
def state(self):
"""Sensor state"""
self._lowest_temp = 0
rig = self._get_rig()
if rig:
self._num_devices = rig.num_devices
self._temps = []
for device in rig.devices.values():
if device.temperature > -1:
self._temps.append(device.temperature)
if len(self._temps) > 0:
self._lowest_temp = min(self._temps)
return self._lowest_temp
@property
def icon(self):
"""Sensor icon"""
return ICON_THERMOMETER
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
# Not Celsius because then HA might convert to Fahrenheit
return "C"
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"lowest_temperature": self._lowest_temp,
"temperatures": self._temps,
"total_devices": self._num_devices,
}
class RigStatusSensor(RigSensor):
"""
Displays status of a mining rig
"""
_status = DEVICE_STATUS_UNKNOWN
_status_time = None
@property
def name(self):
"""Sensor name"""
return f"{self._rig_name} Status"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._rig_id}:status"
@property
def state(self):
"""Sensor state"""
rig = self._get_rig()
if rig:
status = rig.status
self._status_time = datetime.fromtimestamp(rig.status_time / 1000.0)
else:
status = DEVICE_STATUS_UNKNOWN
self._status_time = None
status = DEVICE_STATUS_UNKNOWN
self._status = status[0].upper() + status.lower()[1:]
return self._status
@property
def icon(self):
"""Sensor icon"""
return ICON_PULSE
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
return None
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
status_time = None
if self._status_time:
status_time = self._status_time.strftime(FORMAT_DATETIME)
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"status": self._status,
"status_time": status_time,
}
class RigProfitabilitySensor(RigSensor):
"""
Displays profitability of a mining rig
"""
_profitability = 0
_unpaid_amount = 0
@property
def name(self):
"""Sensor name"""
return f"{self._rig_name} Profitability"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._rig_id}:profitability"
@property
def state(self):
"""Sensor state"""
rig = self._get_rig()
if rig:
self._profitability = rig.profitability
self._unpaid_amount = rig.unpaid_amount
else:
self._profitability = 0
self._unpaid_amount = 0
return self._profitability
@property
def icon(self):
"""Sensor icon"""
return ICON_CURRENCY_BTC
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
return CURRENCY_BTC
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"profitability": self._profitability,
"unpaid_amount": self._unpaid_amount,
}
class RigAlgorithmSensor(RigSensor):
"""
Displays primary algorithm of a mining rig
"""
_algorithms = []
@property
def name(self):
"""Sensor name"""
return f"{self._rig_name} Algorithm"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._rig_id}:algorithm"
@property
def state(self):
"""Sensor state"""
rig = self._get_rig()
if rig:
algorithms = rig.get_algorithms()
self._algorithms = [*algorithms.keys()]
if len(self._algorithms) > 0:
return ", ".join(self._algorithms)
return "Unknown"
@property
def icon(self):
"""Sensor icon"""
return ICON_PICKAXE
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION, "algorithms": self._algorithms}
class RigSpeedSensor(RigSensor):
"""
Displays rig's highest algorithm speed of active mining rig devices
"""
_algorithm = "Unknown"
_speed = 0
_unit = "MH/s"
@property
def name(self):
"""Sensor name"""
return f"{self._rig_name} Speed"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._rig_id}:speed"
@property
def state(self):
"""Sensor state"""
self._speed = 0
rig = self._get_rig()
if rig:
algorithms = rig.get_algorithms()
for key in algorithms.keys():
algo = algorithms.get(key)
if algo.speed > self._speed:
self._algorithm = algo.name
self._speed = algo.speed
self._unit = algo.unit
return self._speed
@property
def icon(self):
"""Sensor icon"""
return ICON_SPEEDOMETER
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
return None
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"algorithm": self._algorithm,
"speed": self._speed,
"unit": self._unit,
}
|
JustasGau/DonjinKrawler | install-hooks.py | <reponame>JustasGau/DonjinKrawler
import sys
from os import path
import urllib; from urllib.request import urlretrieve
from subprocess import call
def install_hooks(directory):
checkstyleUrl = 'https://github.com/checkstyle/checkstyle/releases/download/checkstyle-8.36.1/checkstyle-8.36.1-all.jar'
preCommitUrl = 'https://gist.githubusercontent.com/EdotJ/d512826d5b4fd3e6cdc285b9236511b2/raw/43e5087ed173fd03aab640b0b3db22f11319c623/pre-commit'
checkstyleName = checkstyleUrl.split('/')[len(checkstyleUrl.split('/')) - 1]
basePath = path.abspath(directory)
print("Downloading checkstyle to %s..." % basePath + "/.git/hooks/" + checkstyleName)
urlretrieve(checkstyleUrl, basePath + "/.git/hooks/" + checkstyleName)
print("Downloading pre-commit script to %s" % basePath + "/.git/hooks/pre-commit")
urlretrieve(preCommitUrl, basePath + "/.git/hooks/pre-commit")
with open(basePath + '/.git/config', 'a+') as gitConfig:
if ("[checkstyle]" not in gitConfig.read()):
print("Adding git configurations to .git/config")
gitConfig.write("[checkstyle]\n")
gitConfig.write("jar = %s\n" % (basePath + "/.git/hooks/" + checkstyleName))
gitConfig.write("checkfile = %s\n" % (basePath + "/checkstyle_config.xml"))
print("Changing permissions for pre-commit. Has to run as root, enter password plz")
call(["sudo", "chmod", "+x", (basePath + "/.git/hooks/pre-commit")])
if __name__ == "__main__":
if (len(sys.argv) < 2):
print("Enter a directory to install hooks")
else:
if (path.exists(sys.argv[1])):
install_hooks(sys.argv[1])
|
CarlosMart626/graphene-django-authorization | graphene_django_authorization/fields.py | from django.core.exceptions import PermissionDenied
from graphene_django.filter.fields import DjangoFilterConnectionField
from graphene_django.fields import DjangoConnectionField
from graphene_django import DjangoObjectType
from .utils import is_related_to_user, has_perm
class AuthDjangoFilterConnectionField(DjangoFilterConnectionField):
"""
Django Filter Connection required a defined permission *_permission* this permission
can be a *tuple* or a *str* to set a required or a set of required permissions.
"""
_permissions = None
def connection_resolver(self, resolver, connection, default_manager, max_limit,
enforce_first_or_last, filterset_class, filtering_args,
root, args, context, info):
"""
Resolve the required connection if the user in context has the permission required. If the user
does not have the required permission then returns a *Permission Denied* to the request.
"""
assert self._permissions is not None
if has_perm(self._permissions, context) is not True:
print(DjangoConnectionField)
return DjangoConnectionField.connection_resolver(
resolver, connection, [PermissionDenied('Permission Denied'), ], max_limit,
enforce_first_or_last, root, args, context, info)
return super(AuthDjangoFilterConnectionField, self).connection_resolver(
resolver, connection, default_manager, max_limit,
enforce_first_or_last, filterset_class, filtering_args,
root, args, context, info)
class AuthDjangoObjectType(DjangoObjectType):
"""
Django Filter Connection required a defined permission *_permission* this permission
can be a *tuple* or a *str* to set a required or a set of required permissions.
"""
_permissions = None
model = None
related_field = None
@classmethod
def get_node(cls, id, context, info):
assert cls._permissions is not None
try:
object_instance = cls._meta.model.objects.get(id=id)
except cls._meta.model.DoesNotExist:
return None
user = context.user
if cls.model or cls.related_field:
assert cls.model is not None and cls.related_field is not None
if is_related_to_user(object_instance, user):
return object_instance
if has_perm(cls._permissions, context):
return object_instance
return PermissionDenied('Permission Denied')
|
CarlosMart626/graphene-django-authorization | graphene_django_authorization/test/models.py | from django.db import models
class Pet(models.Model):
name = models.CharField(max_length=30)
|
CarlosMart626/graphene-django-authorization | setup.py | from setuptools import find_packages, setup
from graphene_django_authorization import __version__
tests_require = [
'pytest>=2.7.2',
'pytest-cov',
'coveralls',
'coverage==4.4.2',
'mock',
'pytz',
'django-filter',
'pytest-django==3.1.2',
'graphene-django<2.0',
'django<2.0'
]
setup(
name='graphene-django-authorization',
version=__version__,
description='Graphene Django Authorization Integration',
long_description=open('README.rst').read(),
url='https://github.com/CarlosMart626/graphene-django-authorization',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
],
keywords='api graphql protocol rest relay graphene django autotization permissions',
packages=find_packages(exclude=['tests']),
install_requires=[
'graphene-django<2.0'
],
setup_requires=[
'pytest-runner',
],
tests_require=tests_require,
extras_require={
'test': tests_require,
},
include_package_data=True,
zip_safe=False,
platforms='any',
)
|
CarlosMart626/graphene-django-authorization | examples/starwars/models.py | <gh_stars>1-10
from __future__ import absolute_import
from django.db import models
class Character(models.Model):
name = models.CharField(max_length=50)
ship = models.ForeignKey('Ship', blank=True, null=True, related_name='characters')
def __str__(self):
return self.name
class Faction(models.Model):
name = models.CharField(max_length=50)
hero = models.ForeignKey(Character)
def __str__(self):
return self.name
class Ship(models.Model):
name = models.CharField(max_length=50)
faction = models.ForeignKey(Faction, related_name='ships')
def __str__(self):
return self.name
|
CarlosMart626/graphene-django-authorization | graphene_django_authorization/auth/fields.py | from django.core.exceptions import PermissionDenied
from graphene_django.filter.fields import DjangoFilterConnectionField
from graphene_django.fields import DjangoConnectionField
class AuthDjangoFilterConnectionField(DjangoFilterConnectionField):
_permission = ''
@classmethod
def has_perm(cls, context):
if context is None:
return False
if type(context) is dict:
user = context.get('user', None)
if user is None:
return False
else:
user = context.user
if user.is_authenticated() is False:
return False
if type(cls._permission) is tuple:
for permission in cls._permission:
if not user.has_perm(permission):
return False
if type(cls._permission) is str:
if not user.has_perm(cls._permission):
return False
return True
def connection_resolver(self, resolver, connection, default_manager, filterset_class, filtering_args,
root, args, context, info):
if self.has_perm(context) is not True:
return DjangoConnectionField.connection_resolver(
resolver, connection, [PermissionDenied('Permission Denied'), ], root, args, context, info)
return super(AuthDjangoFilterConnectionField, self).connection_resolver(
resolver, connection, default_manager, filterset_class, filtering_args,
root, args, context, info)
|
CarlosMart626/graphene-django-authorization | graphene_django_authorization/tests/test_auth.py | <reponame>CarlosMart626/graphene-django-authorization
import collections
import graphene
import pytest
from graphene import Schema, relay, ObjectType
from django.test import TestCase, RequestFactory
from graphene_django import DjangoObjectType
from graphene_django_authorization.auth.mixins import AuthNodeMixin, AuthMutationMixin
from django.core.exceptions import PermissionDenied
from .models import Pet
from graphene_django.utils import DJANGO_FILTER_INSTALLED
pytestmark = []
if DJANGO_FILTER_INSTALLED:
from graphene_django_authorization.auth.fields import AuthDjangoFilterConnectionField
else:
pytestmark.append(pytest.mark.skipif(True, reason='django_filters not installed'))
pytestmark.append(pytest.mark.django_db)
class PetNode(AuthNodeMixin, DjangoObjectType):
_permission = 'app.view_pet'
class Meta:
model = Pet
interfaces = (relay.Node, )
class PetNodeMultiplePermissions(AuthNodeMixin, DjangoObjectType):
_permission = ('app.view_pet', 'app.add_pet')
class Meta:
model = Pet
interfaces = (relay.Node, )
class CreatePet(AuthMutationMixin, graphene.Mutation):
"""
Mutation for create user
example mutation:
mutation {
createPet(name: "Mila") {
pet {
id
name
}
}
}
"""
_permission = 'app.create_pet'
pet = graphene.Field(PetNode)
class Input:
name = graphene.String(required=True)
@classmethod
def mutate(cls, root, input, context, info):
if cls.has_permision(context) is not True:
return cls.has_permision(context)
pet_name = input.get('name')
pet = Pet.objects.create(name=pet_name)
return CreatePet(pet=pet)
class CreatePetMultiple(AuthMutationMixin, graphene.Mutation):
"""
Mutation for create user
example mutation:
mutation {
createPet(name: "Mila") {
pet {
id
name
}
}
}
"""
_permission = ('app.view_pet', 'app.add_pet')
pet = graphene.Field(PetNode)
class Input:
name = graphene.String(required=True)
@classmethod
def mutate(cls, root, input, context, info):
if cls.has_permision(context) is not True:
return cls.has_permision(context)
pet_name = input.get('name')
pet = Pet.objects.create(name=pet_name)
return CreatePet(pet=pet)
if DJANGO_FILTER_INSTALLED:
class PetFilterConnection(AuthDjangoFilterConnectionField):
_permission = 'app.create_pet'
class PetFilterConnectionMultiple(AuthDjangoFilterConnectionField):
_permission = ('app.view_pet', 'app.add_pet')
class QueryRoot(ObjectType):
pet = relay.Node.Field(PetNode)
if DJANGO_FILTER_INSTALLED:
pets = PetFilterConnection(PetNode)
class MutationRoot(ObjectType):
create_pet = CreatePet.Field()
schema = Schema(query=QueryRoot, mutation=MutationRoot)
class MockUserContext(object):
def __init__(self, authenticated=True, is_staff=False, superuser=False, perms=()):
self.user = self
self.authenticated = authenticated
self.is_staff = is_staff
self.is_superuser = superuser
self.perms = perms
def is_authenticated(self):
return self.authenticated
def has_perm(self, check_perms):
if check_perms not in self.perms:
return False
return True
class AuthorizationTests(TestCase):
"""
This TestCase auth.
"""
@classmethod
def setUpClass(cls):
super(AuthorizationTests, cls).setUpClass()
cls.schema = schema
cls.query_mutation = '''
mutation {{
createPet(name: "{name}") {{
pet{{
id
name
}}
}}
}}
'''
cls.query_node = '''
query {
pet(id: "UGV0Tm9kZTox"){
id
name
}
}
'''
cls.query_filter = '''
query {
pets{
edges{
node{
id
name
}
}
}
}
'''
def setUp(self):
self.factory = RequestFactory()
pet_names = ['Mila', 'Kira']
for name in pet_names:
Pet.objects.create(name=name)
self.anonymous = MockUserContext(
authenticated=False
)
self.luke = MockUserContext(
authenticated=True,
perms=('app.view_pet', 'app.create_pet',)
)
self.anakin = MockUserContext(
authenticated=True,
perms=('app.view_pet',)
)
self.storm_tropper = MockUserContext(
authenticated=True,
perms=()
)
def test_mutation_anonymous(self):
"""
Making mutation with anonymous user
"""
print(self.luke.user)
result = self.schema.execute(self.query_mutation.format(name='Mila'), context_value={'user': self.anonymous})
self.assertNotEqual(result.errors, [])
self.assertEqual(result.errors[0].message, 'Permission Denied')
self.assertEqual(True, False)
def test_mutation_non_permission(self):
"""
Making mutation with an user who does not have the permission
"""
result = self.schema.execute(self.query_mutation.format(name='Mila'), context_value={'user': self.anakin})
self.assertNotEqual(result.errors, [])
self.assertEqual(result.errors[0].message, 'Permission Denied')
def test_mutation_has_permission(self):
"""
Making mutation with an user who has the permission
"""
result = self.schema.execute(self.query_mutation.format(name='Mila'), context_value={'user': self.luke})
self.assertEqual(result.errors, [])
def test_query_anonymous(self):
"""
Making query with anonymous user
"""
result = self.schema.execute(self.query_node, context_value={'user': self.anonymous})
print(result.errors)
print(result.data)
self.assertNotEqual(result.errors, [])
self.assertEqual(result.errors[0].message, 'Permission Denied')
def test_query_non_permission(self):
"""
Making query with an user who does not have the permission
"""
result = self.schema.execute(self.query_node, context_value={'user': self.storm_tropper})
self.assertNotEqual(result.errors, [])
self.assertEqual(result.errors[0].message, 'Permission Denied')
def test_query_has_permission(self):
"""
Making query with an user who has the permission
"""
result = self.schema.execute(self.query_node, context_value={'user': self.luke})
self.assertEqual(result.errors, [])
def test_filter_has_permission(self):
"""
Making query with an user who has the permission
"""
result = self.schema.execute(self.query_filter, context_value={'user': self.luke})
print(result.data)
print(result.errors)
self.assertEqual(result.errors, [])
def test_filter_non_permission(self):
"""
Making query with an user who has the permission
"""
result = self.schema.execute(self.query_filter, context_value={'user': self.storm_tropper})
print(result.data)
print(result.errors)
self.assertNotEqual(result.errors, [])
self.assertEqual(result.errors[0].message, 'Permission Denied')
def test_auth_node(self):
pn = PetNode()
result = pn.get_node(id=1, context=None, info=None)
assert isinstance(result, PermissionDenied)
result = pn.get_node(id=1, context={'user': None}, info=None)
assert isinstance(result, PermissionDenied)
Context = collections.namedtuple('Context', ['user', ])
context = Context(MockUserContext(authenticated=False))
result = pn.get_node(id=1, context=context, info=None)
assert isinstance(result, PermissionDenied)
pn_multiple = PetNodeMultiplePermissions()
context = Context(MockUserContext(authenticated=True))
result = pn_multiple.get_node(id=1, context=context, info=None)
assert isinstance(result, PermissionDenied)
pn_multiple = PetNodeMultiplePermissions()
context = Context(MockUserContext(authenticated=True))
result = pn_multiple.get_node(id=10, context=context, info=None)
assert result is None
def test_auth_mutation(self):
pet_mutation = CreatePet()
result = pet_mutation.has_permision(context=None)
assert isinstance(result, PermissionDenied)
result = pet_mutation.has_permision(context={'user': None})
assert isinstance(result, PermissionDenied)
Context = collections.namedtuple('Context', ['user', ])
context = Context(MockUserContext(authenticated=False))
result = pet_mutation.has_permision(context=context)
assert isinstance(result, PermissionDenied)
pet_mutation_multiple = CreatePetMultiple()
context = Context(MockUserContext(authenticated=True))
result = pet_mutation_multiple.has_permision(context=context)
assert isinstance(result, PermissionDenied)
pet_mutation_multiple = CreatePetMultiple()
context = Context(MockUserContext(authenticated=True, perms=('app.view_pet', 'app.add_pet')))
result = pet_mutation_multiple.has_permision(context=context)
assert result is True
def test_auth_filter_connection_field(self):
pet_filter = PetFilterConnection(PetNode)
result = pet_filter.has_perm(context=None)
assert result is False
result = pet_filter.has_perm(context={'user': None})
assert result is False
Context = collections.namedtuple('Context', ['user', ])
context = Context(MockUserContext(authenticated=False))
result = pet_filter.has_perm(context=context)
assert result is False
pet_filter_mulitple = PetFilterConnectionMultiple(PetNode)
context = Context(MockUserContext(authenticated=True, perms=('app.view_pet', )))
result = pet_filter_mulitple.has_perm(context=context)
assert result is False
|
CarlosMart626/graphene-django-authorization | graphene_django_authorization/auth/mixins.py | <reponame>CarlosMart626/graphene-django-authorization<filename>graphene_django_authorization/auth/mixins.py
from django.core.exceptions import PermissionDenied
class AuthNodeMixin():
_permission = ''
@classmethod
def get_node(cls, id, context, info):
def has_perm(object_instance):
if context is None:
return False
if type(context) is dict:
user = context.get('user', None)
if user is None:
return False
else:
user = context.user
if user.is_authenticated() is False:
return False
if type(cls._permission) is tuple:
for permission in cls._permission:
if not user.has_perm(permission):
return False
if type(cls._permission) is str:
if not user.has_perm(cls._permission):
return False
return True
try:
object_instance = cls._meta.model.objects.get(id=id)
except cls._meta.model.DoesNotExist:
return None
if has_perm(object_instance):
return object_instance
return PermissionDenied('Permission Denied')
class AuthMutationMixin():
_permission = ''
@classmethod
def has_permision(cls, context):
if context is None:
return PermissionDenied('Permission Denied')
if type(context) is dict:
user = context.get('user', None)
if user is None:
return PermissionDenied('Permission Denied')
else:
user = context.user
if user.is_authenticated() is False:
return PermissionDenied('Permission Denied')
if type(cls._permission) is tuple:
for permission in cls._permission:
if not user.has_perm(permission):
return PermissionDenied('Permission Denied')
return True
if type(cls._permission) is str:
if user.has_perm(cls._permission):
return True
return PermissionDenied('Permission Denied')
|
CarlosMart626/graphene-django-authorization | graphene_django_authorization/decorators.py | from functools import wraps
from django.core.exceptions import PermissionDenied
from .utils import has_perm, is_unauthorized_to_mutate_object
def require_permission(permissions, model=None, id_field=None, user_field=None):
def require_permission_decorator(func):
@wraps(func)
def func_wrapper(cls, root, input, context, info):
if model or id_field or user_field:
assert model is not None and id_field is not None and user_field is not None
return is_unauthorized_to_mutate_object()
if has_perm(permissions=permissions, context=context):
return func(cls, root, input, context, info)
return PermissionDenied('Permission Denied')
return func_wrapper
return require_permission_decorator
|
CarlosMart626/graphene-django-authorization | examples/cookbook/cookbook/recipes/admin.py | from django.contrib import admin
from cookbook.recipes.models import Recipe, RecipeIngredient
class RecipeIngredientInline(admin.TabularInline):
model = RecipeIngredient
@admin.register(Recipe)
class RecipeAdmin(admin.ModelAdmin):
inlines = [RecipeIngredientInline]
|
CarlosMart626/graphene-django-authorization | graphene_django_authorization/utils.py | <filename>graphene_django_authorization/utils.py
""""
Auth utils module.
Define some functios to authorize user to user mutations or nodes.
"""
def is_related_to_user(object_instance, user, field):
"""Return True when the object_instance is related to user."""
user_instance = getattr(object_instance, field, None)
if user:
if user_instance == user:
return True
return False
def is_unauthorized_to_mutate_object(model, id, user, field):
"""Return True when the when the user is unauthorized."""
object_instance = model.objects.get(id=id)
if is_related_to_user(object_instance, user, field):
return False
return True
def has_perm(permissions, context):
"""
Validates if the user in the context has the permission required.
"""
if context is None:
return False
if type(context) is dict:
user = context.get('user', None)
if user is None:
return False
else:
user = context.user
if user.is_authenticated() is False:
return False
if type(permissions) is tuple:
print("permissions", permissions)
for permission in permissions:
print("User has perm", user.has_perm(permission))
if not user.has_perm(permission):
return False
if type(permissions) is str:
if not user.has_perm(permissions):
return False
return True
|
CarlosMart626/graphene-django-authorization | examples/cookbook/cookbook/schema.py | import cookbook.ingredients.schema
import cookbook.recipes.schema
import graphene
from graphene_django.debug import DjangoDebug
class Query(cookbook.recipes.schema.Query, cookbook.ingredients.schema.Query, graphene.ObjectType):
debug = graphene.Field(DjangoDebug, name='__debug')
schema = graphene.Schema(query=Query)
|
CarlosMart626/graphene-django-authorization | examples/starwars/tests/test_mutation.py | <gh_stars>1-10
import pytest
<<<<<<< HEAD
=======
from unittest.mock import Mock
>>>>>>> develop
from ..data import initialize
from ..schema import schema
pytestmark = pytest.mark.django_db
class MockUserContext(object):
    """Minimal stand-in for a request context with an attached user.

    ``self.user`` points back at the instance so the same object serves
    as both context and user.  Fix: removed unresolved git merge-conflict
    markers (<<<<<<< / ======= / >>>>>>>) that made the module
    unimportable; the 'develop' side is kept, matching the Mock-based
    permission assertions below.
    """

    def __init__(self, authenticated=True, is_staff=False, superuser=False, perms=()):
        self.user = self
        self.authenticated = authenticated
        self.is_staff = is_staff
        self.is_superuser = superuser
        self.perms = perms

    def is_authenticated(self):
        return self.authenticated

    def has_perm(self, check_perms):
        if check_perms not in self.perms:
            return False
        return True


# Canned users covering the permission matrix exercised in the tests.
anonymous = MockUserContext(authenticated=False)
luke = MockUserContext(
    authenticated=True,
    perms=('app.view_ship', 'app.create_ship',)
)
anakin = MockUserContext(
    authenticated=True,
    perms=('app.view_ship',)
)
storm_tropper = MockUserContext(
    authenticated=True,
    perms=()
)
def test_mutations():
    """End-to-end mutation test: permission checks then a real mutation.

    Fix: resolved unresolved git merge-conflict markers.  The 'develop'
    side is kept (plain-argument mutation, Mock-wrapped user contexts)
    because the surrounding fixtures (MockUserContext, anonymous/luke/...)
    only exist on that branch.
    """
    initialize()
    query = '''
    mutation MyMutation {
      introduceShip(shipName: "Peter", factionId: "1") {
        ship {
          id
          name
        }
        faction {
          name
          ships {
            edges {
              node {
                id
                name
              }
            }
          }
        }
      }
    }
    '''
    expected = {
        'introduceShip': {
            'ship': {
                'id': 'U2hpcDo5',
                'name': 'Peter'
            },
            'faction': {
                'name': 'Alliance to Restore the Republic',
                'ships': {
                    'edges': [{
                        'node': {
                            'id': 'U2hpcDox',
                            'name': 'X-Wing'
                        }
                    }, {
                        'node': {
                            'id': 'U2hpcDoy',
                            'name': 'Y-Wing'
                        }
                    }, {
                        'node': {
                            'id': 'U2hpcDoz',
                            'name': 'A-Wing'
                        }
                    }, {
                        'node': {
                            'id': 'U2hpcDo0',
                            'name': '<NAME>'
                        }
                    }, {
                        'node': {
                            'id': 'U2hpcDo1',
                            'name': '<NAME>'
                        }
                    }, {
                        'node': {
                            'id': 'U2hpcDo5',
                            'name': 'Peter'
                        }
                    }]
                },
            }
        }
    }
    # Users lacking 'app.create_ship' must be rejected.
    result = schema.execute(query, context_value=Mock(user=anonymous))
    assert result.errors[0].message == 'Permission Denied'
    result = schema.execute(query, context_value=Mock(user=storm_tropper))
    assert result.errors[0].message == 'Permission Denied'
    result = schema.execute(query, context_value=Mock(user=anakin))
    assert result.errors[0].message == 'Permission Denied'
    # luke holds both view and create permissions, so this succeeds.
    result = schema.execute(query, context_value=Mock(user=luke))
    assert not result.errors
    assert result.data == expected
|
CarlosMart626/graphene-django-authorization | examples/cookbook-plain/cookbook/recipes/models.py | from django.db import models
from cookbook.ingredients.models import Ingredient
class Recipe(models.Model):
    # A cookable recipe: a display title plus free-text instructions.
    title = models.CharField(max_length=100)
    instructions = models.TextField()
class RecipeIngredient(models.Model):
    # Join model linking a Recipe to an Ingredient with a quantity + unit.
    # NOTE(review): ForeignKey without on_delete is pre-Django-2.0 syntax;
    # confirm the project's Django version before adding on_delete.
    recipe = models.ForeignKey(Recipe, related_name='amounts')
    ingredient = models.ForeignKey(Ingredient, related_name='used_by')
    amount = models.FloatField()
    unit = models.CharField(max_length=20, choices=(
        ('unit', 'Units'),
        ('kg', 'Kilograms'),
        ('l', 'Litres'),
        ('st', 'Shots'),
    ))
|
bio-boris/kb_SPAdes | test/kb_SPAdes_server_test.py | <reponame>bio-boris/kb_SPAdes
from __future__ import print_function
import unittest
import os
import time
from os import environ
from ConfigParser import ConfigParser
import psutil
import requests
from biokbase.workspace.client import Workspace as workspaceService # @UnresolvedImport @IgnorePep8
from biokbase.workspace.client import ServerError as WorkspaceError # @UnresolvedImport @IgnorePep8
from biokbase.AbstractHandle.Client import AbstractHandle as HandleService # @UnresolvedImport @IgnorePep8
from kb_SPAdes.kb_SPAdesImpl import kb_SPAdes
from ReadsUtils.baseclient import ServerError
from ReadsUtils.ReadsUtilsClient import ReadsUtils
from kb_SPAdes.kb_SPAdesServer import MethodContext
from pprint import pprint
import shutil
import inspect
from kb_SPAdes.GenericClient import GenericClient
class gaprice_SPAdesTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        """One-time setup: build a service context from the environment,
        create a scratch workspace and stage all test data objects."""
        cls.token = environ.get('KB_AUTH_TOKEN')
        cls.callbackURL = environ.get('SDK_CALLBACK_URL')
        print('CB URL: ' + cls.callbackURL)
        # WARNING: don't call any logging methods on the context object,
        # it'll result in a NoneType error
        cls.ctx = MethodContext(None)
        cls.ctx.update({'token': cls.token,
                        'provenance': [
                            {'service': 'kb_SPAdes',
                             'method': 'please_never_use_it_in_production',
                             'method_params': []
                             }],
                        'authenticated': 1})
        config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
        cls.cfg = {}
        config = ConfigParser()
        config.read(config_file)
        for nameval in config.items('kb_SPAdes'):
            cls.cfg[nameval[0]] = nameval[1]
        cls.wsURL = cls.cfg['workspace-url']
        cls.shockURL = cls.cfg['shock-url']
        cls.hs = HandleService(url=cls.cfg['handle-service-url'],
                               token=cls.token)
        cls.wsClient = workspaceService(cls.wsURL, token=cls.token)
        # Millisecond timestamp makes the workspace name unique per run.
        wssuffix = int(time.time() * 1000)
        wsName = "test_kb_SPAdes_" + str(wssuffix)
        cls.wsinfo = cls.wsClient.create_workspace({'workspace': wsName})
        print('created workspace ' + cls.getWsName())
        cls.serviceImpl = kb_SPAdes(cls.cfg)
        cls.readUtilsImpl = ReadsUtils(cls.callbackURL, token=cls.token)
        # Bookkeeping for staged objects; torn down in tearDownClass.
        cls.staged = {}
        cls.nodes_to_delete = []
        cls.handles_to_delete = []
        cls.setupTestData()
        print('\n\n=============== Starting tests ==================')
    @classmethod
    def tearDownClass(cls):
        """Delete the scratch workspace, shock nodes and handles created
        during setup.  hasattr guards keep this safe if setUpClass failed
        part-way through."""
        print('\n\n=============== Cleaning up ==================')
        if hasattr(cls, 'wsinfo'):
            cls.wsClient.delete_workspace({'workspace': cls.getWsName()})
            print('Test workspace was deleted: ' + cls.getWsName())
        if hasattr(cls, 'nodes_to_delete'):
            for node in cls.nodes_to_delete:
                cls.delete_shock_node(node)
        if hasattr(cls, 'handles_to_delete'):
            cls.hs.delete_handles(cls.hs.ids_to_handles(cls.handles_to_delete))
            print('Deleted handles ' + str(cls.handles_to_delete))
@classmethod
def getWsName(cls):
return cls.wsinfo[1]
def getImpl(self):
return self.serviceImpl
    @classmethod
    def delete_shock_node(cls, node_id):
        """Delete a single shock node via the shock REST API."""
        header = {'Authorization': 'Oauth {0}'.format(cls.token)}
        requests.delete(cls.shockURL + '/node/' + node_id, headers=header,
                        allow_redirects=True)
        print('Deleted shock node ' + node_id)
    # Helper script borrowed from the transform service, logger removed
    @classmethod
    def upload_file_to_shock(cls, file_path):
        """
        Use HTTP multi-part POST to save a file to a SHOCK instance.

        Returns the 'data' section of the shock response (the node record);
        raises on HTTP errors or a non-empty shock 'error' field.
        """
        header = dict()
        header["Authorization"] = "Oauth {0}".format(cls.token)
        if file_path is None:
            raise Exception("No file given for upload to SHOCK!")
        with open(os.path.abspath(file_path), 'rb') as dataFile:
            files = {'upload': dataFile}
            print('POSTing data')
            response = requests.post(
                cls.shockURL + '/node', headers=header, files=files,
                stream=True, allow_redirects=True)
        print('got response')
        if not response.ok:
            response.raise_for_status()
        result = response.json()
        if result['error']:
            raise Exception(result['error'][0])
        else:
            return result["data"]
    @classmethod
    def upload_file_to_shock_and_get_handle(cls, test_file):
        '''
        Uploads the file in test_file to shock and returns the node and a
        handle to the node.

        Returns (node id, handle id, md5, file size); the node and handle
        are registered for cleanup in tearDownClass.
        '''
        print('loading file to shock: ' + test_file)
        node = cls.upload_file_to_shock(test_file)
        pprint(node)
        cls.nodes_to_delete.append(node['id'])
        print('creating handle for shock id ' + node['id'])
        handle_id = cls.hs.persist_handle({'id': node['id'],
                                           'type': 'shock',
                                           'url': cls.shockURL
                                           })
        cls.handles_to_delete.append(handle_id)
        md5 = node['file']['checksum']['md5']
        return node['id'], handle_id, md5, node['file']['size']
    @classmethod
    def upload_reads(cls, wsobjname, object_body, fwd_reads,
                     rev_reads=None, single_end=False, sequencing_tech='Illumina',
                     single_genome='1'):
        """Stage a reads object via ReadsUtils and record it in cls.staged
        keyed by wsobjname."""
        ob = dict(object_body)  # copy
        ob['sequencing_tech'] = sequencing_tech
        # ob['single_genome'] = single_genome
        ob['wsname'] = cls.getWsName()
        ob['name'] = wsobjname
        # A forward-only file that isn't explicitly single-end is treated
        # as interleaved paired-end.
        if single_end or rev_reads:
            ob['interleaved'] = 0
        else:
            ob['interleaved'] = 1
        print('\n===============staging data for object ' + wsobjname +
              '================')
        print('uploading forward reads file ' + fwd_reads['file'])
        fwd_id, fwd_handle_id, fwd_md5, fwd_size = \
            cls.upload_file_to_shock_and_get_handle(fwd_reads['file'])
        ob['fwd_id'] = fwd_id
        rev_id = None
        rev_handle_id = None
        if rev_reads:
            print('uploading reverse reads file ' + rev_reads['file'])
            rev_id, rev_handle_id, rev_md5, rev_size = \
                cls.upload_file_to_shock_and_get_handle(rev_reads['file'])
            ob['rev_id'] = rev_id
        obj_ref = cls.readUtilsImpl.upload_reads(ob)
        objdata = cls.wsClient.get_object_info_new({
            'objects': [{'ref': obj_ref['obj_ref']}]
        })[0]
        cls.staged[wsobjname] = {'info': objdata,
                                 'ref': cls.make_ref(objdata),
                                 'fwd_node_id': fwd_id,
                                 'rev_node_id': rev_id,
                                 'fwd_handle_id': fwd_handle_id,
                                 'rev_handle_id': rev_handle_id
                                 }
    @classmethod
    def upload_assembly(cls, wsobjname, object_body, fwd_reads,
                        rev_reads=None, kbase_assy=False,
                        single_end=False, sequencing_tech='Illumina'):
        """Stage a reads object by saving the workspace object directly
        (bypassing ReadsUtils validation), so deliberately-broken objects
        can be created for error-path tests.

        kbase_assy selects the legacy KBaseAssembly.* types over
        KBaseFile.*; single_end selects the SingleEndLibrary variant.
        """
        if single_end and rev_reads:
            raise ValueError('u r supr dum')
        print('\n===============staging data for object ' + wsobjname +
              '================')
        print('uploading forward reads file ' + fwd_reads['file'])
        fwd_id, fwd_handle_id, fwd_md5, fwd_size = \
            cls.upload_file_to_shock_and_get_handle(fwd_reads['file'])
        fwd_handle = {
            'hid': fwd_handle_id,
            'file_name': fwd_reads['name'],
            'id': fwd_id,
            'url': cls.shockURL,
            'type': 'shock',
            'remote_md5': fwd_md5
        }
        ob = dict(object_body)  # copy
        ob['sequencing_tech'] = sequencing_tech
        if kbase_assy:
            if single_end:
                wstype = 'KBaseAssembly.SingleEndLibrary'
                ob['handle'] = fwd_handle
            else:
                wstype = 'KBaseAssembly.PairedEndLibrary'
                ob['handle_1'] = fwd_handle
        else:
            if single_end:
                wstype = 'KBaseFile.SingleEndLibrary'
                obkey = 'lib'
            else:
                wstype = 'KBaseFile.PairedEndLibrary'
                obkey = 'lib1'
            ob[obkey] = \
                {'file': fwd_handle,
                 'encoding': 'UTF8',
                 'type': fwd_reads['type'],
                 'size': fwd_size
                 }
        rev_id = None
        rev_handle_id = None
        if rev_reads:
            print('uploading reverse reads file ' + rev_reads['file'])
            rev_id, rev_handle_id, rev_md5, rev_size = \
                cls.upload_file_to_shock_and_get_handle(rev_reads['file'])
            rev_handle = {
                'hid': rev_handle_id,
                'file_name': rev_reads['name'],
                'id': rev_id,
                'url': cls.shockURL,
                'type': 'shock',
                'remote_md5': rev_md5
            }
            if kbase_assy:
                ob['handle_2'] = rev_handle
            else:
                ob['lib2'] = \
                    {'file': rev_handle,
                     'encoding': 'UTF8',
                     'type': rev_reads['type'],
                     'size': rev_size
                     }
        print('Saving object data')
        objdata = cls.wsClient.save_objects({
            'workspace': cls.getWsName(),
            'objects': [
                {
                    'type': wstype,
                    'data': ob,
                    'name': wsobjname
                }]
        })[0]
        print('Saved object objdata: ')
        pprint(objdata)
        print('Saved object ob: ')
        pprint(ob)
        cls.staged[wsobjname] = {'info': objdata,
                                 'ref': cls.make_ref(objdata),
                                 'fwd_node_id': fwd_id,
                                 'rev_node_id': rev_id,
                                 'fwd_handle_id': fwd_handle_id,
                                 'rev_handle_id': rev_handle_id
                                 }
    @classmethod
    def upload_empty_data(cls, wsobjname):
        """Save a typed-but-empty workspace object used by bad-input tests."""
        objdata = cls.wsClient.save_objects({
            'workspace': cls.getWsName(),
            'objects': [{'type': 'Empty.AType',
                         'data': {},
                         'name': 'empty'
                         }]
        })[0]
        cls.staged[wsobjname] = {'info': objdata,
                                 'ref': cls.make_ref(objdata),
                                 }
    @classmethod
    def setupTestData(cls):
        """Upload every reads/assembly fixture the tests reference,
        including deliberately-broken objects for the error-path tests."""
        print('Shock url ' + cls.shockURL)
        print('WS url ' + cls.wsClient.url)
        print('Handle service url ' + cls.hs.url)
        print('CPUs detected ' + str(psutil.cpu_count()))
        print('Available memory ' + str(psutil.virtual_memory().available))
        print('staging data')
        # get file type from type
        fwd_reads = {'file': 'data/small.forward.fq',
                     'name': 'test_fwd.fastq',
                     'type': 'fastq'}
        # get file type from handle file name
        rev_reads = {'file': 'data/small.reverse.fq',
                     'name': 'test_rev.FQ',
                     'type': ''}
        # get file type from shock node file name
        int_reads = {'file': 'data/interleaved.fq',
                     'name': '',
                     'type': ''}
        int64_reads = {'file': 'data/interleaved64.fq',
                       'name': '',
                       'type': ''}
        pacbio_reads = {'file': 'data/pacbio_filtered_small.fastq.gz',
                        'name': '',
                        'type': ''}
        pacbio_ccs_reads = {'file': 'data/pacbioCCS_small.fastq.gz',
                            'name': '',
                            'type': ''}
        iontorrent_reads = {'file': 'data/IonTorrent_single.fastq.gz',
                            'name': '',
                            'type': ''}
        plasmid1_reads = {'file': 'data/pl1.fq.gz',
                          'name': '',
                          'type': ''}
        plasmid2_reads = {'file': 'data/pl2.fq.gz',
                          'name': '',
                          'type': ''}
        cls.upload_reads('frbasic', {}, fwd_reads, rev_reads=rev_reads)
        cls.upload_reads('intbasic', {'single_genome': 1}, int_reads)
        cls.upload_reads('intbasic64', {'single_genome': 1}, int64_reads)
        cls.upload_reads('pacbio', {'single_genome': 1},
                         pacbio_reads, single_end=True, sequencing_tech="PacBio CLR")
        cls.upload_reads('pacbioccs', {'single_genome': 1},
                         pacbio_ccs_reads, single_end=True, sequencing_tech="PacBio CCS")
        cls.upload_reads('iontorrent', {'single_genome': 1},
                         iontorrent_reads, single_end=True, sequencing_tech="IonTorrent")
        cls.upload_reads('meta', {'single_genome': 0}, fwd_reads,
                         rev_reads=rev_reads)
        cls.upload_reads('meta2', {'single_genome': 0}, fwd_reads,
                         rev_reads=rev_reads)
        cls.upload_reads('meta_single_end', {'single_genome': 0}, fwd_reads, single_end=True)
        cls.upload_reads('reads_out', {'read_orientation_outward': 1},
                         int_reads)
        cls.upload_assembly('frbasic_kbassy', {}, fwd_reads,
                            rev_reads=rev_reads, kbase_assy=True)
        cls.upload_assembly('intbasic_kbassy', {}, int_reads, kbase_assy=True)
        cls.upload_reads('single_end', {}, fwd_reads, single_end=True)
        cls.upload_reads('single_end2', {}, rev_reads, single_end=True)
        cls.upload_reads('plasmid_reads', {'single_genome': 1},
                         plasmid1_reads, rev_reads=plasmid2_reads)
        # Broken fixtures: bad shock file name / handle name / file type,
        # and a reads object whose shock node has been deleted.
        shutil.copy2('data/small.forward.fq', 'data/small.forward.bad')
        bad_fn_reads = {'file': 'data/small.forward.bad',
                        'name': '',
                        'type': ''}
        cls.upload_assembly('bad_shk_name', {}, bad_fn_reads)
        bad_fn_reads['file'] = 'data/small.forward.fq'
        bad_fn_reads['name'] = 'file.terrible'
        cls.upload_assembly('bad_file_name', {}, bad_fn_reads)
        bad_fn_reads['name'] = 'small.forward.fastq'
        bad_fn_reads['type'] = 'xls'
        cls.upload_assembly('bad_file_type', {}, bad_fn_reads)
        cls.upload_assembly('bad_node', {}, fwd_reads)
        cls.delete_shock_node(cls.nodes_to_delete.pop())
        cls.upload_empty_data('empty')
        print('Data staged.')
@classmethod
def make_ref(self, object_info):
return str(object_info[6]) + '/' + str(object_info[0]) + \
'/' + str(object_info[4])
    # ---- happy-path assembly tests (delegate to run_success) ----------
    def test_fr_pair_kbfile(self):
        self.run_success(
            ['frbasic'], 'frbasic_out',
            contig_count=2)

    def test_fr_pair_kbassy(self):
        self.run_success(
            ['frbasic_kbassy'], 'frbasic_kbassy_out',
            contig_count=2)

    def test_interlaced_kbfile(self):
        self.run_success(
            ['intbasic'], 'intbasic_out')

    def test_metagenome_kbfile(self):
        self.run_success(
            ['meta'], 'metabasic_out',
            contig_count=2, dna_source='metagenomic')

    def test_metagenome_multiple(self):
        self.run_error(['meta', 'meta2'],
                       'Metagenome assembly requires that one ' +
                       'and only one paired end library as input. ' +
                       '2 libraries detected.',
                       dna_source='metagenomic')

    def test_metagenome_single_end(self):
        self.run_error(['meta_single_end'],
                       'Metagenome assembly requires that one ' +
                       'and only one paired end library as input.',
                       dna_source='metagenomic')

    def test_plasmid_kbfile(self):
        self.run_success(
            ['plasmid_reads'], 'plasmid_out',
            contig_count=1, dna_source='plasmid')

    def test_plasmid_multiple(self):
        self.run_error(['plasmid_reads', 'frbasic'],
                       'Plasmid assembly requires that one ' +
                       'and only one library as input. ' +
                       '2 libraries detected.',
                       dna_source='plasmid')

    def test_interlaced_kbassy(self):
        self.run_success(
            ['intbasic_kbassy'], 'intbasic_kbassy_out',
            contig_count=1476, dna_source='')

    def test_multiple(self):
        self.run_success(
            ['intbasic_kbassy', 'frbasic'], 'multiple_out',
            dna_source='None')

    def test_multiple_pacbio_illumina(self):
        self.run_success(
            ['intbasic_kbassy', 'pacbio'], 'pacbio_multiple_out',
            dna_source='None')

    def test_multiple_pacbio_single(self):
        self.run_success(
            ['single_end', 'pacbio'], 'pacbio_single_out',
            dna_source='None')

    def test_multiple_single(self):
        self.run_success(
            ['single_end', 'single_end2'], 'multiple_single_out',
            dna_source='None')

    def test_iontorrent_alone(self):
        # IonTorrent output varies run to run, so only shape is checked.
        self.run_non_deterministic_success(
            ['iontorrent'], 'iontorrent_alone_out',
            dna_source='None')

    def test_multiple_iontorrent_illumina(self):
        self.run_error(['intbasic_kbassy', 'iontorrent'],
                       'Both IonTorrent and Illumina read libraries exist. SPAdes ' +
                       'can not assemble them together.')

    def test_pacbio_alone(self):
        self.run_error(['pacbio'],
                       'Per SPAdes requirements : If doing PacBio CLR reads, you must also ' +
                       'supply at least one paired end or single end reads library')

    def test_pacbioccs_alone(self):
        self.run_success(
            ['pacbioccs'], 'pacbioccs_alone_out',
            dna_source='None')

    def test_multiple_pacbioccs_illumina(self):
        self.run_success(
            ['intbasic_kbassy', 'pacbioccs'], 'pacbioccs_multiple_out',
            dna_source='None')

    def test_single_reads(self):
        self.run_success(
            ['single_end'], 'single_out',
            dna_source='None')
    # ---- parameter-validation / error-path tests ----------------------
    def test_multiple_bad(self):
        # Testing where input reads have different phred types (33 and 64)
        self.run_error(['intbasic64', 'frbasic'],
                       ('The set of Reads objects passed in have reads that have different phred ' +
                        'type scores. SPAdes does not support assemblies of reads with different ' +
                        'phred type scores.\nThe following read objects have ' +
                        'phred 33 scores : {}/frbasic.\n' +
                        'The following read objects have phred 64 scores : ' +
                        '{}/intbasic64').format(self.getWsName(), self.getWsName()),
                       exception=ValueError)

    def test_single_cell(self):
        self.run_success(
            ['frbasic'], 'single_cell_out',
            dna_source='single_cell')

    def test_meta_kmer_sizes(self):
        self.run_success(
            ['frbasic'], 'frbasic_meta_out',
            contig_count=2, dna_source='metagenomic',
            kmer_sizes=[33, 55, 77, 99, 127])

    def test_invalid_min_contig_length(self):
        self.run_error(
            ['frbasic'], 'min_contig_length must be of type int', min_contig_length='not an int!')

    def test_no_workspace_param(self):
        self.run_error(
            ['foo'], 'workspace_name parameter is required', wsname=None)

    def test_bad_workspace_name(self):
        self.run_error(['foo'], 'Invalid workspace name bad|name',
                       wsname='bad|name')

    def test_non_extant_workspace(self):
        self.run_error(
            ['foo'], 'Object foo cannot be accessed: No workspace with name ' +
            'Ireallyhopethisworkspacedoesntexistorthistestwillfail exists',
            wsname='Ireallyhopethisworkspacedoesntexistorthistestwillfail',
            exception=WorkspaceError)

    # TEST REMOVED SINCE FROM THE UI IT IS A REFERENCE (Old logic in Impl broke UI)
    # def test_bad_lib_name(self):
    #     self.run_error(['bad&name'], 'Invalid workspace object name bad&name')

    def test_no_libs_param(self):
        self.run_error(None, 'read_libraries parameter is required')

    def test_no_libs_list(self):
        self.run_error('foo', 'read_libraries must be a list')

    def test_non_extant_lib(self):
        self.run_error(
            ['foo'],
            ('No object with name foo exists in workspace {} ' +
             '(name {})').format(str(self.wsinfo[0]), self.wsinfo[1]),
            exception=WorkspaceError)

    def test_no_libs(self):
        self.run_error([], 'At least one reads library must be provided')

    def test_no_output_param(self):
        self.run_error(
            ['foo'], 'output_contigset_name parameter is required',
            output_name=None)

    def test_no_output_name(self):
        self.run_error(
            ['foo'], 'output_contigset_name parameter is required',
            output_name='')

    def test_bad_output_name(self):
        self.run_error(
            ['frbasic'], 'Invalid workspace object name bad*name',
            output_name='bad*name')
# def test_inconsistent_metagenomics_1(self):
# self.run_error(
# ['intbasic'],
# 'Reads object ' + self.getWsName() + '/intbasic (' +
# self.staged['intbasic']['ref'] +
# ') is marked as containing dna from a single genome but the ' +
# 'assembly method was specified as metagenomic',
# dna_source='metagenomic')
    # ---- reads-object metadata mismatch / broken-fixture tests --------
    def test_inconsistent_metagenomics_2(self):
        self.run_error(
            ['meta'],
            'Reads object ' + self.getWsName() + '/meta (' +
            self.staged['meta']['ref'] +
            ') is marked as containing metagenomic data but the assembly ' +
            'method was not specified as metagenomic')

    def test_outward_reads(self):
        self.run_error(
            ['reads_out'],
            'Reads object ' + self.getWsName() + '/reads_out (' +
            self.staged['reads_out']['ref'] +
            ') is marked as having outward oriented reads, which SPAdes ' +
            'does not support.')

    def test_bad_module(self):
        self.run_error(['empty'],
                       'Invalid type for object ' +
                       self.staged['empty']['ref'] + ' (empty). Only the ' +
                       'types KBaseAssembly.PairedEndLibrary and ' +
                       'KBaseFile.PairedEndLibrary are supported')

    def test_bad_shock_filename(self):
        self.run_error(
            ['bad_shk_name'],
            ('Shock file name is illegal: small.forward.bad. Expected FASTQ ' +
             'file. Reads object bad_shk_name ({}). Shock node ' +
             '{}').format(self.staged['bad_shk_name']['ref'],
                          self.staged['bad_shk_name']['fwd_node_id']),
            exception=ServerError)

    def test_bad_handle_filename(self):
        self.run_error(
            ['bad_file_name'],
            ('Handle file name from reads Workspace object is illegal: file.terrible. ' +
             'Expected FASTQ file. Reads object bad_file_name ({}). Shock node ' +
             '{}').format(self.staged['bad_file_name']['ref'],
                          self.staged['bad_file_name']['fwd_node_id']),
            exception=ServerError)

    def test_bad_file_type(self):
        self.run_error(
            ['bad_file_type'],
            ('File type from reads Workspace object is illegal: .xls. Expected ' +
             'FASTQ file. Reads object bad_file_type ({}). Shock node ' +
             '{}').format(self.staged['bad_file_type']['ref'],
                          self.staged['bad_file_type']['fwd_node_id']),
            exception=ServerError)

    def test_bad_shock_node(self):
        self.run_error(['bad_node'],
                       ('Handle error for object {}: The Handle Manager ' +
                        'reported a problem while attempting to set Handle ACLs: ' +
                        'Unable to set acl(s) on handles ' +
                        '{}').format(
                            self.staged['bad_node']['ref'],
                            self.staged['bad_node']['fwd_handle_id']),
                       exception=ServerError)
# def test_provenance(self):
# frbasic = 'frbasic'
# ref = self.make_ref(self.staged[frbasic]['info'])
# gc = GenericClient(self.callbackURL, use_url_lookup=False)
# gc.sync_call('CallbackServer.set_provenance',
# [{'service': 'myserv',
# 'method': 'mymeth',
# 'service_ver': '0.0.2',
# 'method_params': ['foo', 'bar', 'baz'],
# 'input_ws_objects': [ref]
# }]
# )
# params = {'workspace_name': self.getWsName(),
# 'read_libraries': [frbasic],
# 'output_contigset_name': 'foo'
# }
# ret = self.getImpl().run_SPAdes(self.ctx, params)[0]
# report = self.wsClient.get_objects([{'ref': ret['report_ref']}])[0]
# assembly_ref = report['data']['objects_created'][0]['ref']
# assembly = self.wsClient.get_objects([{'ref': assembly_ref}])[0]
# rep_prov = report['provenance']
# assembly_prov = assembly['provenance']
# self.assertEqual(len(rep_prov), 1)
# # self.assertEqual(len(assembly_prov), 2)
# # rep_prov = rep_prov[0]
# # assembly_prov = assembly_prov[0]
# # for p in [rep_prov, assembly_prov]:
# # self.assertEqual(p['service'], 'myserv')
# # self.assertEqual(p['method'], 'mymeth')
# # self.assertEqual(p['service_ver'], '0.0.2')
# # self.assertEqual(p['method_params'], ['foo', 'bar', 'baz'])
# # self.assertEqual(p['input_ws_objects'], [ref])
# # sa = p['subactions']
# # self.assertEqual(len(sa), 1)
# # sa = sa[0]
# # self.assertEqual(
# # sa['name'],
# # 'kb_read_library_to_file')
# # self.assertEqual(
# # sa['code_url'],
# # 'https://github.com/MrCreosote/kb_read_library_to_file')
# # don't check ver or commit since they can change from run to run
    def run_error(self, readnames, error, wsname=('fake'), output_name='out',
                  dna_source=None, min_contig_length=0, exception=ValueError,
                  kmer_sizes=None, skip_error_correction=0):
        """Call run_SPAdes expecting it to raise *exception* with message
        equal to *error*.

        wsname defaults to the sentinel 'fake', replaced with the real
        test workspace name; the string 'None' sends an explicit None
        workspace_name.  None-valued readnames/output_name omit the
        corresponding parameter entirely.
        """
        test_name = inspect.stack()[1][3]
        print('\n***** starting expected fail test: ' + test_name + ' *****')
        print('    libs: ' + str(readnames))
        if wsname == ('fake'):
            wsname = self.getWsName()
        params = {}
        if (wsname is not None):
            if wsname == 'None':
                params['workspace_name'] = None
            else:
                params['workspace_name'] = wsname
        if (readnames is not None):
            params['read_libraries'] = readnames
        if (output_name is not None):
            params['output_contigset_name'] = output_name
        if not (dna_source is None):
            params['dna_source'] = dna_source
        params['min_contig_length'] = min_contig_length
        params['kmer_sizes'] = kmer_sizes
        params['skip_error_correction'] = skip_error_correction
        with self.assertRaises(exception) as context:
            self.getImpl().run_SPAdes(self.ctx, params)
        self.assertEqual(error, str(context.exception.message))
    def run_success(self, readnames, output_name, expected=None, contig_count=None,
                    min_contig_length=0, dna_source=None,
                    kmer_sizes=None, skip_error_correction=0):
        """Run run_SPAdes on the staged libraries in *readnames* and verify
        the report and assembly objects it creates.

        contig_count, when given, pins the expected number of contigs;
        *expected*, when given, additionally checks md5s and per-contig
        metadata.  dna_source='None' (string) sends an explicit None.
        """
        test_name = inspect.stack()[1][3]
        print('\n**** starting expected success test: ' + test_name + ' *****')
        print('    libs: ' + str(readnames))
        print("READNAMES: " + str(readnames))
        print("STAGED: " + str(self.staged))
        libs = [self.staged[n]['info'][1] for n in readnames]
        # assyrefs = sorted(
        #     [self.make_ref(self.staged[n]['info']) for n in readnames])
        params = {'workspace_name': self.getWsName(),
                  'read_libraries': libs,
                  'output_contigset_name': output_name,
                  'min_contig_length': min_contig_length,
                  'kmer_sizes': kmer_sizes,
                  'skip_error_correction': skip_error_correction
                  }
        if not (dna_source is None):
            if dna_source == 'None':
                params['dna_source'] = None
            else:
                params['dna_source'] = dna_source
        ret = self.getImpl().run_SPAdes(self.ctx, params)[0]
        # Check the report object created by the run.
        report = self.wsClient.get_objects([{'ref': ret['report_ref']}])[0]
        self.assertEqual('KBaseReport.Report', report['info'][2].split('-')[0])
        self.assertEqual(1, len(report['data']['objects_created']))
        self.assertEqual('Assembled contigs',
                         report['data']['objects_created'][0]['description'])
        if not (contig_count):
            self.assertIn('Assembled into ', report['data']['text_message'])
        else:
            self.assertIn('Assembled into ' + str(contig_count) +
                          ' contigs', report['data']['text_message'])
        print("PROVENANCE: " + str(report['provenance']))
        self.assertEqual(1, len(report['provenance']))
        # PERHAPS ADD THESE TESTS BACK IN THE FUTURE, BUT AssemblyUtils and this
        # would need to pass in the extra provenance information
        # self.assertEqual(
        #     assyrefs, sorted(report['provenance'][0]['input_ws_objects']))
        # self.assertEqual(
        #     assyrefs,
        #     sorted(report['provenance'][0]['resolved_ws_objects']))
        # Check the assembly object referenced by the report.
        assembly_ref = report['data']['objects_created'][0]['ref']
        assembly = self.wsClient.get_objects([{'ref': assembly_ref}])[0]
        # print("ASSEMBLY OBJECT:")
        self.assertEqual('KBaseGenomeAnnotations.Assembly', assembly['info'][2].split('-')[0])
        self.assertEqual(1, len(assembly['provenance']))
        # PERHAPS ADD THESE TESTS BACK IN THE FUTURE, BUT AssemblyUtils and this
        # would need to pass in the extra provenance information
        # self.assertEqual(
        #     assyrefs, sorted(assembly['provenance'][0]['input_ws_objects']))
        # self.assertEqual(
        #     assyrefs, sorted(assembly['provenance'][0]['resolved_ws_objects']))
        self.assertEqual(output_name, assembly['info'][1])
        # handle_id = assembly['data']['fasta_handle_ref']
        # print("HANDLE ID:" + handle_id)
        # handle_ids_list = list()
        # handle_ids_list.append(handle_id)
        # print("HANDLE IDS:" + str(handle_ids_list))
        # temp_handle_info = self.hs.hids_to_handles(handle_ids_list)
        temp_handle_info = self.hs.hids_to_handles([assembly['data']['fasta_handle_ref']])
        print("HANDLE OBJECT:")
        pprint(temp_handle_info)
        assembly_fasta_node = temp_handle_info[0]['id']
        # Register the output fasta node for cleanup.
        self.nodes_to_delete.append(assembly_fasta_node)
        header = {"Authorization": "Oauth {0}".format(self.token)}
        fasta_node = requests.get(self.shockURL + '/node/' + assembly_fasta_node,
                                  headers=header, allow_redirects=True).json()
        if not (contig_count is None):
            self.assertEqual(contig_count, len(assembly['data']['contigs']))
        self.assertEqual(output_name, assembly['data']['assembly_id'])
        # self.assertEqual(output_name, assembly['data']['name'])  #name key doesnt seem to exist
        if not (expected is None):
            self.assertEqual(expected['fasta_md5'],
                             fasta_node['data']['file']['checksum']['md5'])
            self.assertEqual(expected['md5'], assembly['data']['md5'])
            self.assertIn('Assembled into ' + str(contig_count) +
                          ' contigs', report['data']['text_message'])
            for exp_contig in expected['contigs']:
                if exp_contig['id'] in assembly['data']['contigs']:
                    obj_contig = assembly['data']['contigs'][exp_contig['id']]
                    self.assertEqual(exp_contig['name'], obj_contig['name'])
                    self.assertEqual(exp_contig['md5'], obj_contig['md5'])
                    self.assertEqual(exp_contig['length'], obj_contig['length'])
                else:
                    # Hacky way to do this, but need to see all the contig_ids
                    # They changed because the SPAdes version changed and
                    # Need to see them to update the tests accordingly.
                    # If code gets here this test is designed to always fail, but show results.
                    self.assertEqual(str(assembly['data']['contigs']), "BLAH")
    def run_non_deterministic_success(self, readnames, output_name,
                                      dna_source=None):
        """Like run_success, but for assemblies whose contig counts vary
        between runs: only checks report/assembly object shape, not
        contig counts or md5s."""
        test_name = inspect.stack()[1][3]
        print('\n**** starting expected success test: ' + test_name + ' *****')
        print('    libs: ' + str(readnames))
        print("READNAMES: " + str(readnames))
        print("STAGED: " + str(self.staged))
        libs = [self.staged[n]['info'][1] for n in readnames]
        params = {'workspace_name': self.getWsName(),
                  'read_libraries': libs,
                  'output_contigset_name': output_name
                  }
        if not (dna_source is None):
            if dna_source == 'None':
                params['dna_source'] = None
            else:
                params['dna_source'] = dna_source
        ret = self.getImpl().run_SPAdes(self.ctx, params)[0]
        report = self.wsClient.get_objects([{'ref': ret['report_ref']}])[0]
        self.assertEqual('KBaseReport.Report', report['info'][2].split('-')[0])
        self.assertEqual(1, len(report['data']['objects_created']))
        self.assertEqual('Assembled contigs',
                         report['data']['objects_created'][0]['description'])
        self.assertIn('Assembled into ', report['data']['text_message'])
        self.assertIn('contigs', report['data']['text_message'])
        assembly_ref = report['data']['objects_created'][0]['ref']
        assembly = self.wsClient.get_objects([{'ref': assembly_ref}])[0]
        self.assertEqual('KBaseGenomeAnnotations.Assembly', assembly['info'][2].split('-')[0])
        self.assertEqual(output_name, assembly['info'][1])
|
bio-boris/kb_SPAdes | lib/kb_ea_utils/kb_ea_utilsClient.py | <filename>lib/kb_ea_utils/kb_ea_utilsClient.py
# -*- coding: utf-8 -*-
############################################################
#
# Autogenerated by the KBase type compiler -
# any changes made here will be overwritten
#
############################################################
from __future__ import print_function
# the following is a hack to get the baseclient to import whether we're in a
# package or not. This makes pep8 unhappy hence the annotations.
try:
# baseclient and this client are in a package
from .baseclient import BaseClient as _BaseClient # @UnusedImport
except:
# no they aren't
from baseclient import BaseClient as _BaseClient # @Reimport
import time
class kb_ea_utils(object):
def __init__(
self, url=None, timeout=30 * 60, user_id=None,
password=<PASSWORD>, token=<PASSWORD>, ignore_authrc=False,
trust_all_ssl_certificates=False,
auth_svc='https://kbase.us/services/authorization/Sessions/Login',
service_ver='release',
async_job_check_time_ms=100, async_job_check_time_scale_percent=150,
async_job_check_max_time_ms=300000):
if url is None:
raise ValueError('A url is required')
self._service_ver = service_ver
self._client = _BaseClient(
url, timeout=timeout, user_id=user_id, password=password,
token=token, ignore_authrc=ignore_authrc,
trust_all_ssl_certificates=trust_all_ssl_certificates,
auth_svc=auth_svc,
async_job_check_time_ms=async_job_check_time_ms,
async_job_check_time_scale_percent=async_job_check_time_scale_percent,
async_job_check_max_time_ms=async_job_check_max_time_ms)
    def _check_job(self, job_id):
        # Delegate to the base client, scoping the job id to this module.
        return self._client._check_job('kb_ea_utils', job_id)
    def _get_fastq_ea_utils_stats_submit(self, input_params, context=None):
        # Submit the async job; returns a job id for polling.
        return self._client._submit_job(
            'kb_ea_utils.get_fastq_ea_utils_stats', [input_params],
            self._service_ver, context)
    def get_fastq_ea_utils_stats(self, input_params, context=None):
        """
        This function should be used for getting statistics on read library object types
        The results are returned as a string.
        :param input_params: instance of type
           "get_fastq_ea_utils_stats_params" (if read_library_ref is set,
           then workspace_name and read_library_name are ignored) ->
           structure: parameter "workspace_name" of String, parameter
           "read_library_name" of String, parameter "read_library_ref" of
           String
        :returns: instance of String
        """
        job_id = self._get_fastq_ea_utils_stats_submit(input_params, context)
        async_job_check_time = self._client.async_job_check_time
        while True:
            time.sleep(async_job_check_time)
            # Exponential backoff on the poll interval, capped at
            # async_job_check_max_time.
            async_job_check_time = (async_job_check_time *
                self._client.async_job_check_time_scale_percent / 100.0)
            if async_job_check_time > self._client.async_job_check_max_time:
                async_job_check_time = self._client.async_job_check_max_time
            job_state = self._check_job(job_id)
            if job_state['finished']:
                return job_state['result'][0]
    def _run_app_fastq_ea_utils_stats_submit(self, input_params, context=None):
        # Submit the async job; returns a job id for polling.
        return self._client._submit_job(
            'kb_ea_utils.run_app_fastq_ea_utils_stats', [input_params],
            self._service_ver, context)
def run_app_fastq_ea_utils_stats(self, input_params, context=None):
"""
This function should be used for getting statistics on read library object type.
The results are returned as a report type object.
:param input_params: instance of type
"run_app_fastq_ea_utils_stats_params" (if read_library_ref is set,
then workspace_name and read_library_name are ignored) ->
structure: parameter "workspace_name" of String, parameter
"read_library_name" of String, parameter "read_library_ref" of
String
:returns: instance of type "Report" -> structure: parameter
"report_name" of String, parameter "report_ref" of String
"""
job_id = self._run_app_fastq_ea_utils_stats_submit(input_params, context)
async_job_check_time = self._client.async_job_check_time
while True:
time.sleep(async_job_check_time)
async_job_check_time = (async_job_check_time *
self._client.async_job_check_time_scale_percent / 100.0)
if async_job_check_time > self._client.async_job_check_max_time:
async_job_check_time = self._client.async_job_check_max_time
job_state = self._check_job(job_id)
if job_state['finished']:
return job_state['result'][0]
def _get_ea_utils_stats_submit(self, input_params, context=None):
return self._client._submit_job(
'kb_ea_utils.get_ea_utils_stats', [input_params],
self._service_ver, context)
def get_ea_utils_stats(self, input_params, context=None):
"""
This function should be used for getting statistics on fastq files. Input is string of file path.
Output is a report string.
:param input_params: instance of type "ea_utils_params"
(read_library_path : absolute path of fastq files) -> structure:
parameter "read_library_path" of String
:returns: instance of String
"""
job_id = self._get_ea_utils_stats_submit(input_params, context)
async_job_check_time = self._client.async_job_check_time
while True:
time.sleep(async_job_check_time)
async_job_check_time = (async_job_check_time *
self._client.async_job_check_time_scale_percent / 100.0)
if async_job_check_time > self._client.async_job_check_max_time:
async_job_check_time = self._client.async_job_check_max_time
job_state = self._check_job(job_id)
if job_state['finished']:
return job_state['result'][0]
def _calculate_fastq_stats_submit(self, input_params, context=None):
return self._client._submit_job(
'kb_ea_utils.calculate_fastq_stats', [input_params],
self._service_ver, context)
def calculate_fastq_stats(self, input_params, context=None):
"""
This function should be used for getting statistics on fastq files. Input is string of file path.
Output is a data structure with different fields.
:param input_params: instance of type "ea_utils_params"
(read_library_path : absolute path of fastq files) -> structure:
parameter "read_library_path" of String
:returns: instance of type "ea_report" (read_count - the number of
reads in the this dataset total_bases - the total number of bases
for all the the reads in this library. gc_content - the GC content
of the reads. read_length_mean - The average read length size
read_length_stdev - The standard deviation read lengths phred_type
- The scale of phred scores number_of_duplicates - The number of
reads that are duplicates qual_min - min quality scores qual_max -
max quality scores qual_mean - mean quality scores qual_stdev -
stdev of quality scores base_percentages - The per base percentage
breakdown) -> structure: parameter "read_count" of Long, parameter
"total_bases" of Long, parameter "gc_content" of Double, parameter
"read_length_mean" of Double, parameter "read_length_stdev" of
Double, parameter "phred_type" of String, parameter
"number_of_duplicates" of Long, parameter "qual_min" of Double,
parameter "qual_max" of Double, parameter "qual_mean" of Double,
parameter "qual_stdev" of Double, parameter "base_percentages" of
mapping from String to Double
"""
job_id = self._calculate_fastq_stats_submit(input_params, context)
async_job_check_time = self._client.async_job_check_time
while True:
time.sleep(async_job_check_time)
async_job_check_time = (async_job_check_time *
self._client.async_job_check_time_scale_percent / 100.0)
if async_job_check_time > self._client.async_job_check_max_time:
async_job_check_time = self._client.async_job_check_max_time
job_state = self._check_job(job_id)
if job_state['finished']:
return job_state['result'][0]
def status(self, context=None):
job_id = self._client._submit_job('kb_ea_utils.status',
[], self._service_ver, context)
async_job_check_time = self._client.async_job_check_time
while True:
time.sleep(async_job_check_time)
async_job_check_time = (async_job_check_time *
self._client.async_job_check_time_scale_percent / 100.0)
if async_job_check_time > self._client.async_job_check_max_time:
async_job_check_time = self._client.async_job_check_max_time
job_state = self._check_job(job_id)
if job_state['finished']:
return job_state['result'][0]
|
infobloxopen/bloxone-ansible | ansible_collections/infoblox/b1ddi_modules/plugins/modules/b1_a_record_gather.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2021 Infoblox, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# Ansible reads this YAML block to render module docs; it must match the
# argument_spec in main() ('host' is a str, only state=gather is implemented).
DOCUMENTATION = '''
---
module: b1_a_record_gather
author: "<EMAIL>"
short_description: Gather DNS A records from Infoblox BloxOne DDI
version_added: "1.0.1"
description:
  - Gather DNS resource record objects from Infoblox BloxOne DDI using the BloxOne REST APIs.
requirements:
  - requests
options:
  api_key:
    description:
      - Configures the API token for authentication against Infoblox BloxOne platform.
    type: str
    required: true
  host:
    description:
      - Configures the Infoblox BloxOne host URL.
    type: str
    required: true
  name:
    description:
      - Configures the name of object to fetch from the system.
    type: str
    required: true
  fields:
    description:
      - Limits the response to the listed record fields.
    type: list
  filters:
    description:
      - Filters the returned records by the given field/value pairs.
    type: dict
  tags:
    description:
      - Configures the tags associated with the object to add or update from the system.
    type: list
  comment:
    description:
      - Configures the comment/description for the object to add or update from the system.
    type: str
  state:
    description:
      - Configures the state of the object on BloxOne DDI. Only C(gather) is
        implemented by this module and fetches the matching records from the
        platform.
    type: str
    default: present
    choices:
      - gather
    required: true
'''
# TODO: add usage examples and return-value documentation for the gather
# operation; Ansible renders these strings in the module documentation.
EXAMPLES = '''
'''
RETURN = ''' # '''
from ansible.module_utils.basic import *
from ..module_utils.b1ddi import Request, Utilities
import json
def get_a_record_gather(data):
    '''Fetch DNS resource record objects from BloxOne DDI.

    Builds the /dns/record query endpoint from the optional ``fields``
    (list of field names) and ``filters`` (field -> value mapping) entries
    of ``data`` and performs a GET request through the shared connector.
    Returns whatever the connector returns for the GET request.
    '''
    connector = Request(data['host'], data['api_key'])
    endpoint = '/api/ddi/v1/dns/record'
    has_query = False
    fields = data['fields']
    filters = data['filters']
    if fields is not None and isinstance(fields, list):
        # Restrict the response to the requested fields.
        endpoint = endpoint + '?_fields=' + ','.join(fields)
        has_query = True
    if filters != {} and isinstance(filters, dict):
        temp_filters = []
        for k, v in filters.items():
            # Numeric values are compared unquoted, strings are quoted.
            if str(v).isdigit():
                temp_filters.append(f'{k}=={v}')
            else:
                temp_filters.append(f'{k}==\'{v}\'')
        res = ' and '.join(temp_filters)
        endpoint = endpoint + ('&_filter=' if has_query else '?_filter=') + res
    try:
        return connector.get(endpoint)
    except Exception as exc:
        # Surface the endpoint that failed while keeping the original cause.
        raise Exception(endpoint) from exc
def main():
    '''Main entry point for module execution.'''
    argument_spec = dict(
        name=dict(default='', type='str'),
        api_key=dict(required=True, type='str'),
        host=dict(required=True, type='str'),
        comment=dict(type='str'),
        fields=dict(type='list'),
        filters=dict(type='dict', default={}),
        tags=dict(type='list', elements='dict', default=[{}]),
        state=dict(type='str', default='present', choices=['present','absent','gather'])
    )
    choice_map = {
        'gather': get_a_record_gather
    }
    module = AnsibleModule(argument_spec=argument_spec)
    handler = choice_map.get(module.params['state'])
    if handler is None:
        # Only 'gather' is implemented; fail cleanly instead of raising a
        # TypeError when state is 'present' (the default) or 'absent'.
        module.fail_json(msg="State '%s' is not supported by this module; use state=gather"
                             % module.params['state'])
        return
    (is_error, has_changed, result) = handler(module.params)
    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg='Operation failed', meta=result)
# Run the module when executed directly by Ansible.
if __name__ == '__main__':
    main()
|
oKermorgant/ecn_manip | src/python/dh_code.py | #!/usr/bin/env python
'''
C++ code generation for Modified Denavit-Hartenberg parameters and URDF files
python dh_code.py file.yml (from yaml file)
python dh_code.py file.urdf base effector (from URDF file)
author: <NAME>, ICube Laboratory
'''
import yaml
from lxml import etree
import sys, os
import sympy
from sympy.parsing.sympy_parser import parse_expr
from pylab import pi, array, norm
import re
from multiprocessing import Pool
import argparse
from subprocess import check_output
from tf.transformations import quaternion_from_matrix
# Utility functions and variables
# Canonical unit axes as 3x1 sympy column vectors, plus the homogeneous
# origin point Z4 = (0, 0, 0, 1)^T.
X = sympy.Matrix([1,0,0]).reshape(3,1)
Y = sympy.Matrix([0,1,0]).reshape(3,1)
Z = sympy.Matrix([0,0,1]).reshape(3,1)
Z4 = sympy.Matrix([0,0,0,1]).reshape(4,1)
# Numeric constants extracted from URDF origins, keyed by the symbol name
# generated for them (filled by simp_val, printed at the end of the script).
cst_symb = {}
def sk(u):
    '''Return the 3x3 skew-symmetric (cross-product) matrix of vector u.'''
    ux, uy, uz = u[0], u[1], u[2]
    return sympy.Matrix([[0, -uz, uy],
                         [uz, 0, -ux],
                         [-uy, ux, 0]])
def Rot(theta, u):
    '''Rotation matrix of angle theta about axis u (Rodrigues' formula).'''
    c = sympy.cos(theta)
    s = sympy.sin(theta)
    R = c*sympy.eye(3) + s*sk(u) + (1 - c)*(u*u.transpose())
    return sympy.Matrix(R)
def Rxyz(rpy):
    '''
    Rotation matrix for roll-pitch-yaw angles in the fixed X - Y - Z
    convention, hence the matrices are composed in reversed order.
    '''
    roll, pitch, yaw = rpy[0], rpy[1], rpy[2]
    return Rot(yaw, Z) * Rot(pitch, Y) * Rot(roll, X)
def Quat(M):
    '''Print the quaternion corresponding to homogeneous matrix M.'''
    print(quaternion_from_matrix(array(M)))
def Homogeneous(t, R):
    '''
    Homogeneous frame transformation matrix built from translation t and
    rotation R, simplified term by term.
    '''
    bottom = sympy.Matrix([[0, 0, 0, 1]])
    return simp_matrix((R.row_join(t)).col_join(bottom))
class Bunch(object):
    '''Lightweight namespace: exposes the keys of a dict as attributes.'''
    def __init__(self, adict):
        self.__dict__.update(adict)
def load_yaml(filename):
    '''
    Loads the given yaml file content with DH parameters. Builds the corresponding data.

    Returns (T, u, prism, fM0, eMw) where T are the relative frame
    transforms, u the joint axes, prism the per-joint prismatic flags
    (None when no 'q' appears in either r or theta), and fM0 / eMw the
    optional constant transforms for the 'f' and 'e' entries.
    '''
    with open(filename) as f:
        robot = yaml.safe_load(f)
    robot['keys'] = [k for k in robot]
    robot = Bunch(robot)
    # get ordering: the 'notation' key may reorder (alpha, a, r, theta)
    if 'notation' in robot.keys:
        iAlpha = robot.notation.index('alpha')
        iA = robot.notation.index('a')
        iR = robot.notation.index('r')
        iTheta = robot.notation.index('theta')
    else:
        # default Modified DH column order
        iAlpha = 0
        iA = 1
        iR = 2
        iTheta = 3
    # change into symbolic
    print('')
    print('Building intermediary matrices...')
    prism = []  # True if prismatic, False if revolute, None if constant
    T = []      # relative T(i-1,i)
    u = []      # joint axis
    fM0 = None  # constant base transform (yaml key 'f'), if any
    eMw = None  # constant end-effector transform (yaml key 'e'), if any
    for q,joint in robot.joint.items():
        # a joint is prismatic if its r entry contains 'q', revolute if
        # theta does; otherwise it is treated as fixed (this_prism = None)
        this_prism = None
        if type(joint[iR]) == str:
            if 'q' in joint[iR]:
                this_prism = True
        if type(joint[iTheta]) == str:
            if 'q' in joint[iTheta]:
                this_prism = False
        prism.append(this_prism)
        # parse string entries (symbols, fractions of pi, ...) into sympy
        for i in range(4):
            if type(joint[i]) == str:
                joint[i] = parse_expr(joint[i])
        if q == 'f':
            fM0 = Homogeneous(joint[iA]*X, Rot(joint[iAlpha],X)) * Homogeneous(joint[iR]*Z, Rot(joint[iTheta],Z))
        elif q == 'e':
            eMw = Homogeneous(joint[iA]*X, Rot(joint[iAlpha],X)) * Homogeneous(joint[iR]*Z, Rot(joint[iTheta],Z))
        else:
            # transformation matrix
            T.append(Homogeneous(joint[iA]*X, Rot(joint[iAlpha],X)) * Homogeneous(joint[iR]*Z, Rot(joint[iTheta],Z)))
            # joint axis, always Z in DH convention
            u.append(Z)
    return T, u, prism, fM0, eMw
def simp_rpy(rpy):
    '''
    Converts floating angle values to fractions of pi

    Takes a list of 3 angle strings; values within 1e-5 of k*pi/12 are
    snapped to that exact fraction.  NOTE(review): the returned list mixes
    types (sympy expressions, strings such as 'pi/4', and the int 0); the
    downstream Rot() calls appear to rely on sympy coercing all of them.
    '''
    rpy = [parse_expr(v) for v in rpy]
    for i in range(3):
        # scan multiples of pi/12 from -pi to pi
        for k in range(-12,13):
            if abs(rpy[i] - k*pi/12.) < 1e-5:
                if k != 0:
                    print('  changing', rpy[i], 'to', sympy.simplify(k*sympy.pi/12))
                    rpy[i] = str(sympy.simplify(k*sympy.pi/12))
                if rpy[i] == '0':
                    rpy[i] = 0
                break
    return rpy
def simp_axis(u):
    '''
    Convert an axis given as 3 strings into a 3x1 sympy vector, snapping
    each component to an exact -1, 0 or 1 when within 1e-5.
    '''
    comps = [parse_expr(c) for c in u]
    for i in range(3):
        for target in (-1, 0, 1):
            if abs(comps[i] - target) < 1e-5:
                comps[i] = target
    return sympy.Matrix(comps).reshape(3, 1)
def simp_val(val, idx):
    '''
    Convert numeric x y z to symbolic ones

    Each non-zero component becomes a named symbol ('a'/'b'/'d' prefix for
    x/y/z, suffixed with the joint index) and its numeric value is recorded
    in the global cst_symb table, printed as C++ constants at the end.
    '''
    global cst_symb
    val = [parse_expr(v) for v in val]
    for i in range(3):
        if abs(val[i]) < 1e-5:
            val[i] = 0
        else:
            # 'abd'[i]: a for x, b for y, d for z ('c' is reserved for cos)
            s = 'abd'[i] + str(idx)
            cst_symb[s] = val[i]
            val[i] = sympy.Symbol(s)
    return sympy.Matrix(val).reshape(len(val),1)
def load_urdf(filename):
    '''Return the URDF XML text for a .urdf file or a xacro file.

    Non-.urdf files are expanded through `rosrun xacro xacro`.
    '''
    # reads as URDF or XACRO depending on file name
    if filename.endswith('.urdf'):
        with open(filename) as f:
            return f.read()
    else:
        urdf = check_output(('rosrun xacro xacro ' + filename).split(), encoding='UTF8')
        # NOTE(review): output starting with 'w' presumably indicates a
        # leading warning line, so xacro is rerun once -- confirm intent;
        # the second run would likely produce the same output.
        if urdf[0] == 'w':
            return check_output(('rosrun xacro xacro ' + filename).split(), encoding='UTF8')
        return urdf
def parse_urdf(filename, base_frame, ee_frame, use_joint_names = False):
    '''
    Parse the URDF file to extract the geometrical parameters between given frames

    Returns (T, u, prism, fM0, wMe, all_q): per-moving-joint transforms,
    axes (in the joint frame), prismatic flags, the optional constant
    transforms before the first / after the last moving joint, and the
    joint symbols.  Exits the process on an invalid frame or a broken
    kinematic chain.
    '''
    robot = etree.fromstring(load_urdf(filename).encode())
    # find all joints
    parents = []
    children = []
    all_joints = robot.findall('joint')
    for joint in all_joints:
        parents.append(joint.find('parent').get('link'))
        children.append(joint.find('child').get('link'))
    if base_frame not in parents:
        print('Could not find', base_frame, 'in parent link list')
        print('Known parents: ' + " ".join(set(parents)))
        sys.exit(0)
    if ee_frame not in children:
        print('Could not find', ee_frame, 'in children link list')
        print('Known children: ' + " ".join(set(children)))
        sys.exit(0)
    # find path from base link to effector link, walking child -> parent
    joint_path = []
    cur_link = ee_frame
    while cur_link != base_frame:
        try:
            i = children.index(cur_link)
        except:
            print('Could not find', cur_link, 'in link list')
            sys.exit(0)
        if i in joint_path:
            # a repeated joint index means the chain loops
            print('Error: passed 2 times through', cur_link)
            sys.exit(0)
        joint_path.append(i)
        cur_link = parents[i]
    joint_path.reverse()
    # build robot geometry
    n = 0                       # number of moving joints found so far
    M = sympy.eye(4)            # accumulated transform since last moving joint
    T = []
    prism = []
    u = []
    all_q = []
    parent = base_frame
    fM0 = wMe = None
    last_moving = 0             # index of the last non-fixed joint on the path
    joints = [all_joints[i] for i in joint_path]
    for k, joint in enumerate(joints):
        # get this transform from the joint origin (xyz snapped to symbols,
        # rpy snapped to fractions of pi)
        xyz = simp_val(joint.find('origin').get('xyz').split(' '), k+1)
        rpy = simp_rpy(joint.find('origin').get('rpy').split(' '))
        Mi = Homogeneous(xyz, Rxyz(rpy))
        # get quaternion
        #print 'from', joint.find('parent').get('link'), 'to', child
        #print Mi
        if joint.get('type') != 'fixed':
            last_moving = k
            if n == 0 and k != 0:
                # there were some fixed joints before this one, build a constant matrix fM0
                fM0 = M
                M = Mi
                print('Constant matrix fM0 between', base_frame, 'and', joints[k-1].find('child').get('link'))
            else:
                M = M*Mi
            n += 1
            #print 'joint', n, ': from', parent, 'to', child
            #print M
            # prismatic?
            prism.append(joint.get('type') == 'prismatic')
            # axis (normalized, then snapped to exact -1/0/1 components)
            ax = array([float(v) for v in joint.find('axis').get('xyz').split(' ')])
            ax = ax/norm(ax)
            u.append(simp_axis([str(v) for v in ax]))
            # Transform matrix: append the joint variable motion along/about u
            if use_joint_names:
                q = sympy.Symbol(joint.get('name'))
            else:
                q = sympy.Symbol('q%i'%n)
            all_q.append(q)
            if prism[-1]:
                T.append(M * Homogeneous(q*u[-1], Rot(0, X)))
            else:
                T.append(M * Homogeneous(0*X, Rot(q, u[-1])))
            # reset M for next joint
            M = sympy.eye(4)
        else:
            M = M*Mi
    if joint.get('type') == 'fixed':
        # we finished on some fixed links; keep wMe only if it is not identity
        nonI = False
        for i in range(3):
            for j in range(4):
                if (i == j and M[i,j] != 1) or (i != j and M[i,j] != 0):
                    nonI = True
        if nonI:
            wMe = M
            print('Constant matrix wMe between', joints[last_moving].find('child').get('link'), 'and', ee_frame)
    return T, u, prism, fM0, wMe, all_q
# human sorting
def human_sort(l):
    '''Sort a list of strings in place in natural order (q2 before q10).'''
    def natural_key(key):
        return [int(chunk) if chunk.isdigit() else chunk
                for chunk in re.split('([0-9]+)', key)]
    l.sort(key=natural_key)
def simp_matrix(M):
    '''
    simplify matrix for old versions of sympy

    Applies trigsimp element-wise, in place, and returns the same matrix.
    '''
    for i in range(M.rows):
        for j in range(M.cols):
            M[i,j] = sympy.trigsimp(M[i,j])
            # check for these strange small numbers
            # (disabled workaround kept below for reference)
            '''
            s = str(M[i,j])
            almost0 = False
            for k in range(20, 50):
                if 'e-' + str(k) in s:
                    almost0 = True
                    break
            if almost0:
                for k in range(20, 50):
                    while 'e-' + str(k) in s:
                        m = s.find('e-' + str(k))
                        n = m
                        while s[n] != '.':
                            n -= 1
                        s = s.replace(s[n-1:m+4], '0')
                M[i,j] = sympy.trigsimp(parse_expr(s))
            '''
    return M
def compute_Ji(joint_prism, u0, p0, i):
    '''
    Compute the i-eth column of the Jacobian (used for multiprocessing)

    joint_prism[i] is True for a prismatic joint, False for a revolute one
    and None for a fixed joint.  u0 are the joint axes and p0 the frame
    origins, both expressed in frame 0.  Returns (i, column) so columns
    computed out of order can be reassembled by the caller.
    '''
    if joint_prism[i] is None:  # fixed joint: contributes no column
        # NOTE(review): this branch returns a bare matrix while the others
        # return an (index, column) tuple -- confirm fixed joints never
        # reach this function through ComputeDK_J.
        return sympy.zeros(6,0)
    if joint_prism[i]:
        # prismatic joint: v = qdot.u and w = 0
        Jv = simp_matrix(u0[i])
        Jw = sympy.Matrix([[0,0,0]]).reshape(3,1)
    else:
        # revolute joint: v = [qdot.u]x p and w = qdot.u
        Jv = simp_matrix(sk(u0[i])*(p0[-1]-p0[i]))
        Jw = simp_matrix(u0[i])
    print(' J_%i' % (i+1))
    return (i, Jv.col_join(Jw)) # register this column as column i
def replaceFctQ(s, cDef, cUse, q='q'):
    '''
    Replace cos and sin functions of q_i with precomputed constants.

    cUse maps an expression such as cos(q1 + q2) to its constant name c12,
    cDef maps it to the C++ definition line; both are updated in place and
    returned together with the rewritten string.
    '''
    pm = {'+': '', '-': 'm'}
    # reuse the constants discovered by previous calls
    for known, const_name in cUse.items():
        s = s.replace(known, const_name)
    # look for new cos(...)/sin(...) expressions
    for fct in ('cos', 'sin'):
        pos = s.find(fct)
        while pos != -1:
            end = pos + s[pos:].find(')')
            sf = s[pos:end+1]                         # e.g. cos(q1 + q2 - q3)
            terms = s[pos+len(fct)+1:end].split(' ')  # e.g. [q1, +, q2, -, q3]
            name = fct[0]                             # 'c' or 's'
            call = fct + '('
            for term in terms:
                if 'q' in term:
                    name += term[1:]
                    call += '%s[%i]' % (q, int(term[1:]) - 1)
                else:
                    name += pm[term]
                    call += term
            call += ')'                               # cos(q[0]+q[1]-q[2])
            cUse[sf] = name
            cDef[sf] = 'const double %s = %s;' % (name, call)
            s = s.replace(sf, name)
            pos = s.find(fct)
    # other occurences of qi
    for i in range(100):
        s = s.replace('q%i' % (i+1), '%s[%i]' % (q, i))
    return s.replace('00000000000000', '').replace('0000000000000', ''), cDef, cUse
def exportCpp(M, s='M', q = 'q', col_offset = 0):
    '''
    Writes the C++ code corresponding to a given matrix

    Prints the constant definitions (cos/sin shorthands) followed by one
    assignment per element into variable `s`, indexing the joint vector as
    `q`.  Zero elements are emitted as comments, except the constant last
    row of a homogeneous matrix which is skipped silently.
    '''
    cDef={}
    cUse={}
    M_lines = []
    comment = True
    if M.rows == 4 and M.cols == 4:
        # homogeneous matrix: only comment zeros when the last row is not (0,0,0,1)
        comment = M[3,:].tolist()[0] != [0,0,0,1]
    # write each element
    sRows = ''
    sCols = ''
    for i in range(M.rows):
        if M.rows > 1:
            sRows = '[' + str(i) + ']'
        for j in range(M.cols):
            if M.cols > 1:
                sCols = '[' + str(j+col_offset) + ']'
            if M[i,j] == 0 and comment:  # comment this out
                M_lines.append('//' + s + sRows + sCols + ' = 0;')
            else:
                # numeric evaluation, then substitute cos/sin constants
                ms, cDef, cUse = replaceFctQ(str(sympy.N(M[i,j])), cDef, cUse, q)
                M_lines.append(s + sRows + sCols + ' = ' + ms + ';')
    # print definitions
    cDef = list(cDef.values())
    human_sort(cDef)
    for line in cDef:
        print('   ',line)
    # print matrix content
    for line in M_lines:
        print('   ', line)
def ComputeDK_J(T, u, prism, comp_all = False):
    '''Compute the direct geometric model and Jacobian(s).

    T are the relative transforms T(i-1,i), u the joint axes and prism the
    prismatic flags.  Returns (T0, all_J): the absolute transforms T(0,i)
    and the Jacobian of the last frame, or of every frame when comp_all.
    '''
    # get number of joints
    dof = len(T)
    # Transform matrices
    print('')
    print('Building direct geometric model...')
    T0 = []   # absolute T(0,i)
    for i in range(dof):
        if len(T0) == 0:
            T0.append(T[i])
        else:
            T0.append(simp_matrix(T0[-1]*T[i]))
        print(' T %i/0' % (i+1))
    # Jacobian
    # Rotation of each frame to go to frame 0
    print('')
    print('Building kinematic model...')
    R0 = [M[:3,:3] for M in T0]
    # joint axis expressed in frame 0
    u0 = [R0[i]*u[i] for i in range(dof)]
    all_J = []
    if comp_all:
        ee_J = range(1,dof+1)
    else:
        ee_J = [dof]
    for ee in ee_J:
        # origin of each frame expressed in frame 0
        p0 = [T0[i][:3,3] for i in range(ee)]
        # build Jacobian
        # Sympy + multithreading bug = argument is not an mpz
        #pool = Pool()
        #results = []
        #for i in range(ee):
            ## add this column to pool
            #results.append(pool.apply_async(compute_Ji, args=(prism, u0, p0, i)))
        #iJ = [result.get() for result in results]
        #pool.close()
        iJ = [compute_Ji(prism, u0, p0, i) for i in range(ee)]
        # reassemble the (index, column) pairs in column order
        Js = sympy.Matrix()
        Js.rows = 6
        for i in range(ee):
            for iJi in iJ:
                if iJi[0] == i:
                    Js = Js.row_join(iJi[1])
        all_J.append(Js)
    print('')
    return T0, all_J
def better_latex(M):
    '''Print the LaTeX form of sympy matrix M with compact trig shorthand.

    \\cos / \\sin become c / s and arguments such as (q_1 + q_2) are folded
    into subscripts, e.g. cos(q1 + q2) -> c_{12}.
    '''
    s = sympy.latex(M)
    s = s.replace('\\cos', 'c').replace('\\sin', 's')
    # highest joint index appearing in the expression; default=0 avoids a
    # ValueError from max() when no q_i symbol is present (nothing to fold)
    n = max([i for i in range(1,10) if '_{'+str(i)+'}' in s], default=0)
    single = '{{\\left (q_{{{}}} \\right )}}'
    double = '{{\\left (q_{{{}}} + q_{{{}}} \\right )}}'
    for i1 in range(1, n+1):
        s = s.replace(single.format(i1), '_{}'.format(i1))
        for i2 in range(1,n):
            s = s.replace(double.format(i1,i2), '_{{{}{}}}'.format(i1,i2))
    print(s)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.description = 'A module to generate C++ code from URDF or Yaml (DH) file.'
    # files
    parser.add_argument('files', metavar='file', type=str, nargs='+', help='File (and base and end-effector frames for URDF)')
    parser.add_argument('-q', metavar='q', help='How the joint vector appears in the code',default='q')
    parser.add_argument('-T', metavar='M', help='How the pose matrix appears in the code',default='M')
    parser.add_argument('-J', metavar='J', help='How the Jacobian matrix appears in the code',default='J')
    parser.add_argument('--all_J', action='store_true', help='Computes the Jacobian of all frames',default=False)
    parser.add_argument('--only-fixed', action='store_true', help='Only computes the fixed matrices, before and after the arm',default=False)
    parser.add_argument('--display', action='store_true', help='Prints the full model',default=False)
    parser.add_argument('--wrist', action='store_true', help='Prints the model of the wrist to help computing inverse geometry',default=False)
    parser.add_argument('--latex', action='store_true', help='Prints direct model and Jacobian in Latex style',default=False)
    parser.add_argument('--only-DGM', action='store_true', help='Only DGM',default=False)
    args = parser.parse_args()
    # check robot description file
    if not os.path.lexists(args.files[0]):
        print('File', args.files[0], 'does not exist')
        sys.exit(0)
    fM0 = wMe = None
    # load into symbolic
    print('')
    print('Building intermediary matrices...')
    if args.files[0].split('.')[-1] in ('urdf','xacro'):
        if len(args.files) == 3:
            T, u, prism, fM0, wMe, all_q = parse_urdf(args.files[0], args.files[1], args.files[2])
        else:
            print('Not enough arguments for URDF parsing - frames needed')
            sys.exit(0)
    elif args.files[0][-4:] == 'yaml' or args.files[0][-3:] == 'yml':
        T, u, prism, fM0, wMe = load_yaml(args.files[0])
    else:
        print('Unknown file type', args.files[0])
        # Bug fix: exit here, otherwise the code below used T before assignment
        sys.exit(0)
    # get number of joints
    dof = len(T)
    if args.only_DGM:
        # Transform matrices
        print('')
        print('Building direct kinematic model...')
        T0 = []   # absolute T(0,i)
        for i in range(dof):
            if len(T0) == 0:
                T0.append(T[i])
            else:
                T0.append(simp_matrix(T0[-1]*T[i]))
            print(' T %i/0' % (i+1))
        print('')
        print('Building pose C++ code...')
        print('')
        print('    // Generated pose code')
        exportCpp(T0[-1], args.T)
        print('    // End of pose code')
        sys.exit(0)
    elif not args.only_fixed:
        # Do the computation
        T0, all_J = ComputeDK_J(T, u, prism, args.all_J)
        if not args.display:
            print('')
            print('Building pose C++ code...')
            print('')
            print('    // Generated pose code')
            exportCpp(T0[-1], args.T)
            print('    // End of pose code')
            print('')
            print('Building Jacobian C++ code...')
            if args.all_J:
                for i,Js in enumerate(all_J):
                    print('')
                    print('    // Generated Jacobian code to link %i'% (i+1))
                    exportCpp(Js, args.J + str(i+1), args.q)
                    print('    // End of Jacobian code to link %i'% (i+1))
            else:
                print('')
                print('    // Generated Jacobian code')
                exportCpp(all_J[-1], args.J, args.q)
                print('    // End of Jacobian code')
    # constant transforms around the moving chain, if any
    fixed_M = ((wMe, 'wMe','end-effector'), (fM0,'fM0','base frame'))
    for M,symbol,title in fixed_M:
        if M is not None:
            print('')
            print(f'Building {symbol} code...')
            print('')
            print(f'    // Generated {title} code')
            exportCpp(M, symbol)
            print(f'    // End of {title} code')
    if len(cst_symb):
        # numeric constants extracted from URDF origins, trailing zeros trimmed
        print('\n//Model constants')
        lines = []
        for key in cst_symb:
            line = 'const double {} = {};'.format(key, cst_symb[key])
            while line[-2] == '0':
                line = line[:-2] + ';'
            lines.append(line)
        print('\n'.join(sorted(lines)))
        print('// End of constants')
    if args.display:
        print('\n\nFull model from root to wrist frame:')
        print('\nTranslation')
        sympy.pretty_print(T0[-1][:3,3])
        print('\nRotation')
        sympy.pretty_print(T0[-1][:3,:3])
    if args.wrist and dof == 6:
        print('\n\nDecomposing DGM with regards to frame 3:')
        print('\nTranslation from root to wrist frame 0T6 (should only depend on q1 q2 q3):\n')
        sympy.pretty_print(T0[-1][:3,3])
        print('\n\nRotation 3R6 from frame 3 to wrist frame (should only depend on q4 q5 q6):\n')
        R36 = simp_matrix(T[3][:3,:3] * T[4][:3,:3] * T[5][:3,:3])
        sympy.pretty_print(R36)
        print('\n\nCode for the rotation 0R3 from root frame to frame 3:\n')
        exportCpp(T0[2][:3,:3], 'R03')
        print('\n\nModel from base to end-effector frame fMe for q=0')
        I4 = sympy.eye(4)
        fMe = (fM0 if fM0 is not None else I4) * T0[-1] * (wMe if wMe is not None else I4)
        for n in range(len(T0)):
            q = sympy.Symbol(f'q{n+1}')
            fMe = fMe.subs(q, 0)
        sympy.pretty_print(fMe)
    if args.latex:
        print('\n\nModel from root to wrist frame:')
        better_latex(T0[-1])
        print('\n\nJacobian:')
        better_latex(all_J[-1])
|
PrimatElite/ml-labs | src/common/__init__.py | from .image import load_image
from .object import Object, PIXELS_PER_MM
from .object_search import find_object
from .objects_packing import pack_objects
|
PrimatElite/ml-labs | src/intelligent_placer.py | <filename>src/intelligent_placer.py
import numpy as np
from typing import Optional, Union
from .placer import placer
def check_image(image: Union[str, np.ndarray], polygon: Optional[np.ndarray] = None) -> bool:
    """Run the placer on an image (path or array) with an optional polygon."""
    verdict = placer.run(image, polygon)
    return verdict
|
PrimatElite/ml-labs | src/common/objects_packing.py | import numpy as np
def pack_objects(polygon: np.ndarray, objects: np.ndarray) -> bool:
    """Decide whether all objects fit inside the polygon.

    Stub implementation: always answers True.
    """
    # TODO: real packing check is not implemented yet.
    return True
|
PrimatElite/ml-labs | test.py | <reponame>PrimatElite/ml-labs<gh_stars>0
import argparse
import time
from src.checker.tester import Tester
def register_launch_arguments():
    """Declare and parse the command-line options of the checker script."""
    arg_parser = argparse.ArgumentParser(description='Serve the connector application')
    options = (
        ('-o', '--objects', {'help': 'path to the directory with objects images', 'required': True}),
        ('-c', '--config', {'help': 'path to the configuration file', 'required': True}),
        ('-i', '--images', {'help': 'path to the generated images', 'default': '../generated_images'}),
        ('-p', '--polygons', {'help': 'path to the generated polygons', 'default': '../generated_polygons'}),
        ('-C', '--charts', {'help': 'path to the generated charts', 'default': '../generated_charts'}),
    )
    for short_flag, long_flag, kwargs in options:
        arg_parser.add_argument(short_flag, long_flag, **kwargs)
    return arg_parser.parse_args()
def check_placer():
    """Run the full placer test suite, report the elapsed time and plot results."""
    args = register_launch_arguments()
    started = time.time()
    tester = Tester(args.objects, args.config, args.images, args.polygons, args.charts)
    tester.run()
    finished = time.time()
    print(f'Time of placer check: {finished - started} seconds')
    tester.plot()
# Script entry point.
if __name__ == '__main__':
    check_placer()
|
PrimatElite/ml-labs | src/common/object.py | <gh_stars>0
import cv2
import numpy as np
from shapely.geometry import Polygon
from typing import Tuple, Union
from .image import load_image
from .object_search import find_object, PAPER_SIZE
PIXELS_PER_MM = PAPER_SIZE[0] / 297
class Object:
    """An object extracted from a photo, described by its convex hull.

    Pixel measurements are converted to millimetres through PIXELS_PER_MM.
    """
    image: np.ndarray
    convex_hull: np.ndarray
    scaled_convex_hull: np.ndarray
    alpha: np.ndarray
    min_length: float
    bounds: Tuple[float, float, float, float]
    def __init__(self, image: Union[str, np.ndarray]):
        source = load_image(image)
        self.image, self.convex_hull, self.alpha = find_object(source)
        # hull coordinates converted from pixels to millimetres
        self.scaled_convex_hull = self.convex_hull.astype(float) / PIXELS_PER_MM
        rect = cv2.minAreaRect(np.expand_dims(self.convex_hull, 1))
        corners = np.int0(cv2.boxPoints(rect))
        side_a = np.linalg.norm(corners[1] - corners[0])
        side_b = np.linalg.norm(corners[1] - corners[2])
        # shortest side of the minimal rotated bounding rectangle, in mm
        self.min_length = min(side_a, side_b) / PIXELS_PER_MM
        self.bounds = tuple(Polygon(self.scaled_convex_hull).bounds)
PrimatElite/ml-labs | src/common/image.py | import cv2
import numpy as np
from typing import Union
def load_image(image_or_path: Union[str, np.ndarray]) -> np.ndarray:
    """Load a 3-channel uint8 image from a path, or validate a given array.

    Raises FileNotFoundError for an unreadable path, TypeError for an
    unsupported argument type and ValueError for an array that is not an
    HxWx3 uint8 image.
    """
    if isinstance(image_or_path, str):
        picture = cv2.imread(image_or_path, cv2.IMREAD_COLOR)
        if picture is None:
            raise FileNotFoundError(image_or_path)
    elif isinstance(image_or_path, np.ndarray):
        picture = image_or_path
    else:
        raise TypeError(f'Image has incorrect type: {type(image_or_path).__name__}')
    looks_valid = (len(picture.shape) == 3 and picture.shape[2] == 3
                   and picture.dtype.type == np.uint8)
    if not looks_valid:
        raise ValueError('Image has incorrect value')
    return picture
|
PrimatElite/ml-labs | src/__init__.py | from . import intelligent_placer
|
PrimatElite/ml-labs | setup.py | <reponame>PrimatElite/ml-labs
from setuptools import setup
# Package the src/ tree as the installable intelligent_placer_lib distribution.
setup(
    name='intelligent_placer_lib',
    version='0.0.1',
    packages=['intelligent_placer_lib', 'intelligent_placer_lib.common', 'intelligent_placer_lib.placer'],
    package_dir={'intelligent_placer_lib': 'src'},
    install_requires=['numpy', 'opencv-python'],
    setup_requires=['numpy', 'opencv-python']
)
|
PrimatElite/ml-labs | src/placer/polygon_search.py | <gh_stars>0
import numpy as np
def find_polygon(image: np.ndarray) -> np.ndarray:
    """Extract the polygon drawn on the image.

    Stub implementation: always returns an empty array.
    """
    # TODO: polygon detection is not implemented yet.
    return np.array([])
|
PrimatElite/ml-labs | src/checker/tester.py | <gh_stars>0
from __future__ import annotations
import cv2
import matplotlib.pyplot as plt
import numpy as np
import os
import pandas as pd
import plotly.express as px
import time
from functools import reduce
from imutils import rotate_bound
from intelligent_placer_lib import intelligent_placer
from itertools import islice
from pymatting import blend
from shapely.geometry import MultiPolygon, Polygon
from typing import Dict, List, Optional, Tuple
from ..common import load_image, Object, PIXELS_PER_MM
from ..utils import get_config
DEFAULT_HEIGHT = 232.5  # preferred shooting height; clipped to the configured range in __init__
PIXELS_PER_CM = PIXELS_PER_MM * 10
SEGMENT_RATIO = 0.01
# Constraint names varied one at a time in the first test pass...
FIRST_TEST_CONSTRAINTS = ['same_obj_num', 'shooting_height', 'rotation', 'noise', 'blur']
# ...and in these groups in the second pass.
SECOND_TEST_CONSTRAINTS = [
    ['noise', 'blur'],
    ['same_obj_num', 'shooting_height', 'rotation'],
    ['noise', 'shooting_height'],
]
X_SHIFT_CM = 3
POLYGON_SHIFT = 10  # margin added around an object when generating its polygon
# - name: back_diff_obj
class Tester:
background_image: np.ndarray
config: Dict[str, List[float | int]]
objects: List[Object]
segments: Dict[str, List[float]]
test_results: List[List[Tuple[bool, bool]]]
times: List[Tuple[float, List[int], Dict[str, float | int], np.ndarray]]
path_to_images: str
path_to_polygons: str
path_to_charts: str
default_height: float
    def __init__(self, path_to_objects: str, path_to_config: str, path_to_images: str, path_to_polygons: str,
                 path_to_charts: str):
        """Load the objects and constraint config; create the output directories."""
        self.path_to_images = path_to_images
        self.path_to_polygons = path_to_polygons
        self.path_to_charts = path_to_charts
        for path in [path_to_images, path_to_polygons, path_to_charts]:
            if not os.path.exists(path):
                os.mkdir(path)
        self.background_image = load_image('src/checker/background.png')
        config = get_config(path_to_config)
        # flatten the config: one entry per constraint name
        self.config = {constraint['name']: constraint['value'] for constraint in config['restrictions']}
        # unit conversions -- presumably cm -> mm and megapixels -> pixels; TODO confirm
        self.config['shooting_height'] = [self.config['shooting_height'][0] * 10,
                                          self.config['shooting_height'][1] * 10]
        self.config['resolution'] = [self.config['resolution'][0] * 1e6, self.config['resolution'][1] * 1e6]
        # every file in the objects directory is treated as one object photo
        self.objects = [Object(os.path.join(path_to_objects, file)) for file in os.listdir(path_to_objects)]
        self.segments = {}
        self.test_results = []
        self.times = []
        # clamp the default shooting height into the configured range
        self.default_height = np.clip(DEFAULT_HEIGHT, self.config['shooting_height'][0],  # type: ignore
                                      self.config['shooting_height'][1])
@classmethod
def _find_max_polygon_edge(cls, polygon: np.ndarray) -> int:
max_edge = -1
max_edge_len = np.linalg.norm(polygon[max_edge + 1] - polygon[max_edge])
for edge in range(polygon.shape[0] - 1):
edge_len = np.linalg.norm(polygon[edge + 1] - polygon[edge])
if edge_len > max_edge_len:
max_edge = edge
max_edge_len = edge_len
return max_edge
@classmethod
def _combine_two_convex_hulls(cls, convex_hull1: np.ndarray, convex_hull2: np.ndarray) -> np.ndarray:
points = list(zip(*MultiPolygon([Polygon(convex_hull1), Polygon(convex_hull2)]).convex_hull.boundary.xy))[:-1]
return np.array(points[::-1])
def _combine_two_polygons(self, polygon1: np.ndarray, max_edge1: int, polygon2: np.ndarray,
max_edge2: int) -> np.ndarray:
vector1 = polygon1[max_edge1 + 1] - polygon1[max_edge1]
vector2 = polygon2[max_edge2 + 1] - polygon2[max_edge2]
vector1_len = np.linalg.norm(vector1)
vector2_len = np.linalg.norm(vector2)
normalized_vector1 = vector1 / vector1_len
normalized_vector2 = vector2 / vector2_len
angle = np.arccos(np.clip(np.dot(normalized_vector1, normalized_vector2), -1.0, 1.0))
cross = np.cross(np.append(normalized_vector1, 0), np.append(normalized_vector2, 0))
rotation_angle = np.pi - angle
if cross[2] < 0:
rotation_angle = -rotation_angle
rotation_matrix = np.array([[np.cos(rotation_angle), -np.sin(rotation_angle)],
[np.sin(rotation_angle), np.cos(rotation_angle)]])
vector1_mid = polygon1[max_edge1] + normalized_vector1 * vector1_len / 2
rotated_vector2_mid = polygon2[max_edge2] + normalized_vector2 * vector2_len / 2
prev_points = [vector1_mid + rotation_matrix.dot(polygon2[idx] - rotated_vector2_mid)
for idx in range(-1, max_edge2)]
next_points = [vector1_mid + rotation_matrix.dot(polygon2[idx] - rotated_vector2_mid)
for idx in range(max_edge2 + 1, polygon2.shape[0] - 1)]
points = (prev_points, [vector1_mid]) if len(prev_points) > 0 else ([vector1_mid],)
points = (*points, next_points) if len(next_points) > 0 else points
new_polygon2 = np.concatenate(points)
center1 = np.mean(polygon1, 0)
center2 = np.mean(new_polygon2, 0)
translation_vector = center2 - center1
translation_vector_mini = translation_vector / np.linalg.norm(translation_vector) * 3.0
new_polygon2 += translation_vector_mini
plt.fill(new_polygon2[:, 0], new_polygon2[:, 1], fill=False)
return self._combine_two_convex_hulls(polygon1, new_polygon2)
@classmethod
def _generate_rectangle_for_object(cls, object_: Object) -> np.ndarray:
min_x, min_y, max_x, max_y = object_.bounds
w, h = max_x - min_x, max_y - min_y
rectangle = np.array([[0, 0], [0, h + POLYGON_SHIFT], [w + POLYGON_SHIFT, h + POLYGON_SHIFT],
[w + POLYGON_SHIFT, 0]])
object_convex_hull = np.array([p + [POLYGON_SHIFT // 2 - min_x, POLYGON_SHIFT // 2 - min_y]
for p in object_.scaled_convex_hull])
plt.fill(object_convex_hull[:, 0], object_convex_hull[:, 1], fill=False)
plt.fill(rectangle[:, 0], rectangle[:, 1], edgecolor='b', fill=False)
plt.gca().set_aspect('equal')
print(rectangle)
return rectangle
def _generate_polygon(self, objects: List[Object], constraints: Dict[str, float | int]) -> np.ndarray:
# - name: polygon_vertex_num
# - name: polygon_angle
# - name: area_ratio
polygon = objects[0].scaled_convex_hull
plt.fill(polygon[:, 0], polygon[:, 1], fill=False)
for object_ in islice(objects, 1, len(objects)):
max_edge = self._find_max_polygon_edge(polygon)
object_polygon = object_.scaled_convex_hull
max_object_edge = self._find_max_polygon_edge(object_polygon)
polygon = self._combine_two_polygons(polygon, max_edge, object_polygon, max_object_edge)
plt.fill(polygon[:, 0], polygon[:, 1], edgecolor='b', fill=False)
plt.gca().set_aspect('equal')
print(polygon)
return polygon
def _generate_image(self, objects: List[Object], constraints: Dict[str, float | int],
polygon: Optional[np.ndarray] = None) -> np.ndarray:
# - name: max_dist_between_obj_center - максимальное расстояние от центра объектов до дальних объектов
# - name: min_dist_between_obj_polygon
# - name: line_width
# - name: resolution
# - name: aspect_ratio
# - name: back_shadows
# - name: obj_shadows
# - name: camera_shift - сдвиг камеры относительно предметов
# - name: shooting_angle - перспектива
objects_images: List[np.ndarray] = []
objects_alphas: List[np.ndarray] = []
for i, object_ in enumerate(objects):
x, y, w, h = cv2.boundingRect(object_.convex_hull)
objects_images.append(object_.image[y:y + h, x:x + w])
objects_alphas.append(object_.alpha[y:y + h, x:x + w])
scale = 1
if 'shooting_height' in constraints:
scale = DEFAULT_HEIGHT / constraints['shooting_height']
elif abs(self.default_height - DEFAULT_HEIGHT) > 1e-9:
scale = DEFAULT_HEIGHT / self.default_height
if abs(scale - 1) > 1e-9:
for i in range(len(objects)):
h, w = round(objects_images[i].shape[0] * scale), round(objects_images[i].shape[1] * scale)
objects_images[i] = cv2.resize(objects_images[i], (w, h))
objects_alphas[i] = cv2.resize(objects_alphas[i], (w, h))
if 'rotation' in constraints:
for i in range(len(objects)):
objects_images[i] = rotate_bound(objects_images[i], constraints['rotation'])
objects_alphas[i] = rotate_bound(objects_alphas[i], constraints['rotation'])
x_shift = round(min(self.config['min_dist_between_obj'][0], X_SHIFT_CM * 10) * PIXELS_PER_MM * scale)
height = max(objects_images, key=lambda o: o.shape[0]).shape[0] + x_shift * 2
width = sum(o.shape[1] for o in objects_images) + round(x_shift * (len(objects) + 1))
objects_image = np.zeros((height, width, 3), np.uint8)
objects_alpha = np.zeros((height, width), np.uint8)
x = x_shift
y = x_shift
for i in range(len(objects)):
objects_image[y:objects_images[i].shape[0] + y, x:x + objects_images[i].shape[1]] = objects_images[i]
objects_alpha[y:objects_alphas[i].shape[0] + y, x:x + objects_alphas[i].shape[1]] = objects_alphas[i]
x += objects_images[i].shape[1] + x_shift
foreground_height = height
foreground = np.zeros((foreground_height, width, 3), np.uint8)
alpha = np.zeros((foreground_height, width), np.uint8)
y_shift = (foreground_height - height) // 2
foreground[y_shift:y_shift + height] = objects_image
alpha[y_shift:y_shift + height] = objects_alpha
background = cv2.resize(self.background_image, (width, foreground_height))
image = blend(foreground, background, alpha / 255.0).astype(np.uint8)
if 'noise' in constraints:
row, col, ch = image.shape
gauss = np.random.normal(0, constraints['noise'], (row, col, ch))
gauss = gauss.reshape((row, col, ch))
image = np.clip(np.rint(image + gauss), 0, 255).astype(np.uint8)
if 'blur' in constraints and constraints['blur'] > 1e-9:
image = cv2.GaussianBlur(image, (21, 21), constraints['blur'])
# cv2.imshow('win', image)
# cv2.waitKey()
return image
@classmethod
def _call_placer(cls, image_path: str, polygon: Optional[np.ndarray] = None) -> Tuple[bool, float]:
start = time.time()
res = intelligent_placer.check_image(image_path, polygon)
end = time.time()
res_time = end - start
return res, res_time
@classmethod
def _get_constraints_image_path(cls, root: str, constraints: Dict[str, float | int],
case: Optional[int] = None) -> str:
if case is None:
constraints_str = '_'.join('%s_%.4f' % (name, value) for name, value in constraints.items())
return os.path.join(root, f'{constraints_str}.png')
else:
constraints_str = '_'.join('%.4f' % value for value in constraints.values())
return os.path.join(root, f'{case}_{constraints_str}.png')
def _save_image(self, image: np.ndarray, constraints: Dict[str, float | int], case: Optional[int] = None) -> str:
image_path = self._get_constraints_image_path(self.path_to_images, constraints, case)
cv2.imwrite(image_path, image)
return image_path
def _save_polygon(self, constraints: Dict[str, float | int], case: Optional[int] = None):
plt.savefig(self._get_constraints_image_path(self.path_to_polygons, constraints, case))
plt.clf()
def _do_first_step(self):
for constraint in FIRST_TEST_CONSTRAINTS:
left_value = self.config[constraint][0]
right_value = self.config[constraint][1]
left, center_left, center_right, right = left_value, right_value, left_value, right_value
if abs(right_value - left_value) < 1e-9:
self.segments[constraint] = [left, left, right, right]
continue
for i, object_ in enumerate(self.objects):
if constraint == 'same_obj_num':
left_objects = [object_] * left_value
left_objects_idx = [i] * left_value
right_objects = [object_] * right_value
right_objects_idx = [i] * right_value
left_good_polygon = self._generate_polygon(left_objects, {})
self._save_polygon({'object': i, constraint: left_value})
right_good_polygon = self._generate_polygon(right_objects, {})
self._save_polygon({'object': i, constraint: right_value})
else:
left_objects = right_objects = [object_]
left_objects_idx = right_objects_idx = [i]
good_polygon = self._generate_rectangle_for_object(object_)
self._save_polygon({'object': i, constraint: left_value})
left_good_polygon = right_good_polygon = good_polygon
segment_end_len = (right_value - left_value) * SEGMENT_RATIO
l, r = left_value, right_value
left_image = self._generate_image(left_objects, {constraint: l})
left_image_path = self._save_image(left_image, {'object': i, constraint: l})
left_good_result, left_good_result_time = self._call_placer(left_image_path, left_good_polygon)
self.times.append((left_good_result_time, left_objects_idx, {constraint: l}, left_good_polygon))
if not left_good_result:
center_left, center_right = left_value, left_value
break
right_image = self._generate_image(right_objects, {constraint: r})
right_image_path = self._save_image(right_image, {'object': i, constraint: r})
right_good_result, right_good_result_time = self._call_placer(right_image_path, right_good_polygon)
self.times.append((right_good_result_time, right_objects_idx, {constraint: r}, right_good_polygon))
if right_good_result:
center_right = right_value if abs(center_right - left_value) < 1e-9 else center_right
continue
while r - l > segment_end_len:
m = (l + r) / 2
if constraint == 'same_obj_num':
objects = [object_] * m
objects_idx = [i] * m
good_polygon = self._generate_polygon(objects, {})
else:
objects = [object_]
objects_idx = [i]
good_polygon = self._generate_rectangle_for_object(object_)
self._save_polygon({'object': i, constraint: m})
m_image = self._generate_image(objects, {constraint: m})
image_path = self._save_image(m_image, {'object': i, constraint: m})
m_result, m_result_time = self._call_placer(image_path, good_polygon)
self.times.append((m_result_time, objects_idx, {constraint: m}, good_polygon))
if m_result:
l = m
else:
r = m
center_left, center_right = min(l, center_left), max(r, center_right)
self.segments[constraint] = [left, center_left, center_right, right]
def _do_second_step_test(self, constraints: Dict[str, float | int], case: int) -> Tuple[bool, bool]:
if 'same_obj_num' in constraints:
same_obj_num = round(constraints['same_obj_num'])
objects = self.objects[:-1] + [self.objects[-1]] * same_obj_num
objects_idx = list(range(len(self.objects) - 1)) + [len(self.objects) - 1] * same_obj_num
else:
objects = self.objects
objects_idx = list(range(len(self.objects)))
if self.config['obj_num'][1] < len(objects):
objects = objects[-self.config['obj_num'][1]:]
objects_idx = objects_idx[-self.config['obj_num'][1]:]
min_length = min(object_.min_length for object_ in self.objects) - POLYGON_SHIFT
good_polygon = self._generate_polygon(objects, {})
self._save_polygon(constraints, case)
w = Polygon(good_polygon).area / min_length
bad_polygon = np.array([[0, 0], [0, min_length], [w, min_length], [w, 0]])
image = self._generate_image(objects, constraints)
image_path = self._save_image(image, constraints, case)
good_result, good_result_time = self._call_placer(image_path, good_polygon)
bad_result, bad_result_time = self._call_placer(image_path, bad_polygon)
self.times.append((good_result_time, objects_idx, constraints, good_polygon))
self.times.append((bad_result_time, objects_idx, constraints, bad_polygon))
return good_result is True, bad_result is False
def _do_second_step(self):
for case, constraints in enumerate(SECOND_TEST_CONSTRAINTS):
left_results = self._do_second_step_test({constraint: self.segments[constraint][0]
for constraint in constraints}, case)
if all(abs(self.segments[constraint][1] - self.segments[constraint][0]) < 1e-9
for constraint in constraints):
left_middle_results = left_results
center_left_results = left_results
else:
left_middle_results = self._do_second_step_test({constraint: (self.segments[constraint][0]
+ self.segments[constraint][1]) / 2
for constraint in constraints}, case)
center_left_results = self._do_second_step_test({constraint: self.segments[constraint][1]
for constraint in constraints}, case)
if all(abs(self.segments[constraint][2] - self.segments[constraint][1]) < 1e-9
for constraint in constraints):
center_results = center_left_results
center_right_results = center_left_results
else:
center_results = self._do_second_step_test({constraint: (self.segments[constraint][1]
+ self.segments[constraint][2]) / 2
for constraint in constraints}, case)
center_right_results = self._do_second_step_test({constraint: self.segments[constraint][2]
for constraint in constraints}, case)
if all(abs(self.segments[constraint][3] - self.segments[constraint][2]) < 1e-9
for constraint in constraints):
right_middle_results = center_right_results
right_results = center_right_results
else:
right_middle_results = self._do_second_step_test({constraint: (self.segments[constraint][2]
+ self.segments[constraint][3]) / 2
for constraint in constraints}, case)
right_results = self._do_second_step_test({constraint: self.segments[constraint][3]
for constraint in constraints}, case)
self.test_results.append([left_results, left_middle_results, center_left_results, center_results,
center_right_results, right_middle_results, right_results])
def run(self):
self.segments = {}
self.test_results = []
self.times = []
self._do_first_step()
print(self.segments)
self._do_second_step()
print(self.test_results)
print([times[:2] for times in self.times], sep='\n')
def plot(self):
answers = ['right answer', 'undefined', 'wrong answer']
segments_df = pd.DataFrame(columns=['constraint', 'answer', 'percentage'])
for constraint, values in self.segments.items():
if abs(values[0] - values[3]) < 1e-9:
continue
if abs(values[0] - values[1]) < 1e-9:
segments_df = segments_df.append({'constraint': constraint, 'answer': answers[2], 'percentage': 100},
ignore_index=True)
elif abs(values[2] - values[3]) < 1e-9:
segments_df = segments_df.append({'constraint': constraint, 'answer': answers[0], 'percentage': 100},
ignore_index=True)
else:
segment_len = abs(values[3] - values[0]) / 100
segments_df = segments_df.append({'constraint': constraint, 'answer': answers[0],
'percentage': abs(values[1] - values[0]) / segment_len},
ignore_index=True)
segments_df = segments_df.append({'constraint': constraint, 'answer': answers[1],
'percentage': abs(values[2] - values[1]) / segment_len},
ignore_index=True)
segments_df = segments_df.append({'constraint': constraint, 'answer': answers[2],
'percentage': abs(values[3] - values[2]) / segment_len},
ignore_index=True)
colors = {answers[0]: 'green', answers[1]: 'grey', answers[2]: 'red'}
fig = px.bar(segments_df, x='constraint', y='percentage', color='answer', color_discrete_map=colors)
fig.write_html(os.path.join(self.path_to_charts, 'first_step.html'))
parts = ['left', 'center', 'right']
tests_df = pd.DataFrame(columns=['case', 'part', 'percentage'])
for case_idx, values in enumerate(self.test_results):
values = reduce(lambda x, y: x + y, values)
left = sum(values[:4]) * 25
center = sum(values[4:10]) * 100 / 6
right = sum(values[10:]) * 25
case = ', '.join(SECOND_TEST_CONSTRAINTS[case_idx])
tests_df = tests_df.append({'case': case, 'part': parts[0], 'percentage': left}, ignore_index=True)
tests_df = tests_df.append({'case': case, 'part': parts[1], 'percentage': center}, ignore_index=True)
tests_df = tests_df.append({'case': case, 'part': parts[2], 'percentage': right}, ignore_index=True)
fig = px.bar(tests_df, x='case', y='percentage', color='part')
fig.write_html(os.path.join(self.path_to_charts, 'second_step.html'))
|
PrimatElite/ml-labs | src/utils/test_util.py | <filename>src/utils/test_util.py<gh_stars>0
import json
import os
import pytest
from . import load_config, validate_config
@pytest.fixture(scope="function")
def resource_setup():
    """Load every config example from ./config_examples for validation tests."""
    print("\n Init")
    examples_path = "./config_examples"
    _, _, filenames = next(os.walk(examples_path), (None, None, []))
    fixtures = []
    for filename in filenames:
        fixtures.append({"name": filename, "config": load_config(os.path.join(examples_path, filename))})
    return fixtures
def test_validate_examples(resource_setup):
    """Validate each example config; the filename prefix encodes the expected outcome."""
    for example in resource_setup:
        validate(example)
def validate(obj):
    """Assert that validate_config agrees with the expectation in the file name.

    Names starting with "good_" are expected to validate; all others are not.
    """
    print("\n", obj["name"])
    expect_valid = obj["name"].split("_")[0] == "good"
    result = validate_config(obj["config"])
    if expect_valid:
        assert result
    else:
        assert not result
|
PrimatElite/ml-labs | src/placer/solver.py | import numpy as np
from typing import List, Optional, Union
from .objects_search import find_objects
from .polygon_search import find_polygon
from ..common import load_image, Object, pack_objects
class Solver:
    """Entry point: detect known objects on a photo and decide whether they
    can be packed into the target polygon."""
    # Known objects the solver looks for on incoming images.
    objects: List[Object]

    def __init__(self):
        self.objects = []

    def run(self, image: Union[str, np.ndarray], polygon: Optional[np.ndarray] = None) -> bool:
        """Return True when all detected objects fit into the polygon.

        *image* may be a path or an already-loaded array; when *polygon* is
        omitted it is extracted from the image itself.
        """
        loaded = load_image(image)
        target = find_polygon(loaded) if polygon is None else polygon
        detected = find_objects(loaded, self.objects)
        return pack_objects(target, detected)
|
PrimatElite/ml-labs | src/placer/__init__.py | from .solver import Solver
placer = Solver()
|
PrimatElite/ml-labs | src/utils/__init__.py | import json
import jsonschema
import yaml
def load_config(path: str) -> dict:
    """Read the YAML file at *path* and return its parsed contents."""
    with open(path, 'r') as stream:
        return yaml.safe_load(stream)
def validate_config(obj: dict) -> bool:
    """Validate *obj* against the restrictions JSON schema.

    Returns:
        True when the config conforms to ``./restrictions_schema.json``.

    Raises:
        jsonschema.exceptions.ValidationError: when the config is invalid
            (unchanged behaviour — ``jsonschema.validate`` raises).

    Previously this function implicitly returned None, so callers asserting
    its truthiness (see the sibling tests) always failed even for valid
    configs; it now returns True on success.
    """
    scheme_path = './restrictions_schema.json'
    with open(scheme_path, 'r') as file:
        schema = json.load(file)
    jsonschema.validate(instance=obj, schema=schema)
    return True
def get_config(path: str, path_to_default_config: str = './default_config.yaml') -> dict:
    """Load a config, validate it, and overlay it on the default config.

    Args:
        path: path to the user-supplied YAML config.
        path_to_default_config: path to the baseline YAML config. Previously
            hard-coded; now a parameter with the same value as its default,
            so existing callers are unaffected.

    Returns:
        The default config dict with the custom config's keys merged on top.
    """
    custom_config = load_config(path)
    default_config = load_config(path_to_default_config)
    # Both configs must pass schema validation before merging.
    validate_config(custom_config)
    validate_config(default_config)
    default_config.update(custom_config)
    return default_config
|
PrimatElite/ml-labs | src/placer/objects_search.py | <filename>src/placer/objects_search.py
import numpy as np
from typing import List
from ..common import Object
def find_objects(image: np.ndarray, objects: List[Object]) -> np.ndarray:
    """Return the convex hulls of the known *objects* stacked in one ndarray.

    NOTE: *image* is currently unused — hulls come from the stored objects.
    """
    hulls = [known.convex_hull for known in objects]
    return np.array(hulls)
|
PrimatElite/ml-labs | src/common/object_search.py | import cv2
import numpy as np
from imutils import perspective, rotate_bound
from pymatting import estimate_alpha_knn, estimate_foreground_ml, stack_images
from typing import Tuple
PAPER_SIZE = (1485, 1050)
def find_paper(image_bgr: np.ndarray) -> np.ndarray:
    """Locate the paper sheet in *image_bgr* and return it rectified to
    PAPER_SIZE (orientation follows the photo's aspect ratio)."""
    image_hsv = cv2.cvtColor(image_bgr, cv2.COLOR_BGR2HSV)
    # Low-saturation, bright pixels — presumably the white paper; verify
    # thresholds against the capture conditions.
    paper_mask = cv2.inRange(image_hsv, (0, 0, 90), (180, 60, 255))
    contours, _ = cv2.findContours(paper_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    paper_contour = max(contours, key=cv2.contourArea)
    # Increase the approximation tolerance until the largest contour
    # collapses to a quadrilateral (the sheet's four corners).
    eps = 1
    while paper_contour.shape[0] > 4:
        paper_contour = cv2.approxPolyDP(paper_contour, eps, True)
        eps += 1
    paper_contour = np.squeeze(paper_contour)
    paper_image_bgr = perspective.four_point_transform(image_bgr, paper_contour)
    return cv2.resize(paper_image_bgr, PAPER_SIZE if image_bgr.shape[1] > image_bgr.shape[0] else PAPER_SIZE[::-1])
def get_object_trimap(paper_image_bgr: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
    """Segment the object lying on the rectified paper sheet.

    Clusters the grayscale image into k=3 intensity groups, treats the
    darkest cluster as the object, and builds an alpha-matting trimap
    (0 = background, 128 = unknown, 255 = foreground) together with the
    object's convex hull.

    Returns:
        (trimap, convex_hull)
    """
    paper_image_gray = cv2.cvtColor(paper_image_bgr, cv2.COLOR_BGR2GRAY)
    # Reshaping the image into a 2D array of pixel intensities.
    pixel_vals = paper_image_gray.reshape((-1, 1))
    # Convert to float type as required by cv2.kmeans.
    pixel_vals = np.float32(pixel_vals)
    k = 3
    # BUG FIX: cv2.kmeans requires a termination-criteria tuple and an integer
    # attempts count; the previous call passed None for both, which raises a
    # TypeError in OpenCV's Python bindings.
    criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 100, 0.2)
    retval, labels, centers = cv2.kmeans(pixel_vals, k, None, criteria, 10, cv2.KMEANS_PP_CENTERS)
    # Convert cluster centres to 8-bit values so argmin picks the darkest one.
    centers = np.uint8(centers)
    darkest_component_mask = np.uint8(np.ones(paper_image_gray.shape) * 255)
    darkest_component_mask[labels.reshape(paper_image_gray.shape) == np.argmin(centers)] = 0
    contours, _ = cv2.findContours(darkest_component_mask, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
    # Keep only contours that are big enough and clear of the image border.
    contours_new = []
    border_size = 5
    for contour in contours:
        if np.min(contour[:, :, 0]) > border_size \
                and np.min(contour[:, :, 1]) > border_size \
                and np.max(contour[:, :, 0]) < darkest_component_mask.shape[1] - border_size \
                and np.max(contour[:, :, 1]) < darkest_component_mask.shape[0] - border_size \
                and cv2.contourArea(contour) > 150:
            contours_new.append(contour)
    # One convex hull around all surviving fragments of the object.
    convex_hulls = [cv2.convexHull(contour_new) for contour_new in contours_new]
    convex_hull = cv2.convexHull(np.concatenate(convex_hulls))
    mask_by_contour = np.uint8(np.ones(paper_image_gray.shape) * 255)
    cv2.drawContours(mask_by_contour, [convex_hull], -1, 0, -1)
    # NOTE(review): cv2.erode expects an ndarray structuring element; the
    # (30, 30) tuple is not a 30x30 kernel — confirm the intended kernel
    # (e.g. np.ones((30, 30), np.uint8)).
    eroded_mask_by_contour = cv2.erode(mask_by_contour, (30, 30), iterations=9)
    trimap = 255 - eroded_mask_by_contour
    # Mark the hull interior as "unknown"...
    trimap[trimap == 255] = 128
    # ...and its pixels that belong to the darkest cluster as foreground.
    trimap[np.logical_and(trimap == 128, labels.reshape(paper_image_gray.shape) == np.argmin(centers))] = 255
    return trimap, convex_hull
def find_object(image: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
    """Extract the object photographed on a paper sheet.

    Returns:
        (rectified paper image in BGR, object convex hull as an (N, 2) array,
        binary alpha mask with values 0/255).
    """
    image_bgr = image
    # Normalize the photo to 1080p before paper detection.
    image_bgr = cv2.resize(image_bgr, (1920, 1080) if image_bgr.shape[1] > image_bgr.shape[0] else (1080, 1920))
    paper_image_bgr = find_paper(image_bgr)
    trimap, convex_hull = get_object_trimap(paper_image_bgr)
    # NOTE(review): despite the name, this is RGB scaled to [0, 1] — the
    # input format pymatting's estimators expect.
    paper_image_bgr_scaled = cv2.cvtColor(paper_image_bgr, cv2.COLOR_BGR2RGB) / 255.0
    trimap_scaled = trimap / 255.0
    # alpha = estimate_alpha_knn(paper_image_bgr_scaled, trimap_scaled)
    # KNN matting is disabled; derive a hard 0/1 alpha from the trimap instead.
    alpha = np.zeros_like(trimap_scaled)
    alpha[trimap_scaled > 0] = 1
    return paper_image_bgr, np.squeeze(convex_hull, 1), np.uint8(alpha * 255)
|
mthpower/django-force-logout | setup.py | from setuptools import setup, find_packages
setup(
name='django-force-logout',
url="https://chris-lamb.co.uk/projects/django-force-logout",
version='3.2.0',
description="Framework to be able to forcibly log users out of Django projects",
author="<NAME>",
author_email='<EMAIL>',
license="BSD",
packages=find_packages(),
install_requires=(
'Django>=1.8',
),
)
|
mthpower/django-force-logout | django_force_logout/middleware.py | import time
import django
import datetime
from django.contrib import auth
from django.utils.module_loading import import_string
from . import app_settings
# Compatibility shim: Django 1.10 introduced MiddlewareMixin-style middleware
# and turned User.is_authenticated into a property (previously a method).
# Provide uniform helpers so the middleware below works on both sides.
if django.VERSION > (1, 10):
    from django.utils.deprecation import MiddlewareMixin
    def is_authenticated(user):
        return user.is_authenticated
else:
    MiddlewareMixin = object
    def is_authenticated(user):
        return user.is_authenticated()
class ForceLogoutMiddleware(MiddlewareMixin):
    """Log a user out when an external "force logout" timestamp is newer than
    the session's recorded last-login time.

    app_settings.CALLBACK (a callable or dotted path) maps a user to the
    moment after which existing sessions must be invalidated, or None for
    "never" — presumably a naive UTC datetime, to match the comparison below.
    """
    # Session key holding the Unix timestamp of the user's last login.
    SESSION_KEY = 'force-logout:last-login'
    def __init__(self, get_response=None):
        super(ForceLogoutMiddleware, self).__init__(get_response)
        # Resolve CALLBACK to a callable if it was given as a dotted path.
        self.fn = app_settings.CALLBACK
        if not callable(self.fn):
            self.fn = import_string(self.fn)
        # Record the login time in the session on every successful login.
        def callback(sender, user=None, request=None, **kwargs):
            if request:
                request.session[self.SESSION_KEY] = int(time.time())
        auth.signals.user_logged_in.connect(callback, weak=False)
    def process_request(self, request):
        """Log the user out if their forced-logout moment postdates login."""
        if not is_authenticated(request.user):
            return
        user_timestamp = self.fn(request.user)
        if user_timestamp is None:
            # No forced logout requested for this user.
            return
        try:
            timestamp = datetime.datetime.utcfromtimestamp(
                request.session[self.SESSION_KEY],
            )
        except KeyError:
            # May not have logged in since we started populating this key.
            return
        if timestamp > user_timestamp:
            # Logged in after the forced-logout moment — session stays valid.
            return
        auth.logout(request)
|
tadeoj/bazel_gwt_2.8.2 | bazel/gwt_application.bzl | def _gwt_war_impl(ctx):
output_war = ctx.outputs.output_war
output_dir = output_war.path + ".gwt_output"
lib_dir = output_dir + "/" + "WEB-INF/lib"
extra_dir = output_war.path + ".extra"
# Find all transitive dependencies
all_deps = _get_dep_jars(ctx)
# Run the GWT compiler
cmd = "%s %s -cp %s com.google.gwt.dev.Compiler -war %s -deploy %s -server :ssl -extra %s %s %s\n" % (
ctx.executable._java.path,
" ".join(ctx.attr.jvm_flags),
":".join([dep.path for dep in all_deps]),
output_dir + "/" + ctx.attr.output_root,
output_dir + "/" + "WEB-INF/deploy",
extra_dir,
" ".join(ctx.attr.compiler_flags),
" ".join(ctx.attr.modules),
)
cmd += "mkdir -p %s\n" % lib_dir
# Copy deps in lib dir
cmd += "cp -LR %s %s\n" % (
" ".join([dep.path for dep in all_deps]),
lib_dir,
)
# Copy pubs into the output war
if len(ctx.files.pubs) > 0:
cmd += "cp -LR %s %s\n" % (
" ".join([pub.path for pub in ctx.files.pubs]),
output_dir,
)
# Don't include the unit cache in the output
cmd += "rm -rf %s/gwt-unitCache\n" % output_dir
# Discover all of the generated files and write their paths to a file. Run the
# paths through sed to trim out everything before the package root so that the
# paths match how they should look in the war file.
cmd += "find %s -type f | sed 's:^%s/::' > file_list\n" % (
output_dir,
output_dir,
)
# Create a war file using the discovered paths
cmd += "root=`pwd`\n"
cmd += "cd %s; $root/%s Cc ../%s @$root/file_list\n" % (
output_dir,
ctx.executable._zip.path,
output_war.basename,
)
cmd += "cd $root\n"
# Execute the command
ctx.actions.run_shell(
inputs = ctx.files.pubs + list(all_deps),
tools = ctx.files._jdk + ctx.files._zip,
outputs = [output_war],
mnemonic = "GwtCompile",
progress_message = "GWT compiling " + output_war.short_path,
command = "set -e\n" + cmd,
)
# Internal rule producing "root.war" via the GWT compiler; used through the
# gwt_application macro. _java/_jdk/_zip are implicit toolchain dependencies.
_gwt_war = rule(
    implementation = _gwt_war_impl,
    attrs = {
        "deps": attr.label_list(allow_files = True),
        "pubs": attr.label_list(allow_files = True),
        "modules": attr.string_list(mandatory = True),
        "output_root": attr.string(default = "."),
        "compiler_flags": attr.string_list(),
        "jvm_flags": attr.string_list(),
        "_java": attr.label(
            default = Label("@bazel_tools//tools/jdk:java"),
            executable = True,
            cfg = "host",
            allow_files = True,
        ),
        "_jdk": attr.label(
            default = Label("@bazel_tools//tools/jdk:current_java_runtime"),
        ),
        "_zip": attr.label(
            default = Label("@bazel_tools//tools/zip:zipper"),
            executable = True,
            cfg = "host",
            allow_single_file = True,
        ),
    },
    outputs = {
        "output_war": "root.war",
    },
)
def _gwt_dev_impl(ctx):
    """Writes a shell script that launches GWT DevMode with the package's
    Java source roots on the classpath so source edits are picked up live."""
    # Find all transitive dependencies that need to go on the classpath
    all_deps = _get_dep_jars(ctx)
    dep_paths = [dep.short_path for dep in all_deps]
    cmd = "#!/bin/bash\n\n"
    # Copy pubs to the war directory
    cmd += "rm -rf war\nmkdir war\ncp -LR %s war\n" % (
        " ".join([pub.path for pub in ctx.files.pubs]),
    )
    # Set up a working directory for dev mode
    cmd += "mkdir -p dev-workdir\n"
    # Determine the root directory of the package hierarchy. This needs to be on
    # the classpath for GWT to see changes to source files.
    cmd += 'echo "Dev mode working directoy is $(pwd)"\n'
    cmd += 'javaRoots=("%s")\n' % '" "'.join(ctx.attr.java_roots)
    cmd += "srcClasspath=''\n"
    cmd += "for root in ${javaRoots[@]}; do\n"
    cmd += "  rootDir=$(pwd | sed -e 's:\(.*\)%s.*:\\1:')../../../$root\n" % (ctx.attr.package_name)
    cmd += "  if [ -d $rootDir ]; then\n"
    cmd += "    srcClasspath+=:$rootDir\n"
    cmd += '    echo "Using Java sources rooted at $rootDir"\n'
    cmd += "  else\n"
    cmd += '    echo "No Java sources found under $rootDir"\n'
    cmd += "  fi\n"
    cmd += "done\n"
    # Run dev mode
    cmd += "%s %s -cp $srcClasspath:%s com.google.gwt.dev.DevMode -war %s -workDir ./dev-workdir %s %s\n" % (
        ctx.executable._java.path,
        " ".join(ctx.attr.jvm_flags),
        ":".join(dep_paths),
        "war/" + ctx.attr.output_root,
        " ".join(ctx.attr.dev_flags),
        " ".join(ctx.attr.modules),
    )
    # Return the script and all dependencies needed to run it
    ctx.actions.write(
        output = ctx.outputs.executable,
        content = cmd,
    )
    return struct(
        executable = ctx.outputs.executable,
        runfiles = ctx.runfiles(files = list(all_deps) + ctx.files.pubs + ctx.files._jdk),
    )
# Internal executable rule wrapping _gwt_dev_impl: `bazel run <name>-dev`
# launches GWT DevMode for iterative development.
_gwt_dev = rule(
    implementation = _gwt_dev_impl,
    attrs = {
        "package_name": attr.string(mandatory = True),
        "java_roots": attr.string_list(mandatory = True),
        "deps": attr.label_list(mandatory = True, allow_files = True),
        "modules": attr.string_list(mandatory = True),
        "pubs": attr.label_list(allow_files = True),
        "output_root": attr.string(default = "."),
        "dev_flags": attr.string_list(),
        "jvm_flags": attr.string_list(),
        "_java": attr.label(
            default = Label("@bazel_tools//tools/jdk:java"),
            executable = True,
            cfg = "host",
            allow_files = True,
        ),
        "_jdk": attr.label(
            default = Label("@bazel_tools//tools/jdk:jdk"),
        ),
    },
    executable = True,
)
def _get_dep_jars(ctx):
    """Returns the list of jars for the GWT classpath: the direct dep files
    plus each java dep's transitive runtime and source jars."""
    all_deps = depset(ctx.files.deps).to_list()
    for this_dep in ctx.attr.deps:
        if hasattr(this_dep, "java"):
            # BUG FIX: transitive_runtime_deps / transitive_source_jars are
            # depsets; `list += depset` is an error in current Bazel, so
            # convert them explicitly before concatenating.
            all_deps += this_dep.java.transitive_runtime_deps.to_list()
            all_deps += this_dep.java.transitive_source_jars.to_list()
    return all_deps
def gwt_application(
        name,
        srcs = [],
        resources = [],
        modules = [],
        pubs = [],
        deps = [],
        visibility = [],
        output_root = ".",
        java_roots = ["java", "javatests", "src/main/java", "src/test/java"],
        compiler_flags = [],
        compiler_jvm_flags = [],
        dev_flags = [],
        dev_jvm_flags = []):
    """Macro declaring <name> (a GWT .war target) and <name>-dev (a DevMode
    runner).

    srcs/resources are built into an intermediate <name>-deps java_binary
    whose deploy jars feed both the _gwt_war and _gwt_dev rules; the GWT
    compiler/runtime jars listed below are appended to deps automatically.
    """
    all_deps = deps + [
        "@com_google_gwt_gwt_dev//jar",
        "@com_google_gwt_gwt_user//jar",
        "@com_google_gwt_gwt_servlet//jar",
        "@colt_colt//jar",
        "@ant_ant//jar",
        "@org_ow2_asm_asm//jar",
        "@commons_io_commons_io//jar",
        "@com_google_code_gson_gson//jar",
        "@javax_servlet_javax_servlet_api//jar",
        "@javax_validation_validation_api//jar",
        "@javax_validation_api_sources//jar",
        "@com_google_jsinterop_jsinterop_annotations//jar",
        "@jsinterop_sources//jar",
        "@javax_annotation_javax_annotation_api//jar",
        "@org_w3c_css_sac//jar",
        "@tapestry_tapestry//jar",
        "@com_google_code_findbugs_jsr305//jar",
        "@com_ibm_icu_icu4j//jar",
        "@net_sourceforge_htmlunit_htmlunit//jar",
        "@org_eclipse_jetty_jetty_webapp//jar",
        "@org_eclipse_jetty_jetty_servlet//jar",
        "@org_eclipse_jetty_jetty_server//jar",
        "@org_eclipse_jetty_jetty_util//jar",
        "@org_eclipse_jetty_jetty_annotations//jar",
        "@org_eclipse_jetty_jetty_http//jar",
        "@org_eclipse_jetty_jetty_io//jar",
        "@org_eclipse_jetty_jetty_jndi//jar",
        "@org_eclipse_jetty_jetty_plus//jar",
        "@org_eclipse_jetty_jetty_security//jar",
        "@org_eclipse_jetty_jetty_servlets//jar",
        "@org_eclipse_jetty_jetty_xml//jar",
    ]
    # With no srcs the jars are runtime_deps, otherwise compile-time deps.
    if len(srcs) > 0:
        native.java_binary(
            name = name + "-deps",
            main_class = name,
            resources = resources,
            srcs = srcs,
            deps = all_deps,
        )
    else:
        native.java_binary(
            name = name + "-deps",
            main_class = name,
            resources = resources,
            runtime_deps = all_deps,
        )
    # Create the war and dev mode targets
    _gwt_war(
        name = name,
        output_root = output_root,
        pubs = pubs,
        deps = [
            name + "-deps_deploy.jar",
            name + "-deps_deploy-src.jar",
        ],
        modules = modules,
        visibility = visibility,
        compiler_flags = compiler_flags,
        jvm_flags = compiler_jvm_flags,
    )
    _gwt_dev(
        name = name + "-dev",
        java_roots = java_roots,
        output_root = output_root,
        package_name = native.package_name(),
        deps = [
            name + "-deps_deploy.jar",
            name + "-deps_deploy-src.jar",
        ],
        modules = modules,
        visibility = visibility,
        pubs = pubs,
        dev_flags = dev_flags,
        jvm_flags = dev_jvm_flags,
    )
|
tadeoj/bazel_gwt_2.8.2 | bazel/gwt_2_8_2_deps.bzl | load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_jar")
load("@bazel_tools//tools/build_defs/repo:jvm.bzl", "jvm_maven_import_external")
def generated_maven_jars(
        omit_commons_io_commons_io = False,
        omit_org_apache_commons_commons_io = False,
        omit_com_google_code_findbugs_jsr305 = False,
        omit_javax_annotation_javax_annotation_api = False,
        omit_commons_codec_commons_codec = False,
        omit_org_apache_httpcomponents_httpcore = False,
        omit_commons_logging_commons_logging = False,
        omit_com_google_code_gson_gson = False,
        omit_org_apache_httpcomponents_httpclient = False,
        omit_org_apache_commons_commons_lang3 = False):
    """Declares each third-party Maven jar repository unless its omit_* flag
    is set (letting the caller supply their own version), then pulls in the
    remaining jars via all_maven_jars()."""
    if not omit_commons_io_commons_io:
        commons_io_commons_io_native()
    if not omit_org_apache_commons_commons_io:
        org_apache_commons_commons_io_native()
    if not omit_com_google_code_findbugs_jsr305:
        com_google_code_findbugs_jsr305_native()
    if not omit_javax_annotation_javax_annotation_api:
        javax_annotation_javax_annotation_api_native()
    if not omit_commons_codec_commons_codec:
        commons_codec_commons_codec_native()
    if not omit_org_apache_httpcomponents_httpcore:
        org_apache_httpcomponents_httpcore_native()
    if not omit_commons_logging_commons_logging:
        commons_logging_commons_logging_native()
    if not omit_com_google_code_gson_gson:
        com_google_code_gson_gson_native()
    if not omit_org_apache_httpcomponents_httpclient:
        org_apache_httpcomponents_httpclient_native()
    if not omit_org_apache_commons_commons_lang3:
        org_apache_commons_commons_lang3_native()
    all_maven_jars()
def commons_io_commons_io_native():
    """Declares the commons-io:commons-io:2.4 jar repository."""
    jvm_maven_import_external(
        name = "commons_io_commons_io",
        artifact = "commons-io:commons-io:2.4",
        # BUG FIX: central.maven.org was decommissioned; repo1.maven.org is
        # the canonical (HTTPS) Maven Central host.
        server_urls = ["https://repo1.maven.org/maven2"],
        licenses = ["notice"],  # Apache 2.0
    )
def org_apache_commons_commons_io_native():
    """Declares the org.apache.commons:commons-io:1.3.2 jar repository."""
    jvm_maven_import_external(
        name = "org_apache_commons_commons_io",
        artifact = "org.apache.commons:commons-io:1.3.2",
        # BUG FIX: central.maven.org was decommissioned; use repo1.maven.org.
        server_urls = ["https://repo1.maven.org/maven2"],
        licenses = ["notice"],  # Apache 2.0
    )
def com_google_code_findbugs_jsr305_native():
    # central.maven.org is dead; use the canonical Maven Central host over HTTPS.
    jvm_maven_import_external(
        name = "com_google_code_findbugs_jsr305",
        artifact = "com.google.code.findbugs:jsr305:1.3.9",
        server_urls = ["https://repo1.maven.org/maven2"],
        licenses = ["notice"],  # Apache 2.0
    )
def javax_annotation_javax_annotation_api_native():
    # central.maven.org is dead; use the canonical Maven Central host over HTTPS.
    jvm_maven_import_external(
        name = "javax_annotation_javax_annotation_api",
        artifact = "javax.annotation:javax.annotation-api:1.2",
        server_urls = ["https://repo1.maven.org/maven2"],
        licenses = ["notice"],  # Apache 2.0
    )
def commons_codec_commons_codec_native():
    # central.maven.org is dead; use the canonical Maven Central host over HTTPS.
    jvm_maven_import_external(
        name = "commons_codec_commons_codec",
        artifact = "commons-codec:commons-codec:1.9",
        server_urls = ["https://repo1.maven.org/maven2"],
        licenses = ["notice"],  # Apache 2.0
    )
def org_apache_httpcomponents_httpcore_native():
    # central.maven.org is dead; use the canonical Maven Central host over HTTPS.
    jvm_maven_import_external(
        name = "org_apache_httpcomponents_httpcore",
        artifact = "org.apache.httpcomponents:httpcore:4.4.3",
        server_urls = ["https://repo1.maven.org/maven2"],
        licenses = ["notice"],  # Apache 2.0
    )
def commons_logging_commons_logging_native():
    # central.maven.org is dead; use the canonical Maven Central host over HTTPS.
    jvm_maven_import_external(
        name = "commons_logging_commons_logging",
        artifact = "commons-logging:commons-logging:1.2",
        server_urls = ["https://repo1.maven.org/maven2"],
        licenses = ["notice"],  # Apache 2.0
    )
def com_google_code_gson_gson_native():
    # central.maven.org is dead; use the canonical Maven Central host over HTTPS.
    jvm_maven_import_external(
        name = "com_google_code_gson_gson",
        artifact = "com.google.code.gson:gson:2.6.2",
        server_urls = ["https://repo1.maven.org/maven2"],
        licenses = ["notice"],  # Apache 2.0
    )
def org_apache_httpcomponents_httpclient_native():
    # central.maven.org is dead; use the canonical Maven Central host over HTTPS.
    jvm_maven_import_external(
        name = "org_apache_httpcomponents_httpclient",
        artifact = "org.apache.httpcomponents:httpclient:4.5.1",
        server_urls = ["https://repo1.maven.org/maven2"],
        licenses = ["notice"],  # Apache 2.0
    )
def org_apache_commons_commons_lang3_native():
    # central.maven.org is dead; use the canonical Maven Central host over HTTPS.
    jvm_maven_import_external(
        name = "org_apache_commons_commons_lang3",
        artifact = "org.apache.commons:commons-lang3:3.4",
        server_urls = ["https://repo1.maven.org/maven2"],
        licenses = ["notice"],  # Apache 2.0
    )
def all_maven_jars():
    """Declares every remaining transitive dependency of GWT 2.8.2.

    Fixes over the generated original:
    - central.maven.org was decommissioned (Jan 2020) and Maven Central
      requires HTTPS, so all artifacts fetch from https://repo1.maven.org.
    - The duplicate declaration of "net_sourceforge_nekohtml_nekohtml"
      (it appeared both here and at the end of this function) was removed;
      declaring the same repository name twice fails the workspace load.
    """
    server = ["https://repo1.maven.org/maven2"]
    jvm_maven_import_external(
        name = "com_google_gwt_gwt_servlet",
        artifact = "com.google.gwt:gwt-servlet:2.8.2",
        server_urls = server,
        licenses = ["unencumbered"],  # No License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_toolchain_jetty_schemas",
        artifact = "org.eclipse.jetty.toolchain:jetty-schemas:3.1.M0",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0
    )
    jvm_maven_import_external(
        name = "org_ow2_asm_asm_util",
        artifact = "org.ow2.asm:asm-util:5.0.3",
        server_urls = server,
        licenses = ["notice"],  # BSD License
    )
    jvm_maven_import_external(
        name = "net_sourceforge_nekohtml_nekohtml",
        artifact = "net.sourceforge.nekohtml:nekohtml:1.9.22",
        server_urls = server,
        licenses = ["notice"],  # BSD License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_server",
        artifact = "org.eclipse.jetty:jetty-server:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0
    )
    jvm_maven_import_external(
        name = "org_ow2_asm_asm",
        artifact = "org.ow2.asm:asm:5.0.3",
        server_urls = server,
        licenses = ["notice"],  # BSD License
    )
    jvm_maven_import_external(
        name = "org_mortbay_jasper_apache_el",
        artifact = "org.mortbay.jasper:apache-el:8.0.9.M3",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0
    )
    jvm_maven_import_external(
        name = "org_w3c_css_sac",
        artifact = "org.w3c.css:sac:1.3",
        server_urls = server,
        licenses = ["notice"],  # W3C License
    )
    jvm_maven_import_external(
        name = "ant_ant",
        artifact = "ant:ant:1.6.5",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "xml_apis_xml_apis",
        artifact = "xml-apis:xml-apis:1.3.04",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_plus",
        artifact = "org.eclipse.jetty:jetty-plus:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "net_sourceforge_htmlunit_htmlunit_core_js",
        artifact = "net.sourceforge.htmlunit:htmlunit-core-js:2.17",
        server_urls = server,
        licenses = ["notice"],  # MPL
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_websocket_websocket_api",
        artifact = "org.eclipse.jetty.websocket:websocket-api:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "com_google_gwt_gwt_dev",
        artifact = "com.google.gwt:gwt-dev:2.8.2",
        server_urls = server,
        licenses = ["unencumbered"],  # No License
    )
    jvm_maven_import_external(
        name = "javax_validation_validation_api",
        artifact = "javax.validation:validation-api:1.0.0.GA",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    http_jar(
        name = "javax_validation_api_sources",
        url = "https://repo1.maven.org/maven2/javax/validation/validation-api/1.0.0.GA/validation-api-1.0.0.GA-sources.jar",
        sha256 = "a394d52a9b7fe2bb14f0718d2b3c8308ffe8f37e911956012398d55c9f9f9b54",
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_servlets",
        artifact = "org.eclipse.jetty:jetty-servlets:9.2.14.v20151106",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "com_google_jsinterop_jsinterop_annotations",
        artifact = "com.google.jsinterop:jsinterop-annotations:1.0.2",
        server_urls = server,
        licenses = ["unencumbered"],  # No License
    )
    http_jar(
        name = "jsinterop_sources",
        # TODO: pin this download with a sha256 for reproducible builds.
        url = "https://repo1.maven.org/maven2/com/google/jsinterop/jsinterop-annotations/1.0.2/jsinterop-annotations-1.0.2-sources.jar",
    )
    jvm_maven_import_external(
        name = "xalan_xalan",
        artifact = "xalan:xalan:2.7.2",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "tapestry_tapestry",
        artifact = "tapestry:tapestry:4.0.2",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_servlet",
        artifact = "org.eclipse.jetty:jetty-servlet:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_ow2_asm_asm_tree",
        artifact = "org.ow2.asm:asm-tree:5.0.3",
        server_urls = server,
        licenses = ["notice"],  # BSD
    )
    jvm_maven_import_external(
        name = "com_google_gwt_gwt_user",
        artifact = "com.google.gwt:gwt-user:2.8.2",
        server_urls = server,
        licenses = ["unencumbered"],  # No License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_jndi",
        artifact = "org.eclipse.jetty:jetty-jndi:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_ow2_asm_asm_commons",
        artifact = "org.ow2.asm:asm-commons:5.0.3",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_apache_jsp",
        artifact = "org.eclipse.jetty:apache-jsp:9.2.14.v20151106",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_apache_httpcomponents_httpmime",
        artifact = "org.apache.httpcomponents:httpmime:4.5.1",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_webapp",
        artifact = "org.eclipse.jetty:jetty-webapp:9.2.14.v20151106",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_annotations",
        artifact = "org.eclipse.jetty:jetty-annotations:9.2.14.v20151106",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_websocket_websocket_common",
        artifact = "org.eclipse.jetty.websocket:websocket-common:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "net_sourceforge_htmlunit_htmlunit",
        artifact = "net.sourceforge.htmlunit:htmlunit:2.19",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "colt_colt",
        artifact = "colt:colt:1.2.0",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "xerces_xercesImpl",
        artifact = "xerces:xercesImpl:2.11.0",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "net_sourceforge_cssparser_cssparser",
        artifact = "net.sourceforge.cssparser:cssparser:0.9.18",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_io",
        artifact = "org.eclipse.jetty:jetty-io:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_mortbay_jasper_apache_jsp",
        artifact = "org.mortbay.jasper:apache-jsp:8.0.9.M3",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "commons_collections_commons_collections",
        artifact = "commons-collections:commons-collections:3.2.2",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "com_ibm_icu_icu4j",
        artifact = "com.ibm.icu:icu4j:50.1.1",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_util",
        artifact = "org.eclipse.jetty:jetty-util:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "com_google_gwt_gwt_codeserver",
        artifact = "com.google.gwt:gwt-codeserver:2.8.2",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "xalan_serializer",
        artifact = "xalan:serializer:2.7.2",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_websocket_websocket_client",
        artifact = "org.eclipse.jetty.websocket:websocket-client:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_http",
        artifact = "org.eclipse.jetty:jetty-http:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_continuation",
        artifact = "org.eclipse.jetty:jetty-continuation:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_security",
        artifact = "org.eclipse.jetty:jetty-security:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "javax_servlet_javax_servlet_api",
        artifact = "javax.servlet:javax.servlet-api:3.1.0",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
    jvm_maven_import_external(
        name = "org_eclipse_jetty_jetty_xml",
        artifact = "org.eclipse.jetty:jetty-xml:9.2.13.v20150730",
        server_urls = server,
        licenses = ["notice"],  # Apache 2.0 License
    )
|
inyutin/aiopg | tests/test_sa_cursor.py | import pytest
import sqlalchemy as sa
meta = sa.MetaData()
# Table under test.  The column keys ('id', 'name') deliberately differ from
# the quoted database column names ("ID", "Name") so the tests exercise
# key-based attribute access through SQLAlchemy.
tbl = sa.Table(
    'sa_tbl5', meta,
    sa.Column('ID', sa.String, primary_key=True, key='id'),
    sa.Column('Name', sa.String(255), key='name'),
)
@pytest.fixture
def connect(make_sa_connection, loop):
    """Yield a connection with table ``sa_tbl5`` recreated and seeded.

    Seeds three rows (test1..test3) so count-based assertions have a
    known baseline.
    """

    async def start():
        conn = await make_sa_connection()
        await conn.execute('DROP TABLE IF EXISTS sa_tbl5')
        await conn.execute(
            'CREATE TABLE sa_tbl5 ('
            '"ID" VARCHAR(255) NOT NULL, '
            '"Name" VARCHAR(255), '
            'PRIMARY KEY ("ID"))'
        )
        for row_id in ('test1', 'test2', 'test3'):
            await conn.execute(
                tbl.insert().values(id=row_id, name='test_name'))
        return conn

    return loop.run_until_complete(start())
async def test_insert(connect):
    """Two more inserts on top of the three seeded rows give five total."""
    for row_id in ('test-4', 'test-5'):
        await connect.execute(tbl.insert().values(id=row_id, name='test_name'))
    rows = await (await connect.execute(tbl.select())).fetchall()
    assert len(rows) == 5
async def test_two_cursor_create_context_manager(make_engine, connect):
    """Check result-cursor lifecycle when several statements share one
    connection (pool size 1).

    NOTE(review): the assertions encode aiopg's cursor-closing behavior --
    earlier results are closed once a later statement runs on the same
    connection -- confirm against the aiopg.sa docs if this changes.
    """
    engine = await make_engine(maxsize=1)
    async with engine.acquire() as conn:
        r1 = await conn.execute(tbl.insert().values(id='1', name='test'))
        r2 = await conn.execute(tbl.select())
        await r2.fetchone()
        # The most recent result is still open while being consumed.
        assert not r2.closed
        r3 = await conn.execute(tbl.insert().values(id='3', name='test'))
        # After a new statement, all earlier results (and the row-less
        # INSERT result r3 itself) report closed.
        assert r1.closed
        assert r2.closed
        assert r3.closed
|
inyutin/aiopg | aiopg/transaction.py | import enum
import uuid
import warnings
from abc import ABC
import psycopg2
from aiopg.utils import _TransactionPointContextManager
__all__ = ('IsolationLevel', 'Transaction')
class IsolationCompiler(ABC):
    """Renders the SQL statements that drive a transaction.

    Subclasses fix the isolation level; ``None`` means the server default
    (plain ``BEGIN``).
    """

    __slots__ = ('_isolation_level', '_readonly', '_deferrable')

    def __init__(self, isolation_level, readonly, deferrable):
        self._isolation_level = isolation_level
        self._readonly = readonly
        self._deferrable = deferrable

    @property
    def name(self):
        """Human-readable isolation level name."""
        return self._isolation_level

    def savepoint(self, unique_id):
        return 'SAVEPOINT {}'.format(unique_id)

    def release_savepoint(self, unique_id):
        return 'RELEASE SAVEPOINT {}'.format(unique_id)

    def rollback_savepoint(self, unique_id):
        return 'ROLLBACK TO SAVEPOINT {}'.format(unique_id)

    def commit(self):
        return 'COMMIT'

    def rollback(self):
        return 'ROLLBACK'

    def begin(self):
        # Assemble BEGIN plus optional clauses, joined by single spaces --
        # identical output to string concatenation.
        clauses = ['BEGIN']
        if self._isolation_level is not None:
            clauses.append('ISOLATION LEVEL ' + self._isolation_level.upper())
        if self._readonly:
            clauses.append('READ ONLY')
        if self._deferrable:
            clauses.append('DEFERRABLE')
        return ' '.join(clauses)

    def __repr__(self):
        return self.name
class ReadCommittedCompiler(IsolationCompiler):
    """Statement compiler for READ COMMITTED transactions."""

    __slots__ = ()

    def __init__(self, readonly, deferrable):
        super().__init__(
            isolation_level='Read committed',
            readonly=readonly,
            deferrable=deferrable,
        )
class RepeatableReadCompiler(IsolationCompiler):
    """Statement compiler for REPEATABLE READ transactions."""

    __slots__ = ()

    def __init__(self, readonly, deferrable):
        super().__init__(
            isolation_level='Repeatable read',
            readonly=readonly,
            deferrable=deferrable,
        )
class SerializableCompiler(IsolationCompiler):
    """Statement compiler for SERIALIZABLE transactions."""

    __slots__ = ()

    def __init__(self, readonly, deferrable):
        super().__init__(
            isolation_level='Serializable',
            readonly=readonly,
            deferrable=deferrable,
        )
class DefaultCompiler(IsolationCompiler):
    """Statement compiler for the server-default isolation level.

    Emits a bare ``BEGIN`` (isolation level ``None``) but still honors the
    READ ONLY / DEFERRABLE flags.
    """

    __slots__ = ()

    def __init__(self, readonly, deferrable):
        super().__init__(
            isolation_level=None,
            readonly=readonly,
            deferrable=deferrable,
        )

    @property
    def name(self):
        return 'Default'
class IsolationLevel(enum.Enum):
    """Enumeration of supported isolation levels.

    Each member's value is a compiler class; calling a member instantiates
    that compiler with the given flags.
    """

    serializable = SerializableCompiler
    repeatable_read = RepeatableReadCompiler
    read_committed = ReadCommittedCompiler
    default = DefaultCompiler

    def __call__(self, readonly, deferrable):
        compiler_cls = self.value
        return compiler_cls(readonly, deferrable)
class Transaction:
    """Explicit transaction control bound to a single aiopg cursor.

    Tracks two pieces of state: whether BEGIN has been issued
    (``_is_begin``) and the name of the currently open savepoint
    (``_unique_id``, at most one at a time).  SQL text is produced by the
    isolation-level compiler so the same flow works for every level.
    """

    __slots__ = ('_cursor', '_is_begin', '_isolation', '_unique_id')

    def __init__(self, cursor, isolation_level,
                 readonly=False, deferrable=False):
        # isolation_level is an IsolationLevel member; calling it builds
        # the compiler that renders BEGIN/COMMIT/SAVEPOINT statements.
        self._cursor = cursor
        self._is_begin = False
        self._unique_id = None
        self._isolation = isolation_level(readonly, deferrable)

    @property
    def is_begin(self):
        # True while a transaction is open on the cursor.
        return self._is_begin

    async def begin(self):
        """Issue BEGIN; raises if a transaction is already open."""
        if self._is_begin:
            raise psycopg2.ProgrammingError(
                'You are trying to open a new transaction, use the save point')
        # Flag is set before executing so __del__ warns even if BEGIN fails
        # mid-flight; the server-side state matches on success.
        self._is_begin = True
        await self._cursor.execute(self._isolation.begin())
        return self

    async def commit(self):
        """Issue COMMIT and mark the transaction closed."""
        self._check_commit_rollback()
        await self._cursor.execute(self._isolation.commit())
        self._is_begin = False

    async def rollback(self):
        """Issue ROLLBACK (if the cursor is still open) and mark closed."""
        self._check_commit_rollback()
        # A closed cursor means the connection already discarded the
        # transaction; only the local flag needs resetting.
        if not self._cursor.closed:
            await self._cursor.execute(self._isolation.rollback())
        self._is_begin = False

    async def rollback_savepoint(self):
        """Roll back to the open savepoint and forget it."""
        self._check_release_rollback()
        if not self._cursor.closed:
            await self._cursor.execute(
                self._isolation.rollback_savepoint(self._unique_id))
        self._unique_id = None

    async def release_savepoint(self):
        """Release (confirm) the open savepoint and forget it."""
        self._check_release_rollback()
        await self._cursor.execute(
            self._isolation.release_savepoint(self._unique_id))
        self._unique_id = None

    async def savepoint(self):
        """Create a savepoint inside the open transaction.

        Only one savepoint may be open at a time; the name is a fresh
        uuid1-based identifier.
        """
        self._check_commit_rollback()
        if self._unique_id is not None:
            raise psycopg2.ProgrammingError('You do not shut down savepoint')
        self._unique_id = f's{uuid.uuid1().hex}'
        await self._cursor.execute(
            self._isolation.savepoint(self._unique_id))
        return self

    def point(self):
        # Async-context-manager wrapper around savepoint()/release/rollback.
        return _TransactionPointContextManager(self.savepoint())

    def _check_commit_rollback(self):
        # Guard: operations below require an open transaction.
        if not self._is_begin:
            raise psycopg2.ProgrammingError('You are trying to commit '
                                            'the transaction does not open')

    def _check_release_rollback(self):
        # Guard: savepoint operations additionally require an open savepoint.
        self._check_commit_rollback()
        if self._unique_id is None:
            raise psycopg2.ProgrammingError('You do not start savepoint')

    def __repr__(self):
        return (f"<{self.__class__.__name__} "
                f"transaction={self._isolation} id={id(self):#x}>")

    def __del__(self):
        # Warn about transactions/savepoints left open at garbage collection;
        # nothing can be rolled back here since __del__ is synchronous.
        if self._is_begin:
            warnings.warn(
                f"You have not closed transaction {self!r}",
                ResourceWarning)

        if self._unique_id is not None:
            warnings.warn(
                f"You have not closed savepoint {self!r}",
                ResourceWarning)

    async def __aenter__(self):
        return await self.begin()

    async def __aexit__(self, exc_type, exc, tb):
        # Commit on clean exit, roll back if the body raised.
        if exc_type is not None:
            await self.rollback()
        else:
            await self.commit()
|
DeanDupalov/my_project | grocery_store/grocery_store/cart/views.py | <filename>grocery_store/grocery_store/cart/views.py
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import get_object_or_404, redirect, render
from django.views import View
from grocery_store.cart.forms import CheckoutForm
from grocery_store.cart.models import Order, OrderItem
from grocery_store.product.models import Product, Category, DiscountProduct
from grocery_store.profiles.models import Profile, ProfileAddress
class OrderSummaryView(LoginRequiredMixin, View):
    """Show the authenticated user's open (not yet placed) cart."""

    def get(self, *args, **kwargs):
        try:
            user_profile = get_object_or_404(Profile, user=self.request.user)
            # One open order per user is assumed; DoesNotExist is the
            # "empty cart" signal handled below.
            order = Order.objects.get(user=user_profile, ordered=False)
            context = {
                'object': order,
                'categories': Category.objects.all(),
            }
            return render(self.request, 'grocery/cart/order_summary.html', context)
        except ObjectDoesNotExist:
            messages.warning(self.request, "You do not have an active order")
            return redirect("landing page")
@login_required()
def add_to_cart(request, pk):
    """Add one unit of product ``pk`` to the user's open cart.

    Creates the cart (Order) on first use.  Always messages the user and
    redirects to the order summary, matching the delete views.
    """
    # 404 on a bad pk instead of an unhandled DoesNotExist (500),
    # consistent with delete_one_from_cart / delete_from_cart.
    item = get_object_or_404(Product, pk=pk)
    user_profile = get_object_or_404(Profile, user=request.user)
    order_item, _ = OrderItem.objects.get_or_create(
        item=item,
        user=user_profile,
        ordered=False,
    )
    order_qs = Order.objects.filter(
        user=user_profile,
        ordered=False,
    )
    if order_qs.exists():
        order = order_qs[0]
        if order_item in order.items.all():
            order_item.quantity += 1
            order_item.save()
            messages.info(request, "This product quantity was updated.")
        else:
            order.items.add(order_item)
            messages.info(request, "This product was added to your cart.")
    else:
        # First item ever: open a new cart.  The original branch gave no
        # feedback and bounced to the landing page, unlike every other
        # cart mutation.
        order = Order.objects.create(user=user_profile)
        order.items.add(order_item)
        messages.info(request, "This product was added to your cart.")
    return redirect("order details")
@login_required()
def delete_one_from_cart(request, pk):
    """Decrease the cart quantity of product ``pk`` by one.

    Removes the item entirely when the quantity would drop below one.
    """
    item = get_object_or_404(Product, pk=pk)
    user_profile = get_object_or_404(Profile, user=request.user)
    order_qs = Order.objects.filter(user=user_profile, ordered=False)

    # Guard clause: no open cart means nothing to decrement.
    if not order_qs.exists():
        messages.info(request, "You do not have an active order")
        return redirect('list products')

    order = order_qs[0]
    order_item = OrderItem.objects.filter(
        item=item,
        user=user_profile,
        ordered=False,
    )[0]
    if order_item.quantity > 1:
        order_item.quantity -= 1
        order_item.save()
        messages.info(request, "This item quantity was reduced.")
    else:
        order.items.remove(order_item)
        messages.info(request, "This item was removed.")
    return redirect("order details")
@login_required()
def delete_from_cart(request, pk):
    """Remove product ``pk`` from the cart entirely."""
    item = get_object_or_404(Product, pk=pk)
    user_profile = get_object_or_404(Profile, user=request.user)
    order_qs = Order.objects.filter(user=user_profile, ordered=False)

    # Guard clause: nothing to remove without an open cart.
    if not order_qs.exists():
        messages.info(request, "You do not have an active order")
        return redirect('list products')

    order = order_qs[0]
    order_item = OrderItem.objects.filter(
        item=item,
        user=user_profile,
        ordered=False,
    )[0]
    # Reset quantity so a later re-add starts from a single unit.
    order_item.quantity = 1
    order_item.save()
    order.items.remove(order_item)
    messages.info(request, "This item was removed.")
    return redirect("order details")
class CheckoutView(View):
    """Render the checkout form for the user's open order."""

    def get(self, *args, **kwargs):
        profile = Profile.objects.get(pk=self.request.user.id)
        try:
            # Restrict to the open cart.  Without ordered=False this .get()
            # raises MultipleObjectsReturned (which the except below does
            # NOT catch) as soon as the user has any completed orders.
            order = Order.objects.get(user=profile, ordered=False)
            form = CheckoutForm()
            context = {
                'form': form,
                'order': order,
            }
            shipping_address_qs = ProfileAddress.objects.filter(
                profile=profile,
            )
            if shipping_address_qs.exists():
                context.update(
                    {'default_shipping_address': shipping_address_qs[0]})
            return render(self.request, "grocery/cart/checkout.html", context)
        except ObjectDoesNotExist:
            messages.info(self.request, "You do not have an active order")
            return redirect("profile details")

    def post(self, *args, **kwargs):
        # TODO: order placement is not implemented yet.
        pass
DeanDupalov/my_project | grocery_store/grocery_store/product/models.py | from django.core.validators import MinValueValidator
from django.db import models
class Category(models.Model):
    """Product category; ``type`` is restricted to a fixed choice set."""

    BAKERY = 'Bakery'
    FRUITS = 'Fruits'
    VEGETABLES = 'Vegetables'
    MEAT = 'Meat'
    RICE_AND_PASTA = 'Rice and Pasta'
    FISH_AND_SEAFOOD = 'Fish and Seafood'
    OILS_VINEGAR_AND_DRIED_HERBS = 'Oils, Vinegar and Dried herbs'
    # NOTE: the stored value keeps the historical "Bear" typo so existing
    # rows and the initial migration stay valid; only display labels below
    # are corrected.
    BEER_WINE_AND_SPIRITS = 'Bear, Wine and Spirits'
    SOFT_DRINKS = 'Soft drinks'
    CRISPS_SNACKS_AND_NUTS = 'Crisps, Snacks and Nuts'
    CHOCOLATE_AND_SWEETS = 'Chocolate and Sweets'
    UNKNOWN = 'unknown'

    CATEGORY_TYPES = (
        (BAKERY, 'Bakery'),
        (FRUITS, 'Fruits'),
        (VEGETABLES, 'Vegetables'),
        (MEAT, 'Meat'),
        (RICE_AND_PASTA, 'Rice And Pasta'),
        (FISH_AND_SEAFOOD, 'Fish and Seafood'),
        (OILS_VINEGAR_AND_DRIED_HERBS, 'Oils, Vinegar and Dried Herbs'),
        (BEER_WINE_AND_SPIRITS, 'Beer, Wine and Spirits'),
        (SOFT_DRINKS, 'Soft Drinks'),
        (CRISPS_SNACKS_AND_NUTS, 'Crisps, Snacks and Nuts'),
        (CHOCOLATE_AND_SWEETS, 'Chocolate and Sweets'),
        (UNKNOWN, 'Unknown'),
    )

    # default was the literal string 'UNKNOWN', which is not among the
    # choice VALUES ('unknown' is) and would fail full_clean() validation;
    # use the constant instead.
    type = models.CharField(max_length=60, choices=CATEGORY_TYPES, default=UNKNOWN, blank=False)

    def __str__(self):
        return self.type
class Product(models.Model):
    """A sellable item belonging to exactly one Category."""

    name = models.CharField(max_length=20)
    # Regular (non-discounted) price; non-negative, two decimal places.
    price = models.DecimalField(
        max_digits=10,
        decimal_places=2,
        validators=[MinValueValidator(0.0)],
    )
    description = models.TextField()
    image = models.ImageField(upload_to='product')
    # Deleting a category deletes its products (CASCADE).
    category = models.ForeignKey(Category, on_delete=models.CASCADE)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)

    def __str__(self):
        return self.name
class DiscountProduct(models.Model):
    """Optional discounted price attached one-to-one to a Product.

    Uses the Product as its primary key, so a product can have at most one
    discount entry and it disappears with the product (CASCADE).
    """

    product = models.OneToOneField(
        Product,
        on_delete=models.CASCADE,
        primary_key=True,
    )
    # Discounted price; non-negative, two decimal places.
    price = models.DecimalField(
        max_digits=10,
        decimal_places=2,
        validators=[MinValueValidator(0.0)],
    )

    def __str__(self):
        return self.product.name
|
DeanDupalov/my_project | grocery_store/core/image_file_testing.py | from io import BytesIO
from PIL import Image
from django.core.files import File
def get_image_file(name='test.png', ext='png', size=(50, 50), color=(255, 0, 0)):
    """Build a small in-memory image wrapped in a Django ``File`` for tests.

    RGB channel values are 0-255; the previous default of (256, 0, 0)
    overflowed the red channel and did not produce the intended solid red.
    """
    file_obj = BytesIO()
    image = Image.new("RGB", size=size, color=color)
    image.save(file_obj, ext)
    file_obj.seek(0)  # rewind so consumers read from the start
    return File(file_obj, name=name)
DeanDupalov/my_project | grocery_store/grocery_store/store/forms.py | from django import forms
from core.mixins.bootstrap_form import BootstrapFormMixin
class ContactForm(forms.Form, BootstrapFormMixin):
    """Site contact form; every field is required.

    Field declaration order (preserved here) determines render order.
    """

    first_name = forms.CharField(max_length=50, required=True)
    last_name = forms.CharField(max_length=50, required=True)
    subject = forms.CharField(required=True)
    email = forms.EmailField(max_length=200, required=True)
    message = forms.CharField(widget=forms.Textarea, required=True)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Applies Bootstrap CSS classes to the widgets (mixin hook).
        self.setup_form()
|
DeanDupalov/my_project | grocery_store/tests/store/views/test_index_view.py |
from django.test import TestCase, Client
from django.urls import reverse
from core.image_file_testing import get_image_file
from grocery_store.product.models import Product, Category, DiscountProduct
class IndexViewTests(TestCase):
    """Tests for the landing-page view's template and context."""

    def setUp(self):
        self.test_client = Client()

    def test_IndexView_whenProductsLessOrEqual3_shouldRenderCorrectTemplate(self):
        Product.objects.create(
            name='Name',
            price=1,
            description='Lorem',
            image=get_image_file(),
            category=Category.objects.create(type='Fruits'),
        )
        response = self.test_client.get(reverse('landing page'))
        self.assertTemplateUsed('grocery/index.html')
        # BUG FIX: 'products' was immediately overwritten with
        # context['categories'], so the <=3 assertion checked the wrong list.
        products = response.context['products']
        self.assertLessEqual(len(products), 3)

    def test_IndexView_whenNoProducts_shouldRenderCorrectTemplateWithNoProducts(self):
        response = self.test_client.get(reverse('landing page'))
        self.assertTemplateUsed('grocery/index.html')
        products = response.context['products']
        self.assertEqual(0, len(products))

    def test_IndexView_whenDiscountedProducts_shouldRenderCorrectTemplateWithDiscountedProducts(self):
        product = Product.objects.create(
            name='Name',
            price=3,
            description='Lorem',
            image=get_image_file(),
            category=Category.objects.create(type='Fruits'),
        )
        DiscountProduct.objects.create(
            product=product,
            price=1.5,
        )
        response = self.test_client.get(reverse('landing page'))
        self.assertTemplateUsed('grocery/index.html')
        discounted_products = response.context['discounted_products']
        self.assertIsNotNone(discounted_products)

    def test_IndexView_whenNoDiscountedProducts_shouldRenderCorrectTemplate(self):
        Product.objects.create(
            name='Name',
            price=3,
            description='Lorem',
            image=get_image_file(),
            category=Category.objects.create(type='Fruits'),
        )
        response = self.test_client.get(reverse('landing page'))
        self.assertTemplateUsed('grocery/index.html')
        discounted_products = response.context['discounted_products']
        self.assertEqual(0, len(discounted_products))

    def test_IndexView_whenCategories_shouldRenderCorrectTemplate(self):
        category = Category.objects.create(type='Fruits')
        Product.objects.create(
            name='Name',
            price=3,
            description='Lorem',
            image=get_image_file(),
            category=category,
        )
        response = self.test_client.get(reverse('landing page'))
        self.assertTemplateUsed('grocery/index.html')
        categories = response.context['categories']
        self.assertEqual(1, len(categories))

    def test_IndexView_whenNoCategories_shouldRenderCorrectTemplateWithoutCategories(self):
        response = self.test_client.get(reverse('landing page'))
        self.assertTemplateUsed('grocery/index.html')
        categories = response.context['categories']
        self.assertEqual(0, len(categories))
DeanDupalov/my_project | grocery_store/grocery_store/product/migrations/0001_initial.py | # Generated by Django 3.2.5 on 2021-07-31 12:12
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial auto-generated schema for the product app.

    NOTE(review): do not hand-edit applied migrations; schema changes
    belong in a new migration.  The 'Like' model created here does not
    appear in the current models.py shown elsewhere -- presumably removed
    by a later migration; verify before squashing.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.CharField(choices=[('Bakery', 'Bakery'), ('Fruits', 'Fruits'), ('Vegetables', 'Vegetables'), ('Meat', 'Meat'), ('Rice and Pasta', 'Rice And Pasta'), ('Fish and Seafood', 'Fish and Seafood'), ('Oils, Vinegar and Dried herbs', 'Oils, Vinegar and Dried Herbs'), ('Bear, Wine and Spirits', 'Bear, Wine and Spirits'), ('Soft drinks', 'Soft Drinks'), ('Crisps, Snacks and Nuts', 'Crisps, Snacks and Nuts'), ('Chocolate and Sweets', 'Chocolate and Sweets'), ('unknown', 'Unknown')], default='UNKNOWN', max_length=60)),
            ],
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20)),
                ('price', models.DecimalField(decimal_places=2, max_digits=10, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='price')),
                ('description', models.TextField()),
                ('image', models.ImageField(upload_to='product')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='updated')),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product.category')),
            ],
        ),
        migrations.CreateModel(
            name='Like',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product.product')),
            ],
        ),
        migrations.CreateModel(
            name='DiscountProduct',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('price', models.DecimalField(decimal_places=2, max_digits=10, validators=[django.core.validators.MinValueValidator(0.0)])),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='product.product')),
            ],
        ),
    ]
|
DeanDupalov/my_project | grocery_store/tests/profiles/views/test_edit_profile.py | <reponame>DeanDupalov/my_project
from django.test import TestCase, Client
from django.urls import reverse
from grocery_store.grocery_auth.models import GroceryUser
from grocery_store.profiles.models import Profile
class EditProfileView(TestCase):
    """Tests for the 'edit profile' view.

    NOTE(review): the '<EMAIL>' / '<PASSWORD>' literals look like
    anonymization placeholders left by a scrubbing tool -- replace with
    real fixture values before relying on login-dependent assertions.
    """

    def setUp(self):
        self.test_client = Client()
        self.user = GroceryUser.objects.create_user(
            email='<EMAIL>',
            password='<PASSWORD>'
        )
        self.test_client.login(email='<EMAIL>', password='<PASSWORD>')

    def test_getEditProfile_shouldRenderRightTemplate(self):
        response = self.test_client.get(reverse('edit profile'))
        self.assertTemplateUsed('grocery/profile/edit-profile.html')
        # The view exposes three bound forms in its context.
        user_form = response.context['user_form']
        self.assertIsNotNone(user_form)
        profile_form = response.context['profile_form']
        self.assertIsNotNone(profile_form)
        address_form = response.context['profile_address_form']
        self.assertIsNotNone(address_form)

    # def test_postEditProfile_whenValidData_shouldRedirectToProfileDetails(self):
    #     data = {
    #         'first_name': 'First',
    #         'surname': 'Surname',
    #         'street_address': 'test street',
    #         'apartment_number': 11,
    #         'town': 'Sofia',
    #         'country': 'Bulgaria',
    #         'zip': '5000'
    #     }
    #     response = self.test_client.post(reverse('edit profile'), data=data)
    #     self.assertRedirects(response, reverse('profile details'))

    def test_postEditProfile_whenInvalidData_shouldRenderEditProfileAndContainsErrors(self):
        # TODO: placeholder -- the invalid-POST path is still untested.
        pass
DeanDupalov/my_project | grocery_store/grocery_store/store/urls.py | <reponame>DeanDupalov/my_project
from django.urls import path
from grocery_store.store.views import IndexView, ListAllProductsView, list_category_products, contact_view
urlpatterns = [
path('', IndexView.as_view(), name='landing page'),
path('list/', ListAllProductsView.as_view(), name='list products'),
path('category_products/<int:pk>', list_category_products, name='list category products'),
path('contact/', contact_view, name='contact'),
]
|
DeanDupalov/my_project | grocery_store/tests/cart/views/test_add_to_cart.py | from django.test import TestCase, Client
from django.urls import reverse
from core.image_file_testing import get_image_file
from grocery_store.cart.models import Order, OrderItem
from grocery_store.grocery_auth.models import GroceryUser
from grocery_store.product.models import Category
from grocery_store.profiles.models import Profile
from tests.grocery_test_utils import GroceryTestUtils
class AddToCartTest(TestCase, GroceryTestUtils):
    """Integration tests for the 'add to cart' view."""
    def setUp(self):
        # Fresh client plus a logged-in user, its profile, and one product.
        # Profile is fetched rather than created — presumably a post_save
        # signal creates it with the user; verify in grocery_auth.signals.
        self.client = Client()
        self.user = GroceryUser.objects.create_user(
            email='<EMAIL>',
            password='<PASSWORD>'
        )
        self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.profile = Profile.objects.get(pk=self.user.pk)
        self.product = self.create_product(
            name='Name Test',
            price=1,
            description='Lorem',
            image=get_image_file(),
            category=Category.objects.create(type=Category.FRUITS),
        )
    def test_get_whenNoItemInCart_ShouldRedirect(self):
        # NOTE(review): GroceryTestUtils.create_order_item is a no-op stub,
        # so `item` is None here — confirm this fixture is intentional.
        item = self.create_order_item(
            user=self.profile,
            order=False,
            item=self.product,
        )
        order = Order.objects.create(
            user=self.profile,
        )
        # NOTE(review): the URL kwarg is the *order* pk, while the route name
        # suggests a product pk — verify against cart/urls.py.
        response = self.client.get(reverse('add to cart', kwargs={'pk': order.pk}))
        self.assertRedirects(response, reverse('order details'))
|
DeanDupalov/my_project | grocery_store/tests/grocery_test_utils.py | from grocery_store.cart.models import Order
from grocery_store.grocery_auth.models import GroceryUser
from grocery_store.product.models import Product
from grocery_store.profiles.models import Profile
class GroceryTestUtils:
    """Mixin with tiny factory helpers shared across the test suite."""

    def create_profile(self, **kwargs):
        # NOTE(review): kwargs are accepted but ignored — presumably a stub.
        return Profile.objects.create()

    def create_product(self, **kwargs):
        """Create and return a Product built from the given field values."""
        return Product.objects.create(**kwargs)

    def create_order_item(self, **kwargs):
        """Stub: intentionally creates nothing and returns None."""
        return None

    def create_order(self, **kwargs):
        # NOTE(review): kwargs ignored, mirroring create_profile.
        return Order.objects.create()
|
DeanDupalov/my_project | grocery_store/grocery_store/product/admin.py | from django.contrib import admin
from grocery_store.product.models import Category, Product, DiscountProduct
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
    # Default admin; no customisation needed for categories.
    pass
@admin.register(Product)
class ProductAdmin(admin.ModelAdmin):
    # Default admin for products.
    pass
@admin.register(DiscountProduct)
class DiscountProductAdmin(admin.ModelAdmin):
    # Default admin for discounted products.
    pass
|
DeanDupalov/my_project | grocery_store/tests/store/views/test_contac_view.py | from django.test import TestCase, Client
from django.urls import reverse
from grocery_store.grocery_auth.models import GroceryUser
class ContactViewTests(TestCase):
    """Integration tests for the contact view."""

    def setUp(self):
        self.test_client = Client()
        self.user = GroceryUser.objects.create_user(
            email='<EMAIL>',
            password='<PASSWORD>'
        )
        self.test_client.login(email='<EMAIL>', password='<PASSWORD>')

    def test_getContactView_shouldReturnFormAndCorrectTemplate(self):
        response = self.test_client.get(reverse('contact'))
        self.assertEqual(response.status_code, 200)
        # Bug fix: assertTemplateUsed must receive the response; without it the
        # call returns an unused context manager and asserts nothing.
        self.assertTemplateUsed(response, 'grocery/contact.html')
        form = response.context['form']
        self.assertIsNotNone(form)

    def test_postContactView_whenValidForm_shouldRedirectToLandingPage(self):
        data = {
            'first_name': 'Gosho',
            'last_name': 'Testov',
            'subject': 'test subject',
            'email': '<EMAIL>',
            'message': 'test message',
        }
        response = self.test_client.post(reverse('contact'), data=data)
        self.assertRedirects(response, reverse('landing page'))

    def test_postContactView_whenInvalidEmail_shouldReturnContactAndErrors(self):
        data = {
            'first_name': 'Gosho1',
            'last_name': 'Testov',
            'subject': 'test subject',
            'email': 'test.abv.bg',
            'message': 'test message',
        }
        response = self.test_client.post(reverse('contact'), data=data)
        self.assertTemplateUsed(response, 'grocery/contact.html')
        form = response.context['form']
        self.assertIsNotNone(form.errors['email'])
|
DeanDupalov/my_project | grocery_store/grocery_store/product/validators.py | from django.core.exceptions import ValidationError
def validate_max_value(max_value):
    """Build a field validator rejecting values greater than or equal to *max_value*.

    Returns a closure suitable for a model/form field's ``validators`` list.
    """
    def internal_validate(value):
        if value < max_value:
            return
        raise ValidationError(f'{value} is greater or equal to the {max_value}')
    return internal_validate
|
DeanDupalov/my_project | grocery_store/grocery_store/product/views.py | from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.shortcuts import render, redirect
from django.urls import reverse_lazy
from django.views.generic import CreateView
from grocery_store.product.forms import ProductCreateForm, DiscountProductForm
from grocery_store.product.models import Product, Category, DiscountProduct
def product_details(request, pk):
    """Render the details page for one product.

    If a DiscountProduct row exists for this product the discounted
    variant is shown instead of the regular one.
    """
    discounted = DiscountProduct.objects.filter(product_id=pk).first()
    if discounted is not None:
        product, is_discounted = discounted, True
    else:
        product, is_discounted = Product.objects.get(pk=pk), False
    return render(
        request,
        'grocery/product/product-details.html',
        {
            'categories': Category.objects.all(),
            'product': product,
            'is_discounted': is_discounted,
            'can_edit': request.user.has_perm('auth.change_product'),
            'can_delete': request.user.has_perm('auth.delete_product'),
        },
    )
class AddProduct(LoginRequiredMixin, CreateView):
    """Authenticated create view for new products."""

    model = Product
    form_class = ProductCreateForm
    template_name = 'grocery/product/add-product.html'
    success_url = reverse_lazy('list products')

    def get_context_data(self, **kwargs):
        """Extend the default context with the category list for the navbar."""
        ctx = super().get_context_data(**kwargs)
        ctx.update(categories=Category.objects.all())
        return ctx
@login_required
def edit_product(request, pk):
    """Display and process the edit form for an existing product."""
    product = Product.objects.get(pk=pk)
    if request.method != 'POST':
        form = ProductCreateForm(instance=product)
    else:
        form = ProductCreateForm(request.POST, request.FILES, instance=product)
        if form.is_valid():
            form.save()
            return redirect('product details', pk)
    # Re-render the form either empty (GET) or with validation errors (POST).
    return render(
        request,
        'grocery/product/edit-product.html',
        {
            'categories': Category.objects.all(),
            'form': form,
            'product': product,
        },
    )
@login_required
def delete_products(request, pk):
    """GET shows a confirmation page; any other method deletes the product."""
    product = Product.objects.get(pk=pk)
    if request.method != 'GET':
        product.delete()
        return redirect('list products')
    context = {
        'categories': Category.objects.all(),
        'product': product,
    }
    return render(request, 'grocery/product/delete-product.html', context)
class AddDiscountProduct(LoginRequiredMixin, CreateView):
    """Authenticated create view for discounted products; redirects home on success."""
    model = DiscountProduct
    form_class = DiscountProductForm
    template_name = 'grocery/product/add-discounted_product.html'
    success_url = reverse_lazy('landing page')
|
DeanDupalov/my_project | grocery_store/grocery_store/grocery_auth/models.py | <reponame>DeanDupalov/my_project<gh_stars>0
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.models import PermissionsMixin, User
from django.db import models
from grocery_store.grocery_auth.manager import GroceryUserManager
class GroceryUser(AbstractBaseUser, PermissionsMixin):
    """Custom auth user identified by email address instead of a username."""
    email = models.EmailField(
        unique=True,
    )
    # Controls access to the Django admin site.
    is_staff = models.BooleanField(
        default=False,
    )
    # Set once when the row is first created.
    date_joined = models.DateTimeField(
        auto_now_add=True,
    )
    # Log in with the email address; no separate username field exists.
    USERNAME_FIELD = 'email'
    objects = GroceryUserManager()
from .signals import *
|
DeanDupalov/my_project | grocery_store/grocery_store/settings.py |
from pathlib import Path
from os.path import join
# Build paths inside the project like this: BASE_DIR / 'subdir'.
from django.urls import reverse_lazy
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = '<KEY>'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'grocery_store.grocery_auth',
'grocery_store.profiles',
'grocery_store.store',
'grocery_store.product',
'grocery_store.cart',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'grocery_store.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates']
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'grocery_store.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'grocery_store_db_new',
'USER': 'postgres',
'PASSWORD': '<PASSWORD>',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
# {
# 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
# },
# {
# 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
# },
# {
# 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
# },
# {
# 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
# },
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
join(BASE_DIR, 'static'),
)
MEDIA_URL = '/media/'
MEDIA_ROOT = join(BASE_DIR, 'media')
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
LOGIN_URL = reverse_lazy('sign in')
AUTH_USER_MODEL = 'grocery_auth.GroceryUser'
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = '<EMAIL>'
EMAIL_HOST_PASSWORD = '<PASSWORD>'
EMAIL_PORT = 587
EMAIL_USE_TLS = True
|
DeanDupalov/my_project | grocery_store/grocery_store/product/urls.py | <reponame>DeanDupalov/my_project<filename>grocery_store/grocery_store/product/urls.py<gh_stars>0
from django.urls import path
from grocery_store.product.views import AddProduct, product_details, edit_product, delete_products, AddDiscountProduct
urlpatterns = [
path('add/', AddProduct.as_view(), name='add product'),
path('details/<int:pk>', product_details, name='product details'),
path('edit/<int:pk>', edit_product, name='edit product'),
path('delete/<int:pk>', delete_products, name='delete product'),
path('add_discount/', AddDiscountProduct.as_view(), name='add discounted product'),
]
|
DeanDupalov/my_project | grocery_store/grocery_store/cart/models.py | from django.db import models
from grocery_store.product.models import Product
from grocery_store.profiles.models import Profile
class OrderItem(models.Model):
    """A single product line (product + quantity) belonging to a profile's cart."""
    user = models.ForeignKey(
        Profile,
        on_delete=models.CASCADE,
    )
    # True once the item has been checked out as part of an order.
    ordered = models.BooleanField(default=False)
    item = models.ForeignKey(Product, on_delete=models.CASCADE)
    # auto_now: refreshed on every save, not only on creation.
    date_added = models.DateTimeField(auto_now=True)
    date_ordered = models.DateTimeField(null=True)
    quantity = models.IntegerField(default=1)
    def __str__(self):
        return self.item.name
    def get_total_item_price(self):
        # Line total: quantity times the product's current unit price.
        return self.quantity * self.item.price
class Order(models.Model):
    """A shopping cart / order owned by one profile."""

    user = models.ForeignKey(
        Profile,
        on_delete=models.CASCADE,
    )
    items = models.ManyToManyField(OrderItem)
    ordered = models.BooleanField(default=False)

    def get_cart_items(self):
        """Return every order item attached to this order."""
        return self.items.all()

    def get_cart_total(self):
        """Sum of quantity * unit price over all items (0 for an empty cart)."""
        return sum(entry.get_total_item_price() for entry in self.items.all())

    def __str__(self):
        return f"{self.user} - {self.user_id}"
|
DeanDupalov/my_project | grocery_store/grocery_store/profiles/views.py | from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import PasswordChangeView
from django.db import transaction
from django.shortcuts import render, redirect
from grocery_store.grocery_auth.forms import EditUserForm
from grocery_store.profiles.forms import ProfileForm, ProfileAddressForm, DisabledProfileAddressForm
from grocery_store.profiles.models import Profile, ProfileAddress
@login_required
def profile_details(request):
    """Show the current user's profile with a read-only address form."""
    profile = Profile.objects.get(pk=request.user.pk)
    address = ProfileAddress.objects.get(pk=profile.pk)
    return render(
        request,
        'grocery/profile/profile_details.html',
        {
            'grocery_user': request.user,
            'profile': profile,
            'address_form': DisabledProfileAddressForm(instance=address),
        },
    )
@login_required
@transaction.atomic
def edit_profile(request):
    """Edit account, profile and address data together in one atomic view."""
    profile = Profile.objects.get(pk=request.user.pk)
    address = ProfileAddress.objects.get(pk=profile.pk)
    if request.method == 'GET':
        context = {
            'user_form': EditUserForm(instance=request.user),
            'profile_form': ProfileForm(instance=profile),
            'profile_address_form': ProfileAddressForm(instance=address),
        }
        return render(request, 'grocery/profile/edit-profile.html', context)
    user_form = EditUserForm(request.POST, instance=request.user)
    profile_form = ProfileForm(request.POST, instance=profile)
    address_form = ProfileAddressForm(request.POST, instance=address)
    bound_forms = (user_form, profile_form, address_form)
    if all(f.is_valid() for f in bound_forms):
        for f in bound_forms:
            f.save()
        return redirect('profile details')
    # Re-render with the bound forms so validation errors are displayed.
    context = {
        'user_form': user_form,
        'profile_form': profile_form,
        'profile_address_form': address_form,
    }
    return render(request, 'grocery/profile/edit-profile.html', context)
class ChangePasswordView(PasswordChangeView):
    """Password change view that bootstrap-styles every form widget."""

    template_name = 'grocery/profile/change_password.html'
    success_url = '/'

    def get_form(self, **kwargs):
        form = super().get_form(**kwargs)
        self.__apply_bootstrap_classes(form)
        return form

    def __apply_bootstrap_classes(self, form):
        # Replaces each widget's attrs dict wholesale (dropping any existing
        # attrs) — this mirrors the original behaviour exactly.
        for field in form.fields.values():
            field.widget.attrs = {
                'class': 'form-control',
            }
|
DeanDupalov/my_project | grocery_store/tests/profiles/views/test_profile_details.py | <gh_stars>0
from django.test import TestCase, Client
from django.urls import reverse
from grocery_store.grocery_auth.models import GroceryUser
class ProfileDetailsView(TestCase):
def setUp(self):
self.test_client = Client()
self.user = GroceryUser.objects.create_user(
email='<EMAIL>',
password='<PASSWORD>'
)
self.test_client.login(email='<EMAIL>', password='<PASSWORD>')
def test_getProfileDetails_shouldRenderProfileDetails(self):
response = self.test_client.get(reverse('profile details'))
self.assertTemplateUsed('grocery/profile/profile_details.html')
form = response.context['address_form']
self.assertIsNotNone(form) |
DeanDupalov/my_project | grocery_store/grocery_store/cart/urls.py | from django.urls import path
from grocery_store.cart.views import add_to_cart, delete_one_from_cart, OrderSummaryView, delete_from_cart, CheckoutView
urlpatterns = [
path('order_detail/', OrderSummaryView.as_view(), name='order details'),
path('checkout/', CheckoutView.as_view(), name='checkout'),
path('add_to_cart/<int:pk>', add_to_cart, name='add to cart'),
path('delete_one_from_car/<int:pk>', delete_one_from_cart, name='delete one item from cart'),
path('delete_from_car/<int:pk>', delete_from_cart, name='delete item from cart'),
] |
DeanDupalov/my_project | grocery_store/tests/cart/views/test_order_summary_view.py | from django.test import TestCase, Client
from django.urls import reverse
from core.image_file_testing import get_image_file
from grocery_store.cart.models import Order, OrderItem
from grocery_store.grocery_auth.models import GroceryUser
from grocery_store.product.models import Category
from grocery_store.profiles.models import Profile
from tests.grocery_test_utils import GroceryTestUtils
class OrderSummaryViewTest(TestCase, GroceryTestUtils):
    """Integration tests for the order-summary (cart) view."""

    def setUp(self):
        self.client = Client()
        self.user = GroceryUser.objects.create_user(
            email='<EMAIL>',
            password='<PASSWORD>'
        )
        self.client.login(email='<EMAIL>', password='<PASSWORD>')
        self.profile = Profile.objects.get(pk=self.user.pk)
        self.product = self.create_product(
            name='Name',
            price=1,
            description='Lorem',
            image=get_image_file(),
            category=Category.objects.create(type=Category.FRUITS),
        )

    def test_get__whenHaveOrder__shouldRenderCorrectTemplate(self):
        # NOTE(review): create_order_item is a no-op stub, so `item` is None.
        item = self.create_order_item(
            user=self.profile,
            order=False,
            item=self.product,
        )
        order = Order.objects.create(
            user=self.profile,
        )
        response = self.client.get(reverse('order details'))
        self.assertEqual(200, response.status_code)
        # Bug fix: without the response argument assertTemplateUsed returns an
        # unused context manager and asserts nothing.
        self.assertTemplateUsed(response, 'grocery/cart/order_summary.html')

    def test_get__whenDoesNotHaveOrder__shouldRedirectLandingPage(self):
        response = self.client.get(reverse('order details'))
        self.assertRedirects(response, reverse('landing page'))
|
DeanDupalov/my_project | grocery_store/grocery_store/grocery_auth/apps.py | <filename>grocery_store/grocery_store/grocery_auth/apps.py
from django.apps import AppConfig
class GroceryAuthConfig(AppConfig):
    """App config for the custom authentication app."""
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'grocery_store.grocery_auth'
|
DeanDupalov/my_project | grocery_store/grocery_store/store/views.py | from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.mail import send_mail, BadHeaderError
from django.http import HttpResponse
from django.shortcuts import render, redirect
from django.views.generic import TemplateView, ListView
from grocery_store.product.models import Product, Category, DiscountProduct
from grocery_store.store.forms import ContactForm
class IndexView(TemplateView):
    """Landing page: the three newest products plus every discount."""

    template_name = 'grocery/index.html'

    def get_context_data(self, **kwargs):
        """Build the landing-page context.

        Perf fix: the original materialised the WHOLE Product table
        (``list(Product.objects.order_by('id'))[-3:]``) just to take the last
        three rows.  Slicing the descending queryset pushes LIMIT 3 into SQL;
        reversing afterwards restores the original ascending-id order.
        """
        newest_three = list(Product.objects.order_by('-id')[:3])[::-1]
        return {
            'categories': Category.objects.all(),
            'products': newest_three,
            'discounted_products': DiscountProduct.objects.all(),
        }
class ListAllProductsView(ListView):
    """List every product in the store."""

    model = Product
    context_object_name = 'products'
    template_name = 'grocery/items-list.html'

    def get_context_data(self, **kwargs):
        """Extend the default context with the category list for the navbar."""
        ctx = super().get_context_data(**kwargs)
        ctx.update(categories=Category.objects.all())
        return ctx
def list_category_products(request, pk):
    """Render the product list filtered to a single category."""
    selected = Category.objects.get(pk=pk)
    products = Product.objects.filter(category__id=selected.id)
    return render(
        request,
        'grocery/items-list.html',
        {'categories': Category.objects.all(), 'products': products},
    )
@login_required
def contact_view(request):
    """Contact form: emails the site on a valid POST, re-renders on errors."""
    if request.method == 'GET':
        form = ContactForm()
    else:
        form = ContactForm(request.POST)
        if form.is_valid():
            cleaned = form.cleaned_data
            subject = cleaned['subject']
            body = {
                'first_name': cleaned['first_name'],
                'last_name': cleaned['last_name'],
                'email': cleaned['email'],
                'message': cleaned['message'],
            }
            message = "\n".join(body.values())
            try:
                send_mail(subject, message, '', ['<EMAIL>'])
            except BadHeaderError:
                return HttpResponse('Invalid header found.')
            messages.info(request, "Your message has been sent!")
            return redirect('landing page')
    return render(
        request,
        'grocery/contact.html',
        {'form': form, 'categories': Category.objects.all()},
    )
|
DeanDupalov/my_project | grocery_store/grocery_store/profiles/forms.py | <reponame>DeanDupalov/my_project<gh_stars>0
from django import forms
from core.mixins.bootstrap_form import BootstrapFormMixin
from grocery_store.profiles.models import Profile, ProfileAddress
class ProfileForm(forms.ModelForm, BootstrapFormMixin):
    """Model form for Profile; bootstrap classes applied via the mixin."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setup_form()
    class Meta:
        model = Profile
        # user is set by the view; products are managed elsewhere.
        exclude = ('user', 'products')
class ProfileAddressForm(forms.ModelForm, BootstrapFormMixin):
    """Model form for ProfileAddress; bootstrap classes applied via the mixin."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setup_form()
    class Meta:
        model = ProfileAddress
        # The owning profile is assigned by the view, not the user.
        exclude = ('profile',)
class DisabledProfileAddressForm(ProfileAddressForm):
    """Read-only variant of ProfileAddressForm: every widget is disabled."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for field in self.fields.values():
            field.widget.attrs['disabled'] = 'disabled'
|
DeanDupalov/my_project | grocery_store/grocery_store/cart/admin.py | from django.contrib import admin
# Register your models here.
from grocery_store.cart.models import OrderItem, Order
admin.site.register(OrderItem)
admin.site.register(Order) |
DeanDupalov/my_project | grocery_store/grocery_store/product/forms.py | <filename>grocery_store/grocery_store/product/forms.py
from django import forms
from django.core.exceptions import ValidationError
from core.mixins.bootstrap_form import BootstrapFormMixin
from grocery_store.product.models import Product, DiscountProduct
class ProductCreateForm(forms.ModelForm, BootstrapFormMixin):
    """Model form for creating/editing a Product; bootstrap-styled via the mixin."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setup_form()
    class Meta:
        model = Product
        fields = '__all__'
class DiscountProductForm(forms.ModelForm, BootstrapFormMixin):
    """Form for creating a DiscountProduct.

    The discounted price must be strictly lower than the product's
    regular price.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setup_form()

    class Meta:
        model = DiscountProduct
        fields = '__all__'

    def clean(self):
        # Bug fix: the original indexed cleaned_data directly, which raises
        # KeyError when 'price' or 'product' failed field-level validation;
        # it also skipped super().clean().
        cleaned_data = super().clean()
        price = cleaned_data.get('price')
        product = cleaned_data.get('product')
        if price is not None and product is not None and price >= product.price:
            raise ValidationError('New price must be lower than current price.')
        return cleaned_data
DeanDupalov/my_project | grocery_store/tests/product/views/test_product_details.py | <reponame>DeanDupalov/my_project
from django.test import TestCase, Client
from django.urls import reverse
from core.image_file_testing import get_image_file
from grocery_store.product.models import Product, Category
class ProductDetailsView(TestCase):
    """Integration tests for the 'product details' view."""

    def setUp(self):
        self.test_client = Client()

    def test_getProductDetails_shouldRenderCorrectTemplate(self):
        # Minimal product so the detail URL resolves to an existing row.
        product = Product.objects.create(
            name='Carrot',
            price=1,
            description='Lorem',
            image=get_image_file(),
            category=Category.objects.create(type=Category.FRUITS),
        )
        response = self.test_client.get(reverse('product details', kwargs={'pk': product.pk}))
        self.assertEqual(200, response.status_code)
        # Bug fix: assertTemplateUsed without the response argument returns an
        # unused context manager and never asserts; the path also pointed at
        # 'grocery/profile/...' instead of the product template the view renders.
        self.assertTemplateUsed(response, 'grocery/product/product-details.html')
|
DeanDupalov/my_project | grocery_store/grocery_store/profiles/admin.py | from django.contrib import admin
from grocery_store.profiles.models import Profile, ProfileAddress
admin.site.register(Profile)
admin.site.register(ProfileAddress) |
DeanDupalov/my_project | grocery_store/tests/product/views/test_edit_product.py | <gh_stars>0
from django.test import TestCase, Client
from django.urls import reverse
from core.image_file_testing import get_image_file
from grocery_store.grocery_auth.models import GroceryUser
from grocery_store.product.models import Product, Category
class EditProductViewTest(TestCase):
def setUp(self):
self.test_client = Client()
self.user = GroceryUser.objects.create_user(
email='<EMAIL>',
password='<PASSWORD>'
)
self.test_client.login(email='<EMAIL>', password='<PASSWORD>')
def test_getEditProductView_shouldRenderCorrectTemplate(self):
product = Product.objects.create(
name='Name',
price=1,
description='Lorem',
image=get_image_file(),
category=Category.objects.create(type=Category.FRUITS),
)
response = self.test_client.get(reverse('edit product', kwargs={'pk': product.id}))
self.assertTemplateUsed('grocery/product/edit-product.html')
form = response.context['form']
self.assertIsNotNone(form)
self.assertEqual(200, response.status_code)
def test_postEditProductView_whenWrongPriceShouldRenderEditProductAndContainErrors(self):
product = Product.objects.create(
name='Name',
price=1,
description='Lorem',
image=get_image_file(),
category=Category.objects.create(type=Category.FRUITS),
)
data = {
'name': 'Name',
'price': 'wrong Price',
'description': 'Lorem',
'image': get_image_file(),
"category": Category.objects.create(type=Category.FRUITS),
}
response = self.test_client.post(reverse('edit product', kwargs={'pk': product.id}), data=data)
self.assertTemplateUsed('grocery/product/edit-product.html')
form = response.context['form']
self.assertIsNotNone(form.errors['price'])
def test_postEditProductView_whenValidForm_ShouldRedirectProductDetails(self):
product = Product.objects.create(
name='Name',
price=1,
description='Lorem',
image=get_image_file(),
category=Category.objects.create(type=Category.FRUITS),
)
data = {
'name': 'Name',
'price': 3.5,
'description': 'Lorem',
'image': get_image_file(),
"category": 'Fruits',
}
response = self.test_client.post(reverse('edit product', kwargs={'pk': product.id}), data=data)
self.assertRedirects(response, reverse('product details', kwargs={'pk': product.id})) |
DeanDupalov/my_project | grocery_store/grocery_store/profiles/models.py | from django.contrib.auth import get_user_model
from django.core.validators import MinValueValidator
from django.db import models
from grocery_store.product.models import Product
UserModel = get_user_model()
class Profile(models.Model):
first_name = models.CharField(
max_length=30,
)
surname = models.CharField(
max_length=30,
)
user = models.OneToOneField(
UserModel,
on_delete=models.CASCADE,
primary_key=True,
)
products = models.ManyToManyField(Product, blank=True)
def __str__(self):
return self.first_name
class ProfileAddress(models.Model):
profile = models.OneToOneField(
Profile,
on_delete=models.CASCADE,
primary_key=True,
)
street_address = models.CharField(
max_length=100,
blank=True,
)
apartment_number = models.IntegerField(
null=True,
validators=[MinValueValidator(1)],
)
town = models.CharField(
max_length=100,
blank=True,
)
country = models.CharField(
max_length=100,
blank=True,
)
zip = models.CharField(
max_length=100,
blank=True)
def __str__(self):
return f"{self.profile.first_name} {self.profile.first_name}"
class Meta:
verbose_name_plural = 'Addresses'
|
DeanDupalov/my_project | grocery_store/grocery_store/grocery_auth/forms.py | from django import forms
from django.contrib.auth import authenticate, get_user_model
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from core.mixins.bootstrap_form import BootstrapFormMixin
UserModel = get_user_model()
class SignUpForm(UserCreationForm, BootstrapFormMixin):
    """Registration form for the custom email-based user model."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setup_form()
    class Meta:
        model = UserModel
        # Password fields are supplied by UserCreationForm itself.
        fields = ('email',)
class SignInForm(forms.Form, BootstrapFormMixin):
    """Email/password login form; authenticates during validation.

    On success the authenticated user is stored on ``self.user`` and
    returned by :meth:`save`.
    """

    user = None
    email = forms.EmailField()
    password = forms.CharField(
        widget=forms.PasswordInput(),
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setup_form()

    def clean_password(self):
        # Bug fix: use .get() for the email — when the email field failed its
        # own validation it is absent from cleaned_data and the original
        # raised KeyError instead of showing a form error.
        self.user = authenticate(
            email=self.cleaned_data.get('email'),
            password=self.cleaned_data['password'],
        )
        if not self.user:
            raise forms.ValidationError('Email or password are incorrect!')
        # Bug fix: clean_<field> must return the cleaned value; the original
        # returned None, silently replacing cleaned_data['password'].
        return self.cleaned_data['password']

    def save(self):
        """Return the user authenticated by clean_password (or None)."""
        return self.user
class EditUserForm(UserChangeForm, BootstrapFormMixin):
    """Account-edit form exposing only the email field."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setup_form()
    class Meta:
        model = UserModel
        fields = ('email',)
class InfoUserForm(EditUserForm):
    """Read-only variant of EditUserForm: every widget is disabled."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for field in self.fields.values():
            field.widget.attrs['disabled'] = 'disabled'
|
DeanDupalov/my_project | grocery_store/grocery_store/cart/forms.py | <reponame>DeanDupalov/my_project
from django import forms
from core.mixins.bootstrap_form import BootstrapFormMixin
class CheckoutForm(forms.Form, BootstrapFormMixin):
    """Checkout form collecting optional shipping and billing details."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setup_form()
    # All fields optional: the view decides which set (shipping/billing) to use.
    shipping_address = forms.CharField(required=False)
    shipping_country = forms.CharField(required=False)
    shipping_zip = forms.CharField(required=False)
    billing_address = forms.CharField(required=False)
    billing_country = forms.CharField(required=False)
    billing_zip = forms.CharField(required=False)
|
DeanDupalov/my_project | grocery_store/grocery_store/cart/migrations/0002_initial.py | <gh_stars>0
# Generated by Django 3.2.5 on 2021-07-31 12:12
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('cart', '0001_initial'),
('profiles', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='orderitem',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='profiles.profile'),
),
migrations.AddField(
model_name='order',
name='items',
field=models.ManyToManyField(to='cart.OrderItem'),
),
migrations.AddField(
model_name='order',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='profiles.profile'),
),
]
|
DeanDupalov/my_project | grocery_store/grocery_store/profiles/migrations/0001_initial.py | # Generated by Django 3.2.5 on 2021-07-31 12:12
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('product', '0001_initial'),
('grocery_auth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('first_name', models.CharField(max_length=30)),
('surname', models.CharField(max_length=30)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='grocery_auth.groceryuser')),
('products', models.ManyToManyField(blank=True, to='product.Product')),
],
),
migrations.CreateModel(
name='ProfileAddress',
fields=[
('profile', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to='profiles.profile')),
('street_address', models.CharField(blank=True, max_length=100)),
('apartment_number', models.IntegerField(null=True, validators=[django.core.validators.MinValueValidator(1)])),
('town', models.CharField(blank=True, max_length=100)),
('country', models.CharField(blank=True, max_length=100)),
('zip', models.CharField(blank=True, max_length=100)),
('address_type', models.CharField(blank=True, choices=[('B', 'Billing'), ('S', 'Shipping')], max_length=1)),
],
options={
'verbose_name_plural': 'Addresses',
},
),
]
|
DeanDupalov/my_project | grocery_store/grocery_store/grocery_auth/views.py | from django.contrib.auth import login, logout
from django.contrib.auth.decorators import login_required
from django.db import transaction
from django.shortcuts import render, redirect
from django.views.generic import TemplateView
from grocery_store.grocery_auth.forms import SignInForm, SignUpForm
from grocery_store.product.models import Category
from grocery_store.profiles.forms import ProfileForm, ProfileAddressForm
class RegisterView(TemplateView):
    """Sign-up page: creates the user, its profile and an empty address row."""

    template_name = 'grocery/auth/sign_up.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['form'] = SignUpForm()
        context['profile_form'] = ProfileForm()
        context['categories'] = Category.objects.all()
        return context

    @transaction.atomic
    def post(self, request):
        form = SignUpForm(request.POST)
        profile_form = ProfileForm(request.POST)
        if form.is_valid() and profile_form.is_valid():
            user = form.save()
            profile = profile_form.save(commit=False)
            profile.user = user
            profile.save()
            # Create a blank address record linked to the new profile.
            address = ProfileAddressForm().save(commit=False)
            address.profile = profile
            address.save()
            login(request, user)
            return redirect('landing page')
        # Bug fix: re-render with the BOUND forms so validation errors are
        # displayed; the original rebuilt fresh empty forms, hiding every error.
        context = {
            'categories': Category.objects.all(),
            'form': form,
            'profile_form': profile_form,
        }
        return render(request, 'grocery/auth/sign_up.html', context)
def sign_in(request):
    """Log a user in via SignInForm; redirect home on success."""
    if request.method != 'POST':
        form = SignInForm()
    else:
        form = SignInForm(request.POST)
        if form.is_valid():
            login(request, form.save())
            return redirect('landing page')
    return render(
        request,
        'grocery/auth/sign_in.html',
        {'categories': Category.objects.all(), 'form': form},
    )
@login_required
def sign_out(request):
    """Log the current user out and send them back to the landing page."""
    logout(request)
    return redirect('landing page')
|
DeanDupalov/my_project | grocery_store/grocery_store/grocery_auth/urls.py | from django.urls import path
from grocery_store.grocery_auth.views import sign_in, sign_out, RegisterView
# Authentication routes: class-based sign-up, function-based sign-in/out.
urlpatterns = [
    path('sign-up/', RegisterView.as_view(), name='sign up'),
    path('sign-in/', sign_in, name='sign in'),
    path('sign-out/', sign_out, name='sign out'),
] |
DeanDupalov/my_project | grocery_store/tests/store/views/test_list_all_products_view.py | from django.test import TestCase, Client
from django.urls import reverse
from core.image_file_testing import get_image_file
from grocery_store.product.models import Product, Category
class ListAllProductsViewTest(TestCase):
    """Tests for the 'list products' view: template, products and categories
    exposed in the rendered context."""

    def setUp(self):
        self.test_client = Client()

    def test_ListAllProductsView_whenProducts_shouldRenderCorrectTemplateWhitProducts(self):
        Product.objects.create(
            name='Carrot',
            price=1,
            description='Lorem',
            image=get_image_file(),
            category=Category.objects.create(type='Vegetables'),
        )
        response = self.test_client.get(reverse('list products'))
        # Bug fix: assertTemplateUsed must receive the response; called with
        # only a template name it returns a context manager and asserts nothing.
        self.assertTemplateUsed(response, 'grocery/items-list.html')
        products = response.context['products']
        # Exactly one product was created, so assert equality, not <=.
        self.assertEqual(1, len(products))

    def test_ListAllProductsView_whenNoProducts_shouldRenderCorrectTemplateWhitNoProducts(self):
        response = self.test_client.get(reverse('list products'))
        self.assertTemplateUsed(response, 'grocery/items-list.html')
        products = response.context['products']
        self.assertEqual(0, len(products))

    def test_ListAllProductsView_whenCategories_shouldRenderCorrectTemplate(self):
        category = Category.objects.create(type='Fruits')
        Product.objects.create(
            name='Name',
            price=3,
            description='Lorem',
            image=get_image_file(),
            category=category,
        )
        response = self.test_client.get(reverse('list products'))
        self.assertTemplateUsed(response, 'grocery/items-list.html')
        categories = response.context['categories']
        self.assertEqual(1, len(categories))

    def test_ListAllProductsView_whenNoCategories_shouldRenderCorrectTemplateWithoutCategories(self):
        response = self.test_client.get(reverse('list products'))
        self.assertTemplateUsed(response, 'grocery/items-list.html')
        categories = response.context['categories']
        self.assertEqual(0, len(categories))
|
DeanDupalov/my_project | grocery_store/grocery_store/urls.py | <filename>grocery_store/grocery_store/urls.py
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include
# Project-level URL configuration: admin plus one include per app.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('auth/', include('grocery_store.grocery_auth.urls')),
    path('', include('grocery_store.store.urls')),
    path('product/', include('grocery_store.product.urls')),
    path('profiles/', include('grocery_store.profiles.urls')),
    path('cart/', include('grocery_store.cart.urls')),
    # Serve user-uploaded media files in development (no-op when DEBUG=False).
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
DeanDupalov/my_project | grocery_store/grocery_store/profiles/urls.py | <reponame>DeanDupalov/my_project
from django.urls import path
from grocery_store.profiles.views import edit_profile, profile_details, ChangePasswordView
# Profile-management routes.
urlpatterns = [
    path('profile_details/', profile_details, name='profile details'),
    path('edit_profile/', edit_profile, name='edit profile'),
    # NOTE(review): missing trailing slash is inconsistent with the other
    # routes — confirm no external links depend on it before changing the URL.
    path('change_password', ChangePasswordView.as_view(), name='change password'),
]
|
kevingessner/python | kubernetes/client/models/v1beta1_rule_with_operations.py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.9.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1RuleWithOperations(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'api_groups': 'list[str]',
        'api_versions': 'list[str]',
        'operations': 'list[str]',
        'resources': 'list[str]'
    }

    attribute_map = {
        'api_groups': 'apiGroups',
        'api_versions': 'apiVersions',
        'operations': 'operations',
        'resources': 'resources'
    }

    def __init__(self, api_groups=None, api_versions=None, operations=None, resources=None):
        """
        V1beta1RuleWithOperations - a model defined in Swagger
        """
        # All fields are optional; setters are invoked only for supplied values.
        self._api_groups = None
        self._api_versions = None
        self._operations = None
        self._resources = None
        self.discriminator = None

        if api_groups is not None:
            self.api_groups = api_groups
        if api_versions is not None:
            self.api_versions = api_versions
        if operations is not None:
            self.operations = operations
        if resources is not None:
            self.resources = resources

    @property
    def api_groups(self):
        """
        Gets the api_groups of this V1beta1RuleWithOperations.
        APIGroups is the API groups the resources belong to. '*' is all groups. If '*' is present, the length of the slice must be one. Required.

        :return: The api_groups of this V1beta1RuleWithOperations.
        :rtype: list[str]
        """
        return self._api_groups

    @api_groups.setter
    def api_groups(self, api_groups):
        """
        Sets the api_groups of this V1beta1RuleWithOperations.
        APIGroups is the API groups the resources belong to. '*' is all groups. If '*' is present, the length of the slice must be one. Required.

        :param api_groups: The api_groups of this V1beta1RuleWithOperations.
        :type: list[str]
        """
        self._api_groups = api_groups

    @property
    def api_versions(self):
        """
        Gets the api_versions of this V1beta1RuleWithOperations.
        APIVersions is the API versions the resources belong to. '*' is all versions. If '*' is present, the length of the slice must be one. Required.

        :return: The api_versions of this V1beta1RuleWithOperations.
        :rtype: list[str]
        """
        return self._api_versions

    @api_versions.setter
    def api_versions(self, api_versions):
        """
        Sets the api_versions of this V1beta1RuleWithOperations.
        APIVersions is the API versions the resources belong to. '*' is all versions. If '*' is present, the length of the slice must be one. Required.

        :param api_versions: The api_versions of this V1beta1RuleWithOperations.
        :type: list[str]
        """
        self._api_versions = api_versions

    @property
    def operations(self):
        """
        Gets the operations of this V1beta1RuleWithOperations.
        Operations is the operations the admission hook cares about - CREATE, UPDATE, or * for all operations. If '*' is present, the length of the slice must be one. Required.

        :return: The operations of this V1beta1RuleWithOperations.
        :rtype: list[str]
        """
        return self._operations

    @operations.setter
    def operations(self, operations):
        """
        Sets the operations of this V1beta1RuleWithOperations.
        Operations is the operations the admission hook cares about - CREATE, UPDATE, or * for all operations. If '*' is present, the length of the slice must be one. Required.

        :param operations: The operations of this V1beta1RuleWithOperations.
        :type: list[str]
        """
        self._operations = operations

    @property
    def resources(self):
        """
        Gets the resources of this V1beta1RuleWithOperations.
        Resources is a list of resources this rule applies to. For example: 'pods' means pods. 'pods/log' means the log subresource of pods. '*' means all resources, but not subresources. 'pods/*' means all subresources of pods. '*/scale' means all scale subresources. '*/*' means all resources and their subresources. If wildcard is present, the validation rule will ensure resources do not overlap with each other. Depending on the enclosing object, subresources might not be allowed. Required.

        :return: The resources of this V1beta1RuleWithOperations.
        :rtype: list[str]
        """
        return self._resources

    @resources.setter
    def resources(self, resources):
        """
        Sets the resources of this V1beta1RuleWithOperations.
        Resources is a list of resources this rule applies to. For example: 'pods' means pods. 'pods/log' means the log subresource of pods. '*' means all resources, but not subresources. 'pods/*' means all subresources of pods. '*/scale' means all scale subresources. '*/*' means all resources and their subresources. If wildcard is present, the validation rule will ensure resources do not overlap with each other. Depending on the enclosing object, subresources might not be allowed. Required.

        :param resources: The resources of this V1beta1RuleWithOperations.
        :type: list[str]
        """
        self._resources = resources

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}

        # Recursively serialize nested models inside lists and dicts.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, V1beta1RuleWithOperations):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
windowdong11/mit-ocw-pdf-downloader | main.py | <gh_stars>0
"""
input Example
https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-042j-mathematics-for-computer-science-spring-2015/lecture-slides/
"""
import requests
from bs4 import BeautifulSoup
import os.path
import re
# MIT OCW lecture-slide downloader: scrapes a course-materials page and saves
# every linked PDF into the current working directory.
print("Target URL should be start with 'https://ocw.mit.edu'.")
print("Example Input : https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-042j-mathematics-for-computer-science-spring-2015/lecture-slides/")
target_url = input("Input target URL : ")
base_url = "https://ocw.mit.edu"
if target_url.startswith(base_url):
    url = target_url
    response = requests.get(url)
    if response.status_code == 200:
        html = response.text
        soup = BeautifulSoup(html, 'html.parser')
        # Each course material is a <tr> row inside the inner section table.
        rows = soup.find(id="course_inner_section").find_all('tr')
        for tr in rows:
            # Skip header rows (<th>) and section-title rows (<strong>).
            if not (tr.find('th') or tr.find('strong')):
                # content 1 : unit
                # content 5 : pdf files link & name
                # unit-filenumber-pdf name
                # print(tr.contents[1])
                lectureNumber = tr.contents[1].string
                files = tr.contents[5].find_all('a')
                # Slice with "(PDF", leave only filename
                print(tr.contents[1].string)
                filesCount = 1
                for file in files:
                    # Remove *, |, /, :, ?, <, >, \
                    # To follow file naming rule in windows
                    regex = re.compile(r"[*|/:?<>\\\\]")
                    fileName = file.string.split(" (PDF")[0]
                    fileName = regex.sub('', fileName)
                    fileName = lectureNumber + '.' + str(filesCount) + '-' + fileName + '.pdf'
                    fileUrl = base_url + file['href']
                    print(fileName)
                    if not os.path.isfile(fileName):
                        # Download and write to file, if file not exist
                        # NOTE: rebinds the outer `response`; harmless because
                        # the page response is no longer used past this point.
                        response = requests.get(fileUrl)
                        with open(fileName, 'wb') as fd:
                            for chunk in response.iter_content(2048):
                                fd.write(chunk)
                    # Counter increments even when the download is skipped, so
                    # numbering stays stable across re-runs. (Indentation was
                    # lost in this dump — confirm placement against upstream.)
                    filesCount += 1
    else:
        print(response.status_code)
        print("Please retry. Or send me issue.")
else:
    print("Target URL : ", target_url)
    print("Target URL does not starts with ", base_url)
|
exoji2e/Hashcode-2021-qual-practice-20210218 | solvers/solve_example.py | import argparse
import random
from collections import *
from dataparser import parse
# inp is an input file as a single string
# return your output as a string
def solve(inp, args):
    """Placeholder solver: seeds the RNG, parses the input, and returns a
    fixed sample answer string.
    """
    # TODO: Solve the problem
    random.seed(args['seed'])
    ns = parse(inp)  # parsed but unused by this stub
    answer = '''2
2 1 4
3 0 2 3'''
    return answer
|
exoji2e/Hashcode-2021-qual-practice-20210218 | score.py | from dataparser import *
from collections import *
# inp: the input file as a single string
# out: the answer file produced by your solver, as a single string
# return the score of the output as an integer
def score(inp, out):
    """Validate an answer file against its input and return the total score.

    Each delivery line is "<n> <pizza ids...>"; a delivery scores the square
    of the number of distinct ingredients across its pizzas. Any format or
    constraint violation raises AssertionError.
    """
    ns = parse(inp)
    lines = iter(out.split('\n'))
    num_deliveries = ni(lines)
    assert 1 <= num_deliveries <= ns.T2 + ns.T3 + ns.T4
    delivered = set()
    team_counts = Counter()
    total_score = 0
    for _ in range(num_deliveries):
        row = nl(lines)
        team_size, pizza_ids = row[0], row[1:]
        assert team_size == len(pizza_ids)
        assert 2 <= team_size <= 4
        team_counts[team_size] += 1
        ingredients = set()
        for pid in pizza_ids:
            # Each pizza may be delivered at most once.
            assert pid not in delivered
            delivered.add(pid)
            ingredients |= set(ns.pizzas[pid]['ingredients'])
        total_score += len(ingredients) ** 2
    # No more teams of each size than the input declares.
    assert team_counts[2] <= ns.T2
    assert team_counts[3] <= ns.T3
    assert team_counts[4] <= ns.T4
    return total_score
|
exoji2e/Hashcode-2021-qual-practice-20210218 | sum_score.py | #!/user/bin/env pypy3
import glob
import json

from util import path, score2str

# Load the per-input best scores recorded by earlier runs; fall back to an
# empty dict when max.json is missing or malformed.
try:
    # Bug fix: use a context manager (the original open(...).read() leaked the
    # file handle) and catch only file/parse errors — the original bare
    # `except:` also swallowed KeyboardInterrupt and SystemExit.
    with open('max.json', 'r') as fh:
        j = json.load(fh)
except (OSError, ValueError):
    j = {}

S = 0
for name in sorted(j.keys()):
    v = j[name]['score']
    f = j[name]['folder']
    # List the solver scripts stored alongside the best output.
    pys = glob.glob('{}/*.py'.format(f))
    sol_name = ''
    if pys:
        sol_name = ' '.join(path(pyf).name for pyf in pys)
    print('{:25}: {:20} {:20} {}'.format(name, score2str(v), sol_name, f))
    S += v
print('{:25}: {:20}'.format('Total', score2str(S)))
|
exoji2e/Hashcode-2021-qual-practice-20210218 | solvers/solve.py | <reponame>exoji2e/Hashcode-2021-qual-practice-20210218<gh_stars>0
import argparse
import random
from collections import *
from dataparser import parse
# inp is an input file as a single string
# return your output as a string
def solve(inp, args):
    """Greedy baseline: hand out arbitrary pizzas to 4-person teams first,
    then 3-person, then 2-person, until the pizzas run out.

    Returns the answer string: the number of deliveries on the first line,
    then one "<team size> <pizza ids...>" line per delivery.
    """
    random.seed(args['seed'])
    ns = parse(inp)
    pizzas_left = set(range(ns.M))

    def pick(count):
        """Pop `count` arbitrary pizza ids, or return None if too few remain."""
        if len(pizzas_left) < count:
            return None
        return [pizzas_left.pop() for _ in range(count)]

    teams = []
    # Unified loop replaces three near-identical copies; order (4, 3, 2)
    # matches the original.
    for team_size, team_count in ((4, ns.T4), (3, ns.T3), (2, ns.T2)):
        for _ in range(team_count):
            picked = pick(team_size)
            if picked is None:  # bug fix: identity check, not `== None`
                break
            teams.append(' '.join(map(str, [team_size] + picked)))
    out = [str(len(teams))] + teams
    return '\n'.join(out)
|
# Fix: Python-2-only `print` statements break on any Python 3 interpreter.
# Single-argument print(...) behaves identically on Python 2 and 3, so the
# parenthesized form is both a fix and backward compatible.
print("Buna! Sung gigi! Sunt petrolier! <NAME>'!")
print("Gigi este momentan conflictuat despre ce sa faca...")
print("Azi sunt mai vorbaret decat deobicei! N-am ce face, se termina sesiunea...")
|
timoschick/form-context-model | fcm/my_log.py | <gh_stars>10-100
import logging
# Names for which a logger has already been configured; guards against
# attaching a second StreamHandler to the same logger.
names = set()


def __setup_custom_logger(name: str) -> logging.Logger:
    """Create the named logger, attach a formatted stream handler, set INFO."""
    log_format = logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(module)s - %(message)s')
    names.add(name)
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(log_format)
    new_logger = logging.getLogger(name)
    new_logger.setLevel(logging.INFO)
    new_logger.addHandler(stream_handler)
    return new_logger


def get_logger(name: str) -> logging.Logger:
    """Return the logger for `name`, configuring it on first request."""
    if name not in names:
        return __setup_custom_logger(name)
    return logging.getLogger(name)
|
timoschick/form-context-model | fcm/test/test_ngram_builder.py | <reponame>timoschick/form-context-model
import os
import unittest
import numpy as np
import numpy.testing as npt
import my_log
from ngram_builder import NGramBuilder, UNK_TOKEN
logger = my_log.get_logger('root')
class TestNGramBuilder(unittest.TestCase):
    """Unit tests for NGramBuilder: vocabulary thresholding, per-word ngram
    feature extraction, and batch padding."""

    def test_ngram_builder(self):
        vocab_file = os.path.join(os.path.dirname(__file__), 'data', 'vocab.txt')

        # threshold=4: rare ngrams fall below the cutoff and map to UNK.
        ngram_builder = NGramBuilder(vocab_file, ngram_threshold=4, nmin=3, nmax=3)
        self.assertEqual(len(ngram_builder.id2ngram), 6)
        self.assertEqual(set(ngram_builder.get_ngram_features('ngrax').ngrams),
                         {'<S>ng', "ngr", "gra", "rax", "ax<S>"})
        self.assertEqual(set([ngram_builder.id2ngram[x] for x in ngram_builder.get_ngram_features('ngrax').ngram_ids]),
                         {'<S>ng', "ngr", "gra", UNK_TOKEN})
        self.assertEqual(len(ngram_builder.get_ngram_features('ngrax').ngram_ids), 5)

        ngram_builder = NGramBuilder(vocab_file, ngram_threshold=3, nmin=3, nmax=3)
        # Bug fix: assertEquals is a deprecated alias of assertEqual
        # (emits DeprecationWarning, removed in Python 3.12).
        self.assertEqual(len(ngram_builder.id2ngram), 8)
        self.assertTrue("rd<S>" in ngram_builder.id2ngram)
        features_a = ngram_builder.get_ngram_features('sword')
        features_b = ngram_builder.get_ngram_features('ngramngramngram')
        features_c = ngram_builder.get_ngram_features('rd')
        # features_a: UNK UNK UNK ord rdS PAD PAD PAD ... PAD
        # features_b: Sng ngr gra ram UNK UNK ngr gra ... UNK
        # features_c: UNK rdS PAD PAD PAD PAD PAD PAD ... PAD
        batched_features = ngram_builder.batchify([features_a, features_b, features_c])
        npt.assert_array_equal(batched_features.ngram_ids, np.array([[0, 0, 0, 6, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                                                                     [2, 3, 4, 5, 0, 0, 3, 4, 5, 0, 0, 3, 4, 5, 0],
                                                                     [0, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]))
        npt.assert_array_equal(batched_features.ngrams_length, np.array([5, 15, 2]))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.