text stringlengths 8 6.05M |
|---|
/Users/matthewpeterson/anaconda3/lib/python3.7/posixpath.py |
'''
A sequence of number is called arithmetic if it consists of at least three elements and if the difference between any two consecutive elements is the same.
https://leetcode.com/problems/arithmetic-slices/#/description
错了2次,用的是动态规划的思路,但是最后超时,是因为没有加缓存吗?
Time Limit Exceeded
'''
class SolutionA(object):
    """Brute-force attempt (reported TLE on LeetCode): count every arithmetic
    suffix of every prefix. Kept for reference; see SolutionB for the DP."""

    def numberOfArithmeticSlices(self, A):
        """
        :type A: List[int]
        :rtype: int
        """
        def is_arithmetic(seq):
            # An arithmetic slice needs >= 3 elements and one common difference.
            if len(seq) < 3:
                return False
            step = seq[1] - seq[0]
            return all(seq[k] - seq[k - 1] == step for k in range(2, len(seq)))

        total = [0]  # boxed so the nested function can mutate it

        def walk(seq):
            # Base case: at most one candidate slice remains.
            if len(seq) <= 3:
                if is_arithmetic(seq):
                    total[0] += 1
                return
            # Count arithmetic suffixes of seq (lengths 3 .. len(seq)) ...
            for start in range(len(seq) - 3, -1, -1):
                if is_arithmetic(seq[start:]):
                    total[0] += 1
            # ... then recurse on the prefix that drops the last element.
            walk(seq[:-1])

        walk(A)
        return total[0]
'''
Runtime: 38 ms
Your runtime beats 63.96 % of python submissions.
'''
class SolutionB(object):
    def numberOfArithmeticSlices(self, A):
        """
        :type A: List[int]
        :rtype: int

        Left-to-right DP (the accepted solution): `run` is the number of
        arithmetic slices ending at the current index; whenever the current
        difference extends the previous one, `run` grows by one, otherwise it
        resets. The answer is the running sum of `run`.
        """
        total = 0
        run = 0
        for k in range(2, len(A)):
            if A[k] - A[k - 1] == A[k - 1] - A[k - 2]:
                run += 1
                total += run
            else:
                run = 0
        return total
|
# Demo script: call the project-local helper and echo its result.
# NOTE(review): `methods` is a project module not visible here; ask_user()
# presumably prompts for input — confirm against methods.py.
from methods import ask_user
print(ask_user())
|
# Assignment 5 - Kevin Nolan
# - CS 4720 -
#
# This program sends a request to a url provided by the user
# and receives either a 200 (OK) response from the server or
# an error message 404. It then prints and saves the response to an output file.
#
import requests
# Prompt for the target URL and the file that will receive the response body.
userRequest = input("Enter Website: ")  # website from user input
userOutput = input("Enter output file: ")  # output file from user

# Issue the GET request.
webReq = requests.get(userRequest)

# Raises requests.HTTPError for 4xx/5xx responses (e.g. 404) instead of
# silently continuing with an error page body.
webReq.raise_for_status()

# Response code from the server (200 if raise_for_status did not raise).
pageStatus = webReq.status_code
print(pageStatus)  # print code to confirm value

if pageStatus == 200:
    print("HTTP Request 200 - OK status!")  # prints that url is valid
    responseText = webReq.text  # saves text from response
    # Prints out the HTTP response to the console for the user
    print("******** HTTP Response Output **********")
    print(responseText)
    print("******** End of HTTP Response **********")
    # FIX: use a context manager so the output file is closed even if the
    # write fails part-way (the original left the handle open on error).
    with open(userOutput, "w") as outputFile:
        outputFile.write(responseText)
    print("HTTP Response saved as requested to: " + userOutput)

# Close the connection to the server
webReq.close()
|
from employee import Employee
'''
emp_1 = Employee(1,"Sunny","M.Tech",56000,"CS")
emp_2 = Employee(2,"Bunny","M.Tech",56000,"IS")
emp_1.show_info()
emp_2.show_info()
emp_1.increment_salary(2000)
emp_1.show_info()
emp_2.show_info()
'''
lst_emp = []

def load_emp():
    """Load Employee rows from empdata.txt (one CSV row per employee) into
    the module-level lst_emp, then report the total count."""
    with open("empdata.txt") as f:
        rows = f.readlines()
    for row in rows:
        fields = row.strip("\n").split(",")
        # Column order: empno, ename, qualification, salary, dept_name.
        emp = Employee(int(fields[0]), fields[1], fields[2],
                       int(fields[3]), fields[4])
        lst_emp.append(emp)
    print(f"Total Employees count : {len(lst_emp)}")
def showDeptNames():
    """Print each distinct department name once (unordered)."""
    for name in {emp.dept_name for emp in lst_emp}:
        print(name)
def showAllQualifications():
    """Print each distinct qualification once (unordered)."""
    for qual in {emp.qualification for emp in lst_emp}:
        print(qual)
def maxSalaryEmp():
    """Show info for every employee earning the maximum salary."""
    top = max(emp.salary for emp in lst_emp)
    for emp in lst_emp:
        if emp.salary == top:
            emp.show_info()
def showEmpCountByDeptName():
    # TODO: not yet implemented — count employees per department.
    pass

def showTotalSalByDeptName():
    # TODO: not yet implemented — sum salaries per department.
    pass

def showEmpCountByQual():
    # TODO: not yet implemented — count employees per qualification.
    pass
# Script driver: load the data file, then run each report in turn.
load_emp()
print("All Dept name")
showDeptNames()
print("All qualifications")
showAllQualifications()
print("Max Salary Employees")
maxSalaryEmp()
"""
Скрипт для rsynca двух каталогов с рекурсивным обходом.
Не добавляет то что уже есть в каталоге назначения.
Для работы с путями используется MagicPath
"""
import os
from dirsync import sync
import MagicPath
# Source and destination roots for the one-way sync.
source_dir = "/home/user/dir/git"
dest_dir = "/home/user/git"

class Prog:
    """One-shot sync of sub-directories from source_dir to dest_dir.

    Skips hidden/system directories and anything already present at the
    destination (no re-sync of existing targets)."""
    @classmethod
    def run(cls):
        # Wrap both roots in MagicPath for its helpers (ls_dir/basename/addpath).
        sourcedir = MagicPath.FilePath(os.path.abspath(source_dir))
        targetdir = MagicPath.FilePath(os.path.abspath(dest_dir))
        listdirs = sourcedir.ls_dir()
        print(listdirs.__len__())
        sourcedirs = []
        for dirr in listdirs:
            print(dirr)
            # Skip the "_iso" staging dir plus "$..." and dot-prefixed dirs.
            if not dirr.basename() == "_iso" and not dirr.basename()[0] == "$" and not dirr.basename()[0] == ".":
                sourcedirs.append(dirr)
        print(sourcedirs.__len__())
        for source in sourcedirs:
            # Empty directories are not synced.
            if source.ls().__len__() < 1:
                print(source.basename())
                print("Not file")
                continue
            # Already present at the destination -> leave untouched.
            if targetdir.addpath(source.basename()).is_dir():
                print(source.basename())
                print("Is already in dest")
                continue
            print(source.path())
            sync(source.path(), targetdir.addpath(source.basename()).path(), 'sync', create=True)

if __name__ == "__main__":
    Prog.run()
|
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.core.validators import RegexValidator
class CustomUser(AbstractUser):
    """User model that authenticates by e-mail instead of username."""
    first_name = models.CharField(max_length=20, null=False, blank=False)
    last_name = models.CharField(max_length=20, null=False, blank=False)
    # Stored as an int; human-readable label via get_gender_display().
    GENDER_CHOICES = (
        (0, 'male'),
        (1, 'female'),
        (2, 'not specified'),
    )
    gender = models.IntegerField(choices=GENDER_CHOICES, null=True, blank=True)
    # Login identifier (see USERNAME_FIELD below); must be unique.
    email = models.EmailField(max_length=150, blank=False, unique=True)
    phone_regex = RegexValidator(regex=r'^\+?1?\d{9,15}$', message="Phone number must be entered in the format: '+919999999999'. Up to 15 digits allowed.")
    phone_number = models.CharField(validators=[phone_regex], max_length=17,null=True, blank=True, unique=True)
    is_active = models.BooleanField(default=True, blank=True)
    address = models.CharField(max_length=100, null=True, blank=True)
    birth_date = models.DateField(null=False, blank=False)
    avatar = models.ImageField(upload_to='avatars/', null=True, blank=True)
    session_token = models.CharField(max_length=10, default='0')
    # Remove AbstractUser.username entirely; e-mail takes its place.
    # NOTE(review): with username = None a custom UserManager is normally
    # required for create_user/createsuperuser — confirm one exists in the app.
    username = None
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['first_name', 'last_name', 'birth_date']
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch.nn as nn
from core.circuit import Circuit
from core.controllers.lstm_controller import LSTMController as Controller
from core.accessors.static_accessor import StaticAccessor as Accessor
class NTMCircuit(Circuit):
    """Neural-Turing-Machine-style circuit: LSTM controller + static memory
    accessor, with a linear read-out over [hidden state ; read vectors]."""
    def __init__(self, args):
        super(NTMCircuit, self).__init__(args)
        # functional components (params come from the Circuit base class)
        self.controller = Controller(self.controller_params)
        self.accessor = Accessor(self.accessor_params)
        # build model: map concat(hidden, read vectors) -> output
        self.hid_to_out = nn.Linear(
            self.hidden_dim + self.read_vec_dim, self.output_dim
        )
        self._reset()

    def _init_weights(self):
        # Intentionally a no-op: weight init is presumably handled by the base
        # class / submodules — confirm against core.circuit.Circuit.
        pass
|
# Read n integers (one per line) and print each value minus the minimum.
count = int(input())
values = [int(input()) for _ in range(count)]
base = min(values)
for value in values:
    print(value - base)
|
class Solution(object):
    def longestCommonPrefix(self, strs):
        """
        :type strs: List[str]
        :rtype: str

        Shrink a candidate prefix (the first string) against every string:
        keep only the leading run of characters they share.
        """
        if len(strs) == 0:
            return ""
        prefix = strs[0]
        for s in strs:
            # Count how many leading characters s shares with prefix.
            limit = min(len(prefix), len(s))
            matched = 0
            while matched < limit and s[matched] == prefix[matched]:
                matched += 1
            prefix = prefix[:matched]
        return prefix
print(Solution().longestCommonPrefix(['abb','abbc'])[-1:-2])
|
from peewee import MySQLDatabase, PrimaryKeyField, Model, DateTimeField
from datetime import datetime
from config import *
# MySQL connection configured from the project-level DATABASE dict (config.py).
database = MySQLDatabase(host = DATABASE["host"],
                         port = DATABASE["port"],
                         user = DATABASE["user"],
                         password = DATABASE["password"],
                         database = DATABASE["database"])
class BaseModel(Model):
    """Base peewee model: integer primary key plus created/updated stamps."""
    id = PrimaryKeyField(unique=True)
    # FIX: the original passed datetime.now().strftime(...) directly, which is
    # evaluated ONCE at import time, freezing the same timestamp into every
    # row. A callable default is re-evaluated per insert.
    created_at = DateTimeField(default=lambda: datetime.now().strftime("%Y/%m/%d %H:%M:%S"))
    updated_at = DateTimeField(default=lambda: datetime.now().strftime("%Y/%m/%d %H:%M:%S"))

    def save(self, *args, **kwargs):
        """Stamp updated_at and persist.

        FIX: the original assigned self.update_at (typo — a dead attribute),
        and dropped both *args/**kwargs and the return value of super().save().
        """
        self.updated_at = datetime.now()
        return super(BaseModel, self).save(*args, **kwargs)

    def to_hash(self, model, data):
        ''' Returns a hash of the BaseModel in the database '''
        data['id'] = self.id
        data['created_at'] = self.created_at
        data['updated_at'] = self.updated_at
        return data

    class Meta:
        # All subclasses share the configured MySQL connection.
        database = database
        order_by = ("id", )
|
from django.urls import path
from django.contrib.auth import views as auth_views
from .views import index, city_view, add_new_place, place_view, all_places
# Route table. Order matters: 'places/all/' must precede the '<str:city>/'
# catch-all or it would be captured as a city name.
urlpatterns = [
    path('login/', auth_views.LoginView.as_view(), name='login'),
    path('logout/', auth_views.LogoutView.as_view(), name='logout'),
    path('', index, name='index'),
    path('places/all/', all_places, name='all_places'),
    path('<str:city>/', city_view, name='city_view'),
    path('<str:city>/new/', add_new_place, name='new_place'),
    path('<str:city>/<str:place>/', place_view, name='place_view'),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.3 on 2016-09-05 08:35
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Removes the Vote.last_modification field introduced in 0013."""
    dependencies = [
        ('elections', '0013_vote_last_modification'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='vote',
            name='last_modification',
        ),
    ]
|
#refer to
#http://docs.opencv.org/3.0-beta/doc/py_tutorials/py_gui/py_video_display/py_video_display.html
#2016-4-26
import numpy as np
import cv2

# Show the default webcam feed until 'q' is pressed.
cap = cv2.VideoCapture(0)
while True:
    ret, frame = cap.read()
    # FIX: the original ignored `ret`; on a failed grab (camera unplugged,
    # no device) frame is None and imshow would crash.
    if not ret:
        break
    cv2.imshow('frame', frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cap.release()
cv2.destroyAllWindows()
|
import numpy as np
def mean_absolute_percentage_error(y_true, y_pred):
    """MAPE in percent: mean(|(true - pred) / true|) * 100.
    NOTE: undefined when y_true contains zeros (division by zero)."""
    actual = np.array(y_true)
    forecast = np.array(y_pred)
    return np.mean(np.abs((actual - forecast) / actual)) * 100
def symetrique_mean_absolute_percentage_error(y_true, y_pred):
    """sMAPE in percent: mean(|true - pred| / (pred + true)) * 200."""
    actual, forecast = np.array(y_true), np.array(y_pred)
    ratios = np.abs((actual - forecast) / (forecast + actual))
    return np.mean(ratios) * 200
def mean_absolute_error(y_true, y_pred):
    """MAE: mean of the absolute deviations between truth and prediction."""
    deviations = np.abs(np.array(y_true) - np.array(y_pred))
    return np.mean(deviations)
# In[] code
from pandas import datetime
from matplotlib import pyplot as plt
import plotly.plotly as py
import plotly.graph_objs as go
import pandas as pd
import numpy as np
def parser(x):
    """Parse a 'dd/mm/YYYY HH:MM:SS' timestamp string into a datetime."""
    return datetime.strptime(x, '%d/%m/%Y %H:%M:%S')
# Load the emergency-passage log; columns 2 and 3 hold 'dd/mm/YYYY HH:MM:SS'
# timestamps parsed with parser() above.
df = pd.read_excel('/Users/bounouamustapha/Desktop/data/Passage.xlsx', parse_dates=[2, 3], date_parser=parser)
data = df.drop(columns=['NUM_SEJOUR', 'CODE', 'DATE_SORTIE', 'CCMU', 'GEMSA'], axis=1)
# Index the series by arrival time.
data.index = data['DATE_ARRIVEE']
del data['DATE_ARRIVEE']
data['nb'] = 1  # one row per arrival, so summing 'nb' counts arrivals
hourly = data.resample('H').sum()
daily = data.resample('D').sum()
import matplotlib.pyplot as plt
# In[] code
def showByHour(x, y):
    """Plot the hourly arrival counts between timestamps x and y (plotly)."""
    data_hour = hourly[x:y]
    data = [
        go.Scatter(
            x=data_hour.index,
            y=data_hour
        )
    ]
    py.plot(data)
def getInfo(x, y):
    """Print max/min of the hourly slice [x:y] plus yearly mean and variance."""
    data_hour = hourly[x:y]
    mean = hourly.resample('Y').mean()
    print("max: " + str(data_hour.max()))
    print("min: " + str(data_hour.min()))
    print("moyenne: " + str(mean))
    print("variance: " + str(np.var(hourly['nb'])))
    np.var  # NOTE(review): no-op expression, probably a leftover
# In[
def showPerHourWeek(x, y):
    """Plot one line per day (7 days starting at x) of hourly counts (plotly)."""
    from pandas import DataFrame
    from datetime import timedelta
    data_hour_week = hourly[x:y]
    data_hour_week['day_of_week'] = data_hour_week.index.weekday_name
    start = data_hour_week.index[0]
    lines = []
    for i in range(0, 7):
        end = start + timedelta(days=1)
        f = data_hour_week[start:end]  # one day's worth of hours
        f['nb'].index = range(0, 25)
        dmd = DataFrame(index=f.index.hour, data=f['nb'])
        dmd = dmd.head(24)  # keep exactly 24 hourly points
        line = go.Scatter(
            x=np.arange(25),
            y=dmd['nb'],
            mode='lines',
            name=str(i)
        )
        lines.append(line)
        start = end
    py.plot(lines)
    return
# In[]
def getPerDayMonth(x, y):
    """Plot daily counts week by week over [x:y] (plotly).

    NOTE(review): indentation reconstructed — the week-slice/plot body is
    assumed to sit inside the `i % 7 == 0` guard; confirm against the source.
    """
    data_week = daily[x:y]
    data_week['day_of_week'] = data_week.index.weekday_name
    z = 0  # week counter
    lines = []
    for i in range(0, len(data_week) - len(data_week) % 7):
        if i % 7 == 0:
            z += 1
            dd = data_week[i:i + 6]
            # dd = dd.groupby('day_of_week')['nb'].mean()
            line = go.Scatter(
                x=dd.index,
                y=dd,
                mode='lines',
                name=str(i)
            )
            lines.append(line)
    py.plot(lines)
    return
# In[]
def getMeanByWeek(x, y):
    """Plot mean hourly count per (weekday, hour) over [x:y] (plotly).

    NOTE(review): an identical getMeanByWeek is defined again further down this
    file and shadows this one at import time — consider deleting one copy.
    """
    data_week = hourly[x:y]
    wed = data_week.groupby([data_week.index.weekday_name, data_week.index.hour])['nb'].mean()
    lines = []
    for idate in wed.index.get_level_values(level=0).unique():
        line = go.Scatter(
            x=np.arange(0, 25),
            y=np.array(wed[idate]),
            mode='lines',
            name=idate
        )
        lines.append(line)
    py.plot(lines)
    return
# In[] code
def getMeanByWeek(x, y):
    """Plot mean hourly count per (weekday, hour) over [x:y] (plotly)."""
    data_week = hourly[x:y]
    # Two-level grouping: weekday name, then hour of day.
    wed = data_week.groupby([data_week.index.weekday_name, data_week.index.hour])['nb'].mean()
    lines = []
    for idate in wed.index.get_level_values(level=0).unique():
        line = go.Scatter(
            x=np.arange(0, 25),
            y=np.array(wed[idate]),
            mode='lines',
            name=idate
        )
        lines.append(line)
    py.plot(lines)
    return
# In[
def gethistoday(x, y):
    """Plot the mean daily count per weekday over [x:y] (plotly)."""
    data_week = daily[x:y]
    wed = data_week.groupby([data_week.index.weekday_name])['nb'].mean()
    print(wed)
    line = go.Scatter(
        x=np.array(wed.index.get_level_values(level=0).unique()),
        y=np.array(wed),
        mode='lines',
    )
    py.plot([line], filename='basic-bar')
    return
# In[]:
def decompose(x, y):
    """Seasonal-decompose (additive) the hourly slice [x:y] and render it."""
    from plotly.plotly import plot_mpl
    from statsmodels.tsa.seasonal import seasonal_decompose
    h = hourly[x:y]
    result = seasonal_decompose(h, model='additive')
    fig = result.plot()
    plot_mpl(fig)
# In[] code
def arima(data, testdate, horizon, nbjourtest, seasonal, seasonality):
    """Fit auto-ARIMA on `nbjourtest` hours before `testdate`, forecast
    `horizon` hours, print MAE, plot observation vs forecast, and return the
    forecast.

    FIX: the original placed `return prevision` BEFORE the plotting code,
    leaving those lines unreachable; the plot now runs before returning.
    """
    from pyramid.arima import auto_arima
    from pyramid.arima import ARIMA
    from datetime import timedelta

    test_date_time = datetime.strptime(testdate, '%d/%m/%Y')
    end_test = test_date_time + timedelta(hours=horizon - 1)
    end_train = test_date_time - timedelta(1)
    start_train = test_date_time - timedelta(hours=nbjourtest)
    train = data[start_train:end_train]
    test = data[test_date_time:end_test]
    print('training set :' + str(start_train) + ' au ' + str(end_train))
    print('test set :' + str(test_date_time) + ' au ' + str(end_test))
    # NOTE(review): the active call hard-codes seasonal=True, m=24 and ignores
    # the `seasonal`/`seasonality` parameters (kept to preserve behavior).
    arima_model = auto_arima(train, seasonal=True, m=24, error_action='ignore', trace=1, stepwise=True)
    # arima_model = auto_arima(train,seasonal=seasonal, m=seasonality,error_action='ignore')
    prevision = arima_model.predict(horizon)
    precision = mean_absolute_error(test, prevision)
    print(arima_model.summary())
    print('-----------------------------------------------------------------------------')
    print('--------Mae : --------' + str(precision) + '--------------------------------')
    x = hourly[start_train:end_test]
    plt.plot(x.index, x)
    plt.plot(test.index, prevision)
    plt.legend(['observation', 'prevision'])
    plt.title('La prevision sur un horizon de :' + str(horizon))
    plt.show()
    return prevision
# In[] test
def acf(y, lag):
    """Plot the autocorrelation function of y up to `lag` lags."""
    import statsmodels.tsa.api as smt
    smt.graphics.plot_acf(y, lags=lag)
    plt.show()
# In[] test
def pacf(y, lag):
    """Plot the partial autocorrelation function of y up to `lag` lags."""
    import statsmodels.tsa.api as smt
    smt.graphics.plot_pacf(y, lags=lag)
    plt.show()
# In[] test
def seasonal_diff(y, lag):
    """Apply a 24-period (daily, on hourly data) seasonal difference, plot its
    ACF and tsplot diagnostics, and return the differenced series."""
    import statsmodels.tsa.api as smt
    y_diff = y - y.shift(24)  # seasonal period hard-coded to 24 hours
    smt.graphics.plot_acf(y_diff, lags=lag)
    tsplot(y_diff[24:], lag)  # drop the leading NaNs created by the shift
    return y_diff[24:]
# In[] test
# In[] test
def adf(y):
    """First-difference y, run the Augmented Dickey-Fuller stationarity test,
    print the statistic / p-value / critical values and return the raw result."""
    import statsmodels.api as sm
    import statsmodels.tsa.api as smt
    y = y - y.shift(1)
    # NOTE(review): drops 24 points although the diff above is lag-1 (only the
    # first point is NaN) — looks copied from the seasonal variant; confirm.
    y = y[24:]
    result = sm.tsa.stattools.adfuller(y)
    print('ADF Statistic: %f' % result[0])
    print('p-value: %f' % result[1])
    print('Critical Values:')
    for key, value in result[4].items():
        print('\t%s: %.3f' % (key, value))
    return result
# In[] test
def tsplot(y, lags=None, figsize=(12, 7), style='bmh'):
    import statsmodels.api as sm
    import statsmodels.tsa.api as smt
    """
    Plot time series, its ACF and PACF, calculate Dickey–Fuller test
    y - timeseries
    lags - how many lags to include in ACF, PACF calculation
    """
    # NOTE(review): the string above follows the imports so it is not a real
    # docstring (just a no-op expression); kept in place to avoid code changes.
    if not isinstance(y, pd.Series):
        y = pd.Series(y)
    with plt.style.context(style):
        fig = plt.figure(figsize=figsize)
        # 2x2 grid: series across the top, ACF and PACF below.
        layout = (2, 2)
        ts_ax = plt.subplot2grid(layout, (0, 0), colspan=2)
        acf_ax = plt.subplot2grid(layout, (1, 0))
        pacf_ax = plt.subplot2grid(layout, (1, 1))
        y.plot(ax=ts_ax)
        # Dickey-Fuller p-value is reported in the title.
        p_value = sm.tsa.stattools.adfuller(y)[1]
        ts_ax.set_title('Time Series Analysis Plots\n Dickey-Fuller: p={0:.5f}'.format(p_value))
        smt.graphics.plot_acf(y, lags=lags, ax=acf_ax)
        smt.graphics.plot_pacf(y, lags=lags, ax=pacf_ax)
        plt.tight_layout()
    plt.show()
# In[
def sarima(data, p, d, q, P, D, Q, s):
    """Fit a SARIMAX(p,d,q)x(P,D,Q,s) model on data and print its summary."""
    import statsmodels.api as sm
    model = sm.tsa.statespace.SARIMAX(data, order=(p, d, q),
                                      seasonal_order=
                                      (P, D, Q, s)).fit(disp=-1)
    print(model.summary())
# In[
# Example run: January 2018 hourly series, SARIMA(0,0,3)(0,1,2,24).
h = hourly['1/1/2018':'31/1/2018']
sarima(h, 0, 0, 3, 0, 1, 2, 24)
|
from collections import defaultdict
class node:
    """A weighted edge: `b` (begin) -> `t` (target) carrying weight `w`."""
    def __init__(self, begin, target, weight):
        self.b, self.t, self.w = begin, target, weight
class Graph():
    """Graph holding both an edge list (self.graph, `node` objects via
    addEdge) and an adjacency matrix (self.graph_matrix).

    NOTE(review): Dijkstra() indexes self.graph like an adjacency matrix
    (self.graph[s][n]), which conflicts with addEdge() appending node objects
    to the same list — confirm which representation callers actually populate
    before each algorithm. Indentation below is reconstructed; verify against
    the original source.
    """
    def __init__(self, vertices):
        self.V = vertices
        self.graph = []  # filled by addEdge()
        self.graph_matrix = [[0 for column in range(vertices)]
                             for row in range(vertices)]

    def addEdge(self, u, v, w):
        # Append edge u -> v with weight w to the edge list.
        new = node(u, v, w)
        self.graph.append(new)

    def Dijkstra(self, s):
        """Single-source shortest paths from s; returns {str(vertex): dist}.
        `check` holds tentative distances (0 = unknown); `quiz` marks visited
        vertices with 0."""
        result = dict()
        e = str(s)
        result[e] = 0
        check = self.graph[s]
        quiz = [None] * len(check)
        for i in range(len(check)):
            quiz[i] = i + 1
        quiz[s] = 0
        need = []
        need.append(s)
        while len(need) < (len(check)):
            # Seed z with any unvisited vertex that has a known distance...
            for t in range(len(quiz)):
                if quiz[t] != 0 and check[t] != 0:
                    z = t
                    break
                else:
                    continue
            # ...then refine z to the unvisited vertex of minimum distance.
            for i in range(len(check)):
                if quiz[i] != 0:
                    if check[i] != 0 and check[z] > check[i] and z not in need:
                        z = i
            need.append(z)
            y = str(z)
            result[y] = check[z]
            check1 = []
            quiz[z] = 0
            # Relax every edge out of z into check1.
            for n in range(len(self.graph[z])):
                if self.graph[z][n] != 0:
                    p = result[y] + self.graph[z][n]
                    check1.append(p)
                else:
                    check1.append(0)
            # Merge the relaxed distances into the tentative ones.
            for m in range(len(check)):
                if check[m] == 0:
                    check[m] = check1[m]
                elif check1[m] != 0 and check1[m] < check[m]:
                    check[m] = check1[m]
                else:
                    check[m] = check[m]
            # Clear distances of already-finalized vertices.
            for m in range(len(check) - 1):
                if quiz[m] == 0:
                    check[m] = 0
        want = dict()
        for i in range(len(self.graph[0])):
            l = str(i)
            want[l] = result[l]
        return want

    def Kruskal(self):
        """Minimum spanning tree; returns {'u-v': weight} for chosen edges.
        `an` is a list of connected components (each a vertex list), merged
        edge by edge in ascending weight order."""
        result = dict()
        g = self.graph
        arr = self.quicksort(g)  # edges sorted by weight ascending
        if len(arr) == 1:
            one = arr[0]
            f = str(one.b) + '-' + str(one.t)
            result[f] = one.w
            return result
        an = []
        for n in arr:
            if n.b not in an:
                an.append(n.b)
            if n.t not in an:
                an.append(n.t)
        an = sorted(an)
        for n in range(len(an)):
            a = [an[n]]
            an[n] = a
        # The two lightest edges are taken unconditionally.
        one = arr[0]
        two = arr[1]
        f = str(one.b) + '-' + str(one.t)
        s = str(two.b) + '-' + str(two.t)
        result[f] = one.w
        result[s] = two.w
        i = 2
        an[one.b].append(one.t)
        an.remove(an[one.t])
        for n in range(len(an)):
            if two.b in an[n]:
                x = n
            if two.t in an[n]:
                y = n
        if x != y:
            an[x].extend(an[y])
            an.remove(an[y])
        # Merge components until only one remains.
        while len(an) != 1:
            for n in range(len(an)):
                if arr[i].b in an[n]:
                    x = n
                if arr[i].t in an[n]:
                    y = n
            if x != y:
                an[x].extend(an[y])
                an.remove(an[y])
                t = str(arr[i].b) + '-' + str(arr[i].t)
                result[t] = arr[i].w
            else:
                i += 1  # edge would close a cycle; try the next one
        return result

    def quicksort(self, data):
        """Sort the edge list by weight ascending (last element as pivot).
        Mutates `data` (removes the pivot) — callers pass self.graph."""
        if len(data) < 2:
            return data
        else:
            mid = data[len(data) - 1]
            left = []
            right = []
            data.remove(mid)
            for a in data:
                if a.w >= mid.w:
                    right.append(a)
                else:
                    left.append(a)
            return self.quicksort(left) + [mid] + self.quicksort(right)
"""
https://www.cnblogs.com/skywang12345/p/3711496.html
https://blog.csdn.net/sm20170867238/article/details/89988982
https://github.com/yanjiyue/sac/blob/master/HW1/quicksort.ipynb
https://blog.csdn.net/aivenzhong/article/details/93648557
https://www.bilibili.com/video/av79186816?from=search&seid=17195856858988781128
https://www.bilibili.com/video/av36884622/?spm_id_from=333.788.videocard.5
https://www.bilibili.com/video/av36885495/?spm_id_from=333.788.videocard.2
https://www.bilibili.com/video/av36884375?from=search&seid=718402740995726549
https://www.bilibili.com/video/av26403085?from=search&seid=718402740995726549
https://baike.baidu.com/item/最短路径/6334920?fr=aladdin
https://blog.csdn.net/yalishadaa/article/details/55827681
https://baike.baidu.com/item/最小生成树/5223845?fr=aladdin
https://www.jianshu.com/p/4377fa388ab9
"""
|
#!/usr/bin/env python
# Funtion:
# Filename:
import socketserver
import json, hashlib, os
from conf import settings
class MyTCPHandlers(socketserver.BaseRequestHandler):
    """Per-connection TCP handler: authenticate the client, then loop
    receiving commands until the connection drops."""

    def handle(self):
        self.authentication()  # user login
        while True:
            try:
                self.data = self.request.recv(1024)
            except ConnectionResetError as e:
                print('error:', e)
                break

    def authentication(self):
        """Receive the login payload and extract the user name."""
        self.user_data = self.request.recv(1024)
        if self.user_data == b'':  # empty read: the client already disconnected
            print('123')
        print(self.user_data)
        # FIX: the original indexed the decoded *string* with a key
        # (User_data['user_name']), which raises TypeError at runtime. The
        # payload is expected to be JSON (json is imported at module level),
        # so parse it into a dict first.
        User_data = json.loads(self.user_data.decode())
        username = User_data['user_name']
        user_info_dir = settings.BASE_DIR + '/db' + '/user_info'
def run():
    """Start the threaded TCP server and release the socket on shutdown."""
    print("server is running...")
    server = socketserver.ThreadingTCPServer((settings.HOST, settings.PORT), MyTCPHandlers)
    # FIX: serve_forever() blocks, so the original server_close() call was
    # unreachable; run it from finally so the socket is closed on exit.
    try:
        server.serve_forever()
    finally:
        server.server_close()
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Relaxes Member.member_type to an optional 3-char choice field."""
    dependencies = [
        ('users', '0021_auto_20150803_2332'),
    ]
    operations = [
        migrations.AlterField(
            model_name='member',
            name='member_type',
            field=models.CharField(blank=True, max_length=3, null=True, choices=[(b'STD', b'Student'), (b'EMP', b'Employee'), (b'STF', b'Staff')]),
            preserve_default=True,
        ),
    ]
|
from base import BaseTestCase
from app.models.place import Place
from app.models.state import State
from app.models.city import City
from app.models.user import User
from fixtures import *
import unittest # for unittest.skip()
class PlaceTestCase(BaseTestCase):
    """CRUD tests for the /places endpoint; relies on User/State/City fixtures
    created by helpers on BaseTestCase."""
    table = [User, State, City, Place]  # tables managed per test run
    path = '/places'
    example = fixt_places[0]

    def test_cities(self):
        # Fixture helpers return the last state/city they created.
        last_state, last_city = self.create_states_and_cities()
        self.check(last_state.name, fixt_states[-1]['name'])
        self.check(last_city.name, fixt_cities[-1]['name'])
        city = City.get(City.id == 3, City.state == 1)
        self.check(fixt_cities[2]['name'], city.name)

    def test_users(self):
        last_user = self.create_users()
        self.check(last_user.email, fixt_users[-1]['email'])

    def test_create(self):
        self.create_states_and_cities()
        self.create_users()
        count = 1
        for place in fixt_places:
            # It should create a place with sequential ids.
            last_place = self.create_row(place, '201 CREATED')
            self.check(last_place.id, count)
            count += 1
        # It should return bad request with bad data.
        last_place = self.create_row(fixt_place_br, '400 BAD REQUEST')
        self.check(last_place.id, 3)
        # It should return code 10003 when trying to create place with duplicated name
        #last_place = self.create_row(fixt_dupl_place, 10003)
        #self.check(last_place.id, 3)

    def test_list(self):
        self.create_states_and_cities()
        self.create_users()
        self.check_list()

    def test_get(self):
        self.create_states_and_cities()
        self.create_users()
        self.check_get('Place')

    def test_delete(self):
        self.create_states_and_cities()
        self.create_users()
        self.check_delete('Place')

    def test_update(self):
        self.create_states_and_cities()
        self.create_users()
        upd_data = {'name':'Hilton', 'number_bathrooms':2, 'max_guest': 10}
        self.check_update(upd_data)
|
from django.urls import path
from . import views
app_name = 'micro'  # URL namespace for reverse() lookups ('micro:index', ...)
urlpatterns = [
    path('', views.index, name='index'),
    path('send/', views.send, name='send'),
]
import numpy as np
def anisodiff1(img, niter=10, kappa=20, option=1):
    """
    Anisotropic diffusion.
    Usage:
        imgout = anisodiff(im, niter, kappa, gamma, option)
    Arguments:
        img    - input image
        niter  - number of iterations
        kappa  - conduction coefficient 20-100 ?
        option - 1 sigmoidFormula
                 2 tanhFormula
    Returns:
        imgout - diffused image.
    """
    diffused = img.astype('float32').copy()
    # Scratch buffers reused every iteration to hold the divergence terms.
    flux_y = np.zeros_like(diffused)
    flux_x = np.zeros_like(diffused)
    for _ in range(niter):
        grad_y, grad_x = forwardDifferenceGradient(diffused)
        if option == 1:
            cond_y, cond_x = sigmoidFormula(grad_y, grad_x, kappa)
        elif option == 2:
            cond_y, cond_x = tanhFormula(grad_y, grad_x, kappa)
        # Divergence of the conducted flux: subtract the flux of the
        # neighbouring row/column from each pixel's own flux.
        flux_y[:] = cond_y
        flux_x[:] = cond_x
        flux_y[1:, :] -= cond_y[:-1, :]
        flux_x[:, 1:] -= cond_x[:, :-1]
        diffused += 0.25 * (flux_y + flux_x)
    return diffused
def forwardDifferenceGradient(img):
    """Forward differences along axis 0 (Y) and axis 1 (X); the last row and
    column are zero-padded so the outputs match the input shape."""
    grad_y = np.zeros_like(img)
    grad_x = np.zeros_like(img)
    grad_y[:-1, :] = img[1:, :] - img[:-1, :]
    grad_x[:, :-1] = img[:, 1:] - img[:, :-1]
    return grad_y, grad_x
def sigmoidFormula(gradientY, gradientX, k):
    """Exponential Perona-Malik conduction, exp(-(g/k)^2), applied to both
    gradients. NOTE(review): despite the name this is the Gaussian/exponential
    option, not a sigmoid — confirm naming intent."""
    scaled_y = (gradientY / k) ** 2.
    scaled_x = (gradientX / k) ** 2.
    return np.exp(-scaled_y) * gradientY, np.exp(-scaled_x) * gradientX
def tanhFormula(gradientY, gradientX, k):
    """Rational Perona-Malik conduction, 1 / (1 + (g/k)^2), applied to both
    gradients. NOTE(review): not actually a tanh — confirm naming intent."""
    cond_y = 1. / (1. + (gradientY / k) ** 2)
    cond_x = 1. / (1. + (gradientX / k) ** 2)
    return cond_y * gradientY, cond_x * gradientX
import numpy as np
import cv2
import matplotlib.pyplot as plt
import scipy.io as sio
def phase_correlation(src_1, src_2):
    """Estimate the (x, y) translation between two equally-sized images via
    OpenCV phase correlation on zero-padded copies; returns (nan, nan) when
    the detected shift exceeds the original image size."""
    rows, cols = src_1.shape
    # Pad by a full image on every side so large shifts stay in frame.
    padded_a = cv2.copyMakeBorder(src_1, rows, rows, cols, cols,
                                  cv2.BORDER_CONSTANT, value=0)
    padded_b = cv2.copyMakeBorder(src_2, rows, rows, cols, cols,
                                  cv2.BORDER_CONSTANT, value=0)
    (x0, y0), _ = cv2.phaseCorrelate(np.float32(padded_a), np.float32(padded_b))
    if abs(x0) > rows or abs(y0) > cols:
        return np.nan, np.nan
    return x0, y0
def high_pass_filter(ht, wd):
    """Return an (ht, wd) high-pass emphasis mask H = (1 - X)(2 - X), where X
    is the outer product of cosine windows over [-pi/2, pi/2]."""
    row_win = np.cos(np.pi * np.linspace(-0.5, 0.5, num=ht))
    col_win = np.cos(np.pi * np.linspace(-0.5, 0.5, num=wd))
    X = np.outer(row_win, col_win)
    return (1.0 - X) * (2.0 - X)
# def phase_correlation(src_1, src_2):
# rows, cols = src_1.shape
# x0 = 0
# y0 = 0
# # Fourier Transfer
# f1 = np.fft.fft2(src_1)
# f2 = np.fft.fft2(src_2)
#
# # f1_shift = np.fft.fftshift(f1)
# # f2_shift = np.fft.fftshift(f2)
#
# R = f1 * np.conj(f2) / np.absolute(f1 * np.conj(f2))
#
# # r_shift = np.fft.ifftshift(R)
# r = np.fft.ifft2(R)
#
# x0, y0 = np.unravel_index(r.argmax(), r.shape)
#
# x0 = rows - x0
# y0 = cols - y0
#
# # if x0 >
#
#
# return x0, y0
# Load the test image as grayscale.
img = cv2.imread('lena.png', 0)
rows, cols = img.shape
#########################################################
# translation testing
tx = 105.
ty = -115.
# Pure-translation affine matrix.
M = np.float32([[1, 0, tx],
                [0, 1, ty]])
dst = cv2.warpAffine(img, M, (cols, rows))
x0, y0 = phase_correlation(src_1=img, src_2=dst)
ret = np.array([x0, y0])
# Recovered shift — presumably matches (tx, ty) up to sign; confirm.
print(np.round(ret))
# cv2.namedWindow('img', flags=cv2.WINDOW_NORMAL)
# cv2.namedWindow('dst', flags=cv2.WINDOW_NORMAL)
# cv2.imshow('img', img)
# cv2.imshow('dst', dst)
# cv2.waitKey(0)
# #########################################################
# # rotation and scale testing
# theta = 91
# scale = 1
# M = cv2.getRotationMatrix2D((cols/2, rows/2), theta, scale)
# dst = cv2.warpAffine(dst, M, (cols, rows))
#
# plt.subplot(321),plt.imshow(img, cmap='gray')
# plt.title('Input Image 1'), plt.xticks([]), plt.yticks([])
# plt.subplot(322),plt.imshow(dst, cmap='gray')
# plt.title('Input Image 2'), plt.xticks([]), plt.yticks([])
#
# ###########################
# # Fourier Transform
# dft_src = np.fft.fft2(img)
# dft_src_shift = np.fft.fftshift(dft_src)
#
# dft_dst = np.fft.fft2(dst)
# dft_dst_shift = np.fft.fftshift(dft_dst)
#
# ###########################
# # Compute maginitude and transfer to polar coordinate
# magnitude_spectrum_src = np.abs(dft_src_shift)
# magnitude_spectrum_dst = np.abs(dft_dst_shift)
#
# # plt.subplot(323),plt.imshow(np.log(magnitude_spectrum_src), cmap='gray')
# # plt.title('Magnitude Spectrum Image 1'), plt.xticks([]), plt.yticks([])
# # plt.subplot(324),plt.imshow(np.log(magnitude_spectrum_dst), cmap='gray')
# # plt.title('Magnitude Spectrum Image 2'), plt.xticks([]), plt.yticks([])
#
# # High pass filter
# magnitude_spectrum_src_hp = high_pass_filter(rows, cols) * magnitude_spectrum_src
# magnitude_spectrum_dst_hp = high_pass_filter(rows, cols) * magnitude_spectrum_dst
#
# plt.subplot(323),plt.imshow(magnitude_spectrum_src_hp, cmap='gray')
# plt.title('Magnitude Spectrum Image 1'), plt.xticks([]), plt.yticks([])
# plt.subplot(324),plt.imshow(magnitude_spectrum_dst_hp, cmap='gray')
# plt.title('Magnitude Spectrum Image 2'), plt.xticks([]), plt.yticks([])
#
# # change to log-polar
# polar_magnitude_spectrum_src = cv2.logPolar(magnitude_spectrum_src_hp, (rows/2, cols/2), 1, cv2.INTER_LINEAR)
# polar_magnitude_spectrum_dst = cv2.logPolar(magnitude_spectrum_dst_hp, (rows/2, cols/2), 1, cv2.INTER_LINEAR)
#
# ###########################
# # phase correlation
# scale0, theta0= phase_correlation(polar_magnitude_spectrum_src, polar_magnitude_spectrum_dst)
#
# # t_f_src = np.fft.fft2(polar_magnitude_spectrum_src)
# # t_f_dst = np.fft.fft2(polar_magnitude_spectrum_dst)
# #
# # a1 = np.angle(t_f_src)
# # a2 = np.angle(t_f_dst)
# #
# # theta_cross = np.exp(1j * (a1 - a2))
# # theta_phase = np.real(np.fft.ifft2(theta_cross))
# #
# # theta0, scale0 = np.unravel_index(theta_phase.argmax(), theta_phase.shape)
#
# DPP = 360 / rows
#
# theta0 = DPP * (theta0 - 1)
#
# scale0 = np.exp(scale0)
#
# print(theta0, scale0)
#
# plt.subplot(325),plt.imshow(polar_magnitude_spectrum_src, cmap='gray')
# plt.title('Log Polar Magnitude Spectrum Image 1'), plt.xticks([]), plt.yticks([])
# plt.subplot(326),plt.imshow(polar_magnitude_spectrum_dst, cmap='gray')
# plt.title('Log Polar Magnitude Spectrum Image 2'), plt.xticks([]), plt.yticks([])
# # plt.show()
#########################################################
# rotation and scale testing
# Rotation and scale testing: rotate 45 degrees and shrink to 0.5 about the centre.
theta = 45
scale = 0.5
M = cv2.getRotationMatrix2D((cols/2, rows/2), theta, scale)
dst = cv2.warpAffine(dst, M, (cols, rows))
plt.figure()
plt.subplot(321), plt.imshow(img, cmap='gray')
plt.title('Input Image 1'), plt.xticks([]), plt.yticks([])
plt.subplot(322), plt.imshow(dst, cmap='gray')
plt.title('Input Image 2'), plt.xticks([]), plt.yticks([])
###########################
# Fourier Transform
dft_src = cv2.dft(np.float32(img), flags=cv2.DFT_COMPLEX_OUTPUT)
dft_src_shift = np.fft.fftshift(dft_src)
dft_dst = cv2.dft(np.float32(dst), flags=cv2.DFT_COMPLEX_OUTPUT)
dft_dst_shift = np.fft.fftshift(dft_dst)
###########################
# Compute maginitude and transfer to polar coordinate
magnitude_spectrum_src = cv2.magnitude(dft_src_shift[:, :, 0], dft_src_shift[:, :, 1])
magnitude_spectrum_dst = cv2.magnitude(dft_dst_shift[:, :, 0], dft_dst_shift[:, :, 1])
plt.subplot(323), plt.imshow(np.log(magnitude_spectrum_src), cmap='gray')
plt.title('Magnitude Spectrum Image 1'), plt.xticks([]), plt.yticks([])
plt.subplot(324), plt.imshow(np.log(magnitude_spectrum_dst), cmap='gray')
plt.title('Magnitude Spectrum Image 2'), plt.xticks([]), plt.yticks([])
# High pass filter: suppress the DC-heavy centre before the log-polar mapping.
magnitude_spectrum_src_hp = high_pass_filter(rows, cols) * magnitude_spectrum_src
magnitude_spectrum_dst_hp = high_pass_filter(rows, cols) * magnitude_spectrum_dst
plt.subplot(325), plt.imshow(magnitude_spectrum_src_hp, cmap='gray')
plt.title('Magnitude Spectrum Image 1'), plt.xticks([]), plt.yticks([])
plt.subplot(326), plt.imshow(magnitude_spectrum_dst_hp, cmap='gray')
plt.title('Magnitude Spectrum Image 2'), plt.xticks([]), plt.yticks([])
# change to log-polar: rotation/scale of the spectrum become translations there.
polar_src = cv2.logPolar(magnitude_spectrum_src_hp, (rows/2, cols/2), 1, cv2.INTER_LINEAR)
polar_dst = cv2.logPolar(magnitude_spectrum_dst_hp, (rows/2, cols/2), 1, cv2.INTER_LINEAR)
###########################
# phase correlation
# scale0, theta0 = phase_correlation(polar_src, polar_dst)
# phase correlation
theta_f1 = np.fft.fft2(polar_src)
theta_f2 = np.fft.fft2(polar_dst)
a1 = np.angle(theta_f1)
a2 = np.angle(theta_f2)
# Phase-only cross-power spectrum; its inverse FFT peaks at the shift.
theta_cross = np.exp(1j * (a1 - a2))
theta_phase = np.real(np.fft.ifft2(theta_cross))
theta0, scale0 = np.unravel_index(theta_phase.argmax(), theta_phase.shape)
# DPP = 360 / cols
#
# theta0 = DPP * theta0
#
# scale0 = np.exp(scale0)
# Raw peak indices — not yet converted to degrees / scale factor (see the
# commented conversion above).
print(scale0, theta0)
plt.show()
cv2.imwrite('lena.png', img)
cv2.imwrite('lena_t.png', dst)
###########################################################################################
# img = sio.loadmat('pc_1.mat')
# img = img['L1']
# rows, cols = img.shape
#
# dst = sio.loadmat('pc_2.mat')
# dst = dst['L2']
#
# plt.figure()
# plt.subplot(121),plt.imshow(img, cmap='gray')
# plt.title('Log Polar Magnitude Spectrum Image 1'), plt.xticks([]), plt.yticks([])
# plt.subplot(122),plt.imshow(dst, cmap='gray')
# plt.title('Log Polar Magnitude Spectrum Image 2'), plt.xticks([]), plt.yticks([])
#
# # phase correlation
# theta_f1 = np.fft.fft2(img)
# theta_f2 = np.fft.fft2(dst)
#
# a1 = np.angle(theta_f1)
# a2 = np.angle(theta_f2)
#
# theta_cross = np.exp(1j * (a1 - a2))
# theta_phase = np.real(np.fft.ifft2(theta_cross))
#
# x0, y0 = np.unravel_index(theta_phase.argmax(), theta_phase.shape)
#
# ret = np.array([x0, y0])
# print(ret)
# print(np.round(ret))
# plt.show()
|
import logging
import absl.logging
import os
import click
import importlib
@click.command()
@click.option('--runner', default="run-ppo", help='Choose runner to start')
@click.option('--working_dir', default="./__WORKING_DIRS__/__STANDARD__/", help='Path to working directory')
@click.option('--config', default="", help="Name of config to load. Leave empty for standard variables in cfg class")
def run(runner, working_dir, config):
    """Dynamically import the requested runner module and hand off to it."""
    module_name = 'runners.' + runner
    runner_module = importlib.import_module(module_name)
    runner_module.main(working_dir, config)
if __name__ == '__main__':
    # Silence absl's duplicate default handler and its pre-init stderr
    # warning, and reduce TensorFlow C++ log noise, before starting click.
    logging.root.removeHandler(absl.logging._absl_handler)
    absl.logging._warn_preinit_stderr = False
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
    run()
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 27 13:03:23 2020
@author: rahul
Here we will use a large spacy model to calculate document vectors and use it along with XGBoost for classification of twitter texts
"""
import pandas as pd
import re
from gensim.parsing import remove_stopwords
import spacy
from sklearn.model_selection import train_test_split
from sklearn.metrics import f1_score, roc_auc_score
import numpy as np
from xgboost.sklearn import XGBClassifier
def clean_data(text):
    """Normalise one raw tweet for vectorisation.

    Strips @mentions, ampersands, listed punctuation, non-ASCII characters
    and any remaining non-alphabetic characters, lowercases the text, then
    drops short words (length <= 3) and stop-words.
    """
    text = re.sub('@[\w]*', '', text) # remove @user
    text = re.sub('&','',text) # remove &
    text = re.sub('[?!.;:,,#@-]', '', text) # remove special characters
    text = re.sub(r'[^\x00-\x7F]+', '', text) # remove Unicode characters
    # BUG FIX: str.replace() treats its first argument as a literal string,
    # so the original text.replace("[^A-Za-z#]", "") was a no-op.  Use
    # re.sub, replacing with a space so adjacent words are not glued
    # together (the split() below collapses the extra whitespace).
    text = re.sub("[^A-Za-z#]", " ", text)
    text = text.lower() # make everything lowercase for uniformity
    # removing short words which are of length 3 or lower(eg. hmm, oh) since they dont add any value
    text = " ".join(w for w in text.split() if len(w)>3)
    # removing stop-words eg. 'we', 'our', 'ours', 'ourselves', 'just', 'don', "don't", 'should'
    text = remove_stopwords(text)
    return text
# ************************* read training data *******************************
df = pd.read_csv('.//data//train_tweets.csv')
print(df.head())
df.drop('id', axis=1, inplace=True)
# NOTE(review): drop_duplicates() returns a new frame that is discarded
# here — missing inplace=True or reassignment, so dedupe is a no-op.
df.drop_duplicates()
print(df.isna().sum())
tweets = df['tweet']
labels = df['label']
# clean the texts
tweets = tweets.apply(lambda x : clean_data(x))
# Since the data is highly skewed
# scale_pos_weight = (#negatives / #positives) rebalances XGBoost's loss.
SCALE_FACTOR = labels.value_counts()[0] / labels.value_counts()[1]
# we load a large model becuase we need vectors
nlp = spacy.load("en_core_web_lg")
# ********************** compute the vectors *********************************
# Disabling other pipes because we don't need them and it'll speed up this part a bit
with nlp.disable_pipes():
    docs = list(nlp.pipe(tweets))
doc_vectors = np.array([doc.vector for doc in docs])
print("doc vectors shape=", doc_vectors.shape)
X_train, X_test, y_train, y_test = train_test_split(doc_vectors, labels, test_size=0.3, random_state=1, stratify=labels)
xgb_model=None
xgb_model = XGBClassifier(
    n_estimators=100,
    scale_pos_weight=SCALE_FACTOR,
    objective='binary:logistic',
    colsample=0.9,
    colsample_bytree=0.5,
    eta=0.1,
    max_depth=8,
    min_child_weight=6,
    subsample=0.9)
print("Training xgb model....")
xgb_model.fit(X_train, y_train)
# NOTE(review): this prints the bound method object, not a score —
# presumably xgb_model.score(X_test, y_test) was intended.
print("score=", xgb_model.score)
preds = xgb_model.predict(X_test)
print("f1 score=", f1_score(preds, y_test))
# we get an f1 score of arroud 0.64
print("ROC AUC score = ", roc_auc_score(y_test, preds)) |
# -*- coding: utf-8 -*-
__version__ = "1.2.4"
import os
import platform
from selenium import webdriver
import time
from unidecode import unidecode
import urllib2
import httplib
import json
import sys
import speech_recognition as sr
import audioop
import urllib
from update import update
from pydub import AudioSegment
from bs4 import BeautifulSoup
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.firefox.options import Options
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from os import path
_author_ = "Sayan Bhowmik and lefela4(Felix)"
# Credentials file ('cred': username on line 1, password on line 2) and the
# assignment-URL file ('voclist': URL on line 1).
fo = open('cred')
fo_ = open('voclist')
r = sr.Recognizer()
html = ''
skipAudio = 0
tries = 0
chrome_options = webdriver.ChromeOptions()
chrome_options.add_experimental_option(
    "excludeSwitches", ["ignore-certificate-errors"])
check_driver_version = 1
driver = webdriver.Chrome(chrome_options=chrome_options)
#driver = webdriver.Firefox(capabilities=firefox_capabilities,firefox_binary=binary, firefox_options = opts)
#====================================================================================================================================================#
login_page = "https://www.vocabulary.com/login/"
my_username = fo.readlines()[0]
fo.seek(0, 0)
my_pass = fo.readlines()[1]
############################################################################
# Link to assignment [For Demo]
url = fo_.readlines()[0] #YOUR URL HERE
##############################################################################
print "Voc at " + url
a_page = url
lastAudioLen = 0
print "[+] STARTING VOCABULARY BOT"
# Shared scraping state, mutated by scrapper() via `global` declarations.
usr = ""
base = ""
old_html = ""
source = ""
soup = ""
op1 = ""
op2 = ""
op3 = ""
op4 = ""
options = []
word = ""
#====================================================================================================================================================#
#====================================================================================================================================================#
def main():
    '''
    # Ignore this section, I actually ended up making a keygen to protect it from the hands of students at my University
    ck = 0
    if(platform.system() == "Linux" or platform.system() == "Darwin" and len(key) >= 10 and ck == 0):
        base = platform.uname()[0][0]
        usr = platform.uname()[1][0]
        u = key[-2:][0]
        b = key[-2:][1]
        if(usr == u and base == b):
            time.sleep(2)
            login();
            assignment();
            ck += 1
    if(platform.system() == "Windows" and len(key) >= 10 and ck == 0):
        usr = os.getenv('username')[2]
        base = platform.uname()[0][0]
        u = key[-2:][0]
        b = key[-2:][1]
        if(usr == u and base == b):
            time.sleep(2)
            login();
            assignment();
            ck += 1
    '''
    # Let the freshly launched browser settle, then log in and start the
    # answer loop.
    time.sleep(2)
    login()
    assignment()
#====================================================================================================================================================#
def login():
    # Open the login page and give it time to render.
    driver.get(login_page)
    time.sleep(3)
    print "Attemp to login in"
    # Fill in the credentials read from the 'cred' file at module load.
    username = driver.find_element_by_name("username")
    password = driver.find_element_by_name("password")
    username.send_keys(my_username)
    password.send_keys(my_pass)
    # The green button submits the login form.
    driver.find_element_by_class_name("green").click()
    time.sleep(1)
    try:
        # Dismiss any JS alert the site throws after login and go straight
        # to the assignment URL.
        alertObj = driver.switch_to.alert
        alertObj.accept()
        print "Alert detected!"
        driver.get(url)
    except Exception as e:
        print("No alert found!")
#====================================================================================================================================================#
def assignment():
    """Open the assignment page and kick off the scrape/answer loop."""
    try:
        # Clear any pending alert before navigating.
        alertObj = driver.switch_to.alert
        alertObj.accept()
        print "Alert detected!"
        driver.get(url)
    except Exception as e:
        print("No alert found!")
    time.sleep(3)
    driver.get(a_page)
    time.sleep(2)
    # Small scroll to make sure the question widgets are in view.
    driver.execute_script("window.scrollTo(100, 100);")
    # scrapper() returns the index of the best answer (or 5 = retry);
    # click_op() then clicks it and recurses through the assignment.
    option_high_score = scrapper()
    click_op(option_high_score)
    print "[+] STARTING VOCABULARY BOT [1]"
    print "\a\a\a\a\a\a\a"
#====================================================================================================================================================#
def speech_to_text(audio):
    """Transcribe the given mp3 file with Google speech recognition.

    The mp3 is converted to WAV first (the recogniser needs PCM input).
    Known chronic mis-hearings of vocabulary words are corrected before
    returning.  Returns None when recognition fails.
    """
    # BUG FIX: use the `audio` parameter instead of the hard-coded
    # "audio.mp3" (identical for the current caller, which passes that
    # name, but the parameter was silently ignored before).
    song = AudioSegment.from_mp3(audio)
    song.export("audio.wav", format="wav")  # Is the same as:
    time.sleep(2)
    with sr.AudioFile("audio.wav") as source:
        # Keep the recording in its own name; the original rebound `audio`
        # here, shadowing the input path.
        recording = r.record(source)
    try:
        text = r.recognize_google(recording)
        print("You said " + text)
        # Corrections for words Google routinely mis-hears; exact-match
        # replacements, same mapping as the original if-chain.
        corrections = {
            "tents": "dense",
            "Tents": "dense",
            "Bode": "mode",
            "lute": "loot",
            "heroin": "harrowing",
            "and you were": "inure",
        }
        return corrections.get(text, text)
    except sr.UnknownValueError:
        print("Could not understand audio")
    except sr.RequestError as e:
        print("Could not request results; {0}".format(e))
def scrapper():
    """Inspect the current question page and return an action code.

    Returns 0-3 for the best multiple-choice answer, 5 for "page handled /
    retry" (picture question, 'next' button, or audio spelling question).
    Heavily stateful: reads and writes the module-level scraping globals.
    """
    try:
        # Dismiss any pending alert before touching the page.
        alertObj = driver.switch_to.alert
        alertObj.accept()
        print "Alert detected!"
        driver.get(url)
    except Exception as e:
        print("No alert found!")
    driver.execute_script("""window.location.reload();""")
    time.sleep(2)
    global html
    global source
    global old_html
    global soup
    global op1
    global op2
    global op3
    global op4
    global options
    global word
    global lastAudioLen
    try:
        # Grab the rendered page body for BeautifulSoup parsing.
        html = driver.execute_script("return document.getElementsByTagName('body')[0].innerHTML;")
    except Exception as e:
        print("Error: " + str(e))
        time.sleep(1)
        driver.get(url)
    source = unidecode(html)
    old_html = source
    time.sleep(1)
    soup = BeautifulSoup(source, "html.parser")
    # Install an XPath helper usable from later execute_script calls.
    driver.execute_script("function getElementByXpath(path) { return document.evaluate(path, document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue; }; window.getElementByXpath = getElementByXpath;")
    try:
        # Picture questions cannot be answered by text matching: reload and
        # let the caller retry.
        c = driver.find_element_by_class_name("wrapper").find_element_by_class_name("instructions").text
        if(c == "choose the best picture for"):
            driver.get(url)
            time.sleep(3)
            return 5
    except Exception as e:
        print "No img detected!"
    try:
        # A visible 'next' button means the previous answer was accepted:
        # advance and retry.
        c = driver.find_element_by_class_name('next')
        if(c):
            nextQ = driver.find_element_by_class_name('next')
            nextQ.click()
            time.sleep(1)
            return 5
    except Exception as e:
        eee = str(e)
        print "No button detected! "
    try:
        # Audio (spell-the-word) question detection: no instruction text.
        isAudio = 0
        try:
            length_check = len(
                soup.findAll('div', attrs={'class': 'instructions'})[0].text.split(" "))
            if(length_check == 0):
                isAudio = 1
        except Exception as e:
            isAudio = 1
            print "AUDIO!"
        c_list = driver.execute_script('return document.getElementsByClassName("spellit")') #driver.find_elements_by_class_name('spellit')
        len_list = len(c_list) - 1
        if(isAudio):
            lastAudioLen = len_list - 1
        print "AUDIO: " + str(len_list)
        print str(c_list)
        c = c_list[len_list]
        if(c and lastAudioLen != len(c_list)):
            print "SPEACH DETECTED! LIST: " + str(len_list)
            if(skipAudio):
                # Surrender path: type a dummy answer, give up the word and
                # advance past it.
                time.sleep(1)
                text_area = driver.find_element_by_class_name('wordspelling')
                text_area.send_keys("Life is good (LG)")
                time.sleep(1)
                try:
                    c.click()
                    time.sleep(1)
                    c.click()
                    time.sleep(1)
                    c.click()
                    time.sleep(1)
                    element2 = driver.find_element_by_class_name('surrender')
                    element2.click()
                    time.sleep(2)
                    element3 = driver.find_element_by_class_name('next')
                    element3.click()
                    time.sleep(1)
                    element4 = driver.find_element_by_class_name('next')
                    element4.click()
                    driver.get(url)
                    time.sleep(3)
                except Exception as e:
                    a = str(e)
                    print "Error at: " + a
            else:
                # Download the word's audio, transcribe it, and type the
                # recognised spelling.
                try:
                    lastAudioLen = len(c_list)
                    audio = driver.find_element_by_class_name('playword')
                    #link_ = driver.execute_script("""return jQuery(".challenge-slide").data().audio;""")
                    link_ = driver.execute_script("""var list = document.getElementsByClassName("challenge-slide"); var obj = list[list.length - 1]; return jQuery(obj).data().audio;""")
                    link = ''.join(["https://audio.vocab.com/1.0/us/", link_, ".mp3"])
                    time.sleep(1)
                    print link
                    testfile = urllib.URLopener()
                    testfile.retrieve(link, "audio.mp3")
                    print "Downloading..."
                    time.sleep(2)
                    text = speech_to_text("audio.mp3")
                    time.sleep(1)
                    text_area_list = driver.find_elements_by_class_name('wordspelling')
                    text_area = text_area_list[len(text_area_list) - 1]
                    text_area.send_keys(text)
                    time.sleep(2)
                    c = c_list[len_list]
                    c.click()
                    time.sleep(2)
                    element4 = driver.find_element_by_class_name('next')
                    element4.click()
                    time.sleep(1)
                except Exception as e:
                    a = str(e)
                    print "Error at: " + a
            return 5
    except Exception as e:
        eee = str(e)
        print "No speach detected!"
    try:
        # Regular multiple-choice question: scrape the word and the four
        # options, then score each option against the dictionary entry.
        length_check = len(
            soup.findAll('div', attrs={'class': 'instructions'})[0].text.split(" "))
        if(length_check != 0):
            word = driver.find_element_by_xpath("//strong[1]").text
            if(str(word) == ""):
                word = driver.find_element_by_class_name("sentence").find_element_by_xpath("//strong[1]").text
            # Stop-words ignored when rating option text against the
            # dictionary definition.
            dic_exceptions = ['and', 'up', 'as', 'if', 'the', 'who', 'has', 'a', 'an', 'to', 'for', 'from', 'is', 'where', 'when', 'why',
                              'how', 'which', 'of', 'one', "one's", 'or', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'ten']
            #========================== Options ==========================#
            val1 = driver.execute_script("""return window.getElementByXpath("//a[@accesskey='1A']").text""")
            val2 = driver.execute_script("""return window.getElementByXpath("//a[@accesskey='2B']").text""")
            val3 = driver.execute_script("""return window.getElementByXpath("//a[@accesskey='3C']").text""")
            val4 = driver.execute_script("""return window.getElementByXpath("//a[@accesskey='4D']").text""")
            op1 = (val1 + "\n").rstrip('\n').split(" ")
            op2 = (val2 + "\n").rstrip('\n').split(" ")
            op3 = (val3 + "\n").rstrip('\n').split(" ")
            op4 = (val4 + "\n").rstrip('\n').split(" ")
            final = []
            options = [op1, op2, op3, op4]
            #========================== Options ==========================#
            op_st = ''.join(["Options: ", str(options)])
            #print op_st
            # Strip stop-words from each option's word list.
            # NOTE(review): popping while iterating only removes the first
            # occurrence each pass — kept as-is.
            for option in options:
                for item in option:
                    for x in dic_exceptions:
                        if x == item:
                            p = option.index(x)
                            option.pop(p)
            #========================== Options Rading ==========================#
            # Fetch the vocabulary.com dictionary page for the target word.
            s_link = "https://www.vocabulary.com/dictionary/"
            link = s_link + word
            html = urllib2.urlopen(link)
            soup = BeautifulSoup(html, "html.parser")
            if(word == "________"):
                return 0
            source_dic2 = None
            print "Word: " + word
            try:
                test = soup.find('div', {"class" : "definitionsContainer"})
                source_dic2 = unidecode(test.prettify())
            except Exception as e:
                eee = str(e)
                print "Error" + eee
                return 0
            # Score each option: count its words that appear in the
            # definition text; highest count wins.
            a = 0
            rate_arr = []
            cpy_rate_arr = []
            for option in options:
                for item in option:
                    if item in source_dic2:
                        a += 1
                print ("{0} -> {1}".format(option, a))
                rate_arr.append(a)
                a = 0
            #========================== Options Rading ==========================#
            cpy_rate_arr = sorted(rate_arr)
            x_pos = cpy_rate_arr[len(cpy_rate_arr) - 1]
            x_pos_2 = cpy_rate_arr[len(cpy_rate_arr) - 2]
            choice = rate_arr.index(max(rate_arr))
            # A tie between the top two scores is reported but the first
            # best-scoring option is still chosen.
            if (x_pos == x_pos_2):
                print "No position found."
            h = choice
            print h
            return h
        else:
            # NOTE(review): missing call parentheses — driver.quit here is
            # a no-op attribute access, the browser is never closed.
            driver.quit
            print "Error: length_check is less or equal to 0"
    except Exception as e:
        print e
def click_op(i):
try:
if(i == 5):
time.sleep(1)
option_high_score = scrapper()
time.sleep(1)
click_op(option_high_score)
return
op = i + 1
ar = ["", "A", "B", "C", "D"]
high = str(op)
b = ''.join([high, ar[op]])
element = driver.find_element_by_xpath('//a[@accesskey="' + b + '"]')
try:
element.click()
except Exception as e:
a = str(e)
print "Error at: " + a
try:
nextQ = driver.find_element_by_class_name('next')
nextQ.click()
except Exception as e:
option_high_score = scrapper()
time.sleep(1)
click_op(option_high_score)
a = str(e)
print "Error quitting... " . a
time.sleep(1)
option_high_score = scrapper()
time.sleep(1)
click_op(option_high_score)
except Exception as e:
option_high_score = scrapper()
time.sleep(1)
click_op(option_high_score)
a = str(e)
print "Error quitting... " . a
#====================================================================================================================================================#
def autoUpdate(): #main()
    """Check GitHub for a newer script version; run main() when current."""
    updated = update("https://raw.githubusercontent.com/lefela4/Vocabulary.com-AutoBot/master/src/vocab_Demo1.py", __version__)
    if(updated == 0):
        # Nothing to update
        main()
    elif(updated == 1):
        # The file have been updated!
        print("Please restart the program!")
    elif(updated == 2):
        # Error
        print("Sorry, an error occurred while preparing the installation. Please go in https://github.com/lefela4/Vocabulary.com-AutoBot/issues and create a new issue with the screen shot of the error!")
# Script entry point: runs immediately at import time (no __main__ guard).
autoUpdate()
|
# Read the FASTA file: skip the header line, then concatenate the sequence
# lines into one DNA string `l`.
with open("/home/cyagen1/Downloads/rosalind_kmer.txt")as f:
    f.readline()
    w=f.readline().strip()
    l=""
    while w:
        l+=w
        w = f.readline().strip()
# For each even k in 4..12, collect every k-mer of `l`, then report those
# whose ends form a reverse-palindromic pair (reverse complement match).
for j,k in enumerate(range(4,13,2)):
    L=[]
    for i in range(len(l)):
        if len(l[i:i+k])==k:
            L.append(l[i:i+k])
            i+=1
        else:
            break
    for hh,i in enumerate(L):
        # Reverse-complement of the k-mer's tail via lowercase staging, so
        # already-replaced bases are not replaced twice.
        t=i[:j + 1:-1].replace('A','t').replace('T','a').replace('G','c').replace('C','g').upper()
        if i[0:j+2]==t:
            # 1-based position and length of the palindromic k-mer.
            print hh+1, len(i)
|
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Meta Configure
$Id$
"""
import zope.schema
import zope.configuration.fields
from zope.component.zcml import handler
from zope.interface import Interface
from zope.publisher.interfaces import browser
from zope.security.checker import CheckerPublic, NamesChecker
from zope.browserresource import metadirectives
from zope.browserresource import metaconfigure as resourcemeta
import z3c.zrtresource
class IZRTResourceDirective(metadirectives.IBasicResourceInformation):
    """Defines a browser ZRT resource"""

    # ZCML attribute: the public name under which the resource is
    # registered (used in @@/resourcename URLs).
    name = zope.schema.TextLine(
        title=u"The name of the resource",
        description=u"""
        This is the name used in resource urls. Resource urls are of
        the form site/@@/resourcename, where site is the url of
        "site", a folder with a site manager.
        We make resource urls site-relative (as opposed to
        content-relative) so as not to defeat caches.""",
        required=True
        )
    # ZCML attribute: filesystem path of the file served as the resource.
    file = zope.configuration.fields.Path(
        title=u"File",
        description=u"The file containing the resource data.",
        required=True
        )
def zrtresource(_context, name, file, layer=browser.IDefaultBrowserLayer,
                permission='zope.Public'):
    """ZCML directive handler: register a ZRT file resource.

    Builds a security checker for the requested permission, wraps the file
    in a ZRTFileResourceFactory and records a registerAdapter action so the
    resource is available under `name` for the given browser layer.
    """
    if permission == 'zope.Public':
        # CheckerPublic is the canonical marker object for public access.
        permission = CheckerPublic
    checker = NamesChecker(resourcemeta.allowed_names, permission)
    factory = z3c.zrtresource.ZRTFileResourceFactory(file, checker, name)
    _context.action(
        discriminator = ('resource', name, browser.IBrowserRequest, layer),
        callable = handler,
        args = ('registerAdapter',
                factory, (layer,), Interface, name, _context.info),
        )
|
# coding=gbk
import cv2
import tensorflow as tf
from mtcnn.mtcnn import MTCNN
from skimage import transform as trans
import numpy as np
import pdb
import facenet
import sys
# Map pinyin (romanised) celebrity names, as used in the feature file's
# image paths, to their Chinese-character names for display.
pinyin2hanzi = {'lingengxin':'林更新','zhoudongyu':'周冬雨','liangjiahui':'梁家辉','shenteng':'沈腾','liyifeng':'李易峰','dengchao':'邓超','shuqi':'舒淇','huangbo':'黄渤','zhangmanyu':'张曼玉','liushishi':'刘诗诗','fanbingbing':'范冰冰','zhaoyouting':'赵又廷','zhourunfa':'周润发','louyixiao':'娄艺潇','liangjiahui':'梁家辉','liruotong':'李若彤','liuhaoran':'刘昊然','xienuo':'谢娜','hangeng':'韩庚','liuyifei':'刘亦菲','zhenzidan':'甄子丹','liudehua':'刘德华','songqian':'宋茜','guanzhilin':'关之琳','nini':'倪妮','wangzuxian':'王祖贤','tongliya': '佟丽娅','dilireba': '迪丽热巴',
'wangbaoqiang': '王宝强',
'yangying' :'杨颖',
'zhoujielun': '周杰伦',
'tangyan' :'唐嫣',
'xuzheng': '徐峥',
'lijiaxin': '李嘉欣',
'zhangbaizhi': '张柏芝',
'handongjun': '韩东君',
'gulinuozha': '古力娜扎',
'chenhe': '陈赫',
'chenqiaoen' :'陈乔恩',
'gutianle': '古天乐',
'yaochen': '姚晨',
'guofucheng' :'郭富城',
'zhangyixing': '张艺兴',
'yangzi': '杨紫',
'zhouhuimin' :'周慧敏',
'sunli' :'孙俪',
'mayili': '马伊俐',
'fengshaofeng' :'冯绍峰',
'jingtian': '景甜',
'liujialing' :'刘嘉玲',
'chenkun': '陈坤',
'jiangwen' :'姜文',
'pengyuyan' :'彭于晏',
'luhan' :'鹿晗',
'gaoyuanyuan' :'高圆圆',
'zhaoliying':'赵丽颖',
'wulei':'吴磊',
'zhangjiahui':'张家辉'}
# Five canonical facial landmark coordinates (left eye, right eye, nose,
# mouth corners) on the 112x96 template used for face alignment.
POINTS_SRC = np.array([
    [30.2946, 51.6963],
    [65.5318, 51.5014],
    [48.0252, 71.7366],
    [33.5493, 92.3655],
    [62.7299, 92.2041] ], dtype=np.float32)
# Standard (height, width) of an aligned face crop.
SHAPE = [112,96]
# Input size expected by the FaceNet recognition model.
IMG_SIZE = 160
# Path to the frozen recognition model.
MODEL_DIR = '/root/sourlab/test/models/20180402-114759.pb'
class sourlab_face(object):
    """Celebrity look-alike pipeline: detect, align, embed and match faces.

    All heavyweight state (TF graph/session, MTCNN detector, cached star
    features) lives in class attributes and is initialised once at class
    definition time, shared by every instance.
    """
    # Class attributes: TF graph/session and the star-feature cache.
    _graph = tf.Graph()
    _sess = tf.Session(graph=_graph)
    movie_star = []
    feature_list = []
    _detector = MTCNN(min_face_size = 12)
    txt_file = '/root/sourlab/test/feature.txt'
    print('load detection module')
    with _graph.as_default():
        _recognition = facenet.load_model(MODEL_DIR)
    print('load recognition module')
    def __init__(self, min_face_size=12):
        # All state is class-level; nothing per-instance to initialise.
        pass
    def detect_and_warp(self, img_file):
        """Detect faces in an image file and warp each to the template.

        Returns (num_face, warped_face) where warped_face has shape
        (num_face, 112, 96, 3); (None, None) when the image cannot be read.
        """
        image = cv2.imread(img_file)
        if image is None:
            print('image load error')
            return None, None
        # Detect faces and their landmarks with MTCNN.
        result = sourlab_face._detector.detect_faces(image)
        num_face = len(result)
        points_dst = np.zeros((num_face,5,2))
        warped_face = np.zeros((num_face,SHAPE[0],SHAPE[1],3))
        # Align each detected face onto the canonical landmark template.
        for i in range(num_face):
            keypoints = result[i]['keypoints']
            points_dst[i,0,:] = keypoints['left_eye']
            points_dst[i,1,:] = keypoints['right_eye']
            points_dst[i,2,:] = keypoints['nose']
            points_dst[i,3,:] = keypoints['mouth_left']
            points_dst[i,4,:] = keypoints['mouth_right']
            # get the transform matrix and warp the face
            tform = trans.SimilarityTransform()
            tform.estimate(points_dst[i,:,:], POINTS_SRC)
            M = tform.params[0:2,:]
            warped_face[i,:,:,:] = cv2.warpAffine(image,M,(SHAPE[1],SHAPE[0]), borderValue = 0.0)
        return num_face, warped_face
    def recognition(self, img_file):
        """Detect exactly one face in the image and return its embedding.

        Returns -1 when no face (or an unreadable image), 0 when two or
        more faces are found, otherwise the 512-d feature array.
        """
        # Detect and align before embedding.
        num_face, warped_face = self.detect_and_warp(img_file)
        # No face (or load error) and multi-face cases are rejected.
        if num_face is None:
            print('detect no faces')
            return -1
        if num_face == 0:
            print('detect no face')
            return -1
        if num_face>=2:
            print('detect more than 2 faces in the picture')
            return 0
        try:
            face_checked = cv2.resize(warped_face[0],(IMG_SIZE,IMG_SIZE), interpolation=cv2.INTER_CUBIC)
        except BaseException:
            # NOTE(review): drops into the debugger on resize failure —
            # debugging leftover, not production error handling.
            pdb.set_trace()
        else:
            pass
        print('detect face')
        # Pre-whiten the crop as FaceNet expects.
        face_checked = facenet.prewhiten(face_checked)
        input_data = []
        input_data.append(face_checked.reshape(-1,IMG_SIZE ,IMG_SIZE ,3))
        # Extract the 512-d face embedding.
        return self.extract_feature(input_data)
    # Run the recognition model to get the 512-d embedding of a face crop.
    def extract_feature(self, input_data):
        """Return the 512-d embedding for the prepared face batch."""
        with sourlab_face._graph.as_default():
            image_placeholder = sourlab_face._graph.get_tensor_by_name("input:0")
            embedding = sourlab_face._graph.get_tensor_by_name('embeddings:0')
            phase_train_placeholder = sourlab_face._graph.get_tensor_by_name("phase_train:0")
            embedding_size = embedding.get_shape()[1]
            embed_array = np.zeros((1,embedding_size))
            with sourlab_face._sess.as_default():
                embed_array[0] = sourlab_face._sess.run(embedding, feed_dict={image_placeholder: input_data[0], phase_train_placeholder: False })
            return embed_array
    # Static method: load the cached star features from the text file.
    @staticmethod
    def load_feature():
        """Populate movie_star (image paths) and feature_list (floats)
        from the whitespace-separated feature file."""
        f = open(sourlab_face.txt_file, 'r')
        lines = f.readlines()
        for line in lines:
            line = line.strip()
            line = line.split(' ')
            # First token is the image path; the rest are feature values.
            sourlab_face.movie_star.append(line[0])
            #tmp = []
            #print(len(line))
            for i in range(len(line) - 1):
                sourlab_face.feature_list.append(float(line[i + 1]))
    # Find the celebrity whose cached embedding is closest to `feature`.
    def find_nearest(self,feature):
        """Return (chinese_name, similarity_score, image_path) of the star
        whose embedding has the smallest Euclidean distance to `feature`.

        The score maps distance bands to 20-100 for presentation.
        """
        sourlab_face.feature_list = np.array(sourlab_face.feature_list)
        dim = 512
        sourlab_face.feature_list = sourlab_face.feature_list.reshape(-1,dim)
        # feature_list = feature_list.reshape(feature_list.shape[0],feature_list.shape[2])
        dist = np.zeros(sourlab_face.feature_list.shape[0])
        for i in range(dist.shape[0]):
            dist[i] = np.sqrt(np.sum(np.square(np.subtract(sourlab_face.feature_list[i], feature))))
        idx = np.argsort(dist)[0]
        # Piecewise score: <=0.5 -> 90+, (0.5, 0.9] -> 50-90, >0.9 -> 20-50.
        if dist[idx]<=0.5:
            score = 90+10*(0.5-dist[idx])
        if 0.5<dist[idx]<=0.9:
            score = 50+(0.9-dist[idx])*100
        if dist[idx]>0.9:
            score = 20+(np.max(dist)-dist[idx])*(30/(np.max(dist)-0.9))
        img_path = sourlab_face.movie_star[np.argsort(dist)[0]]
        # The parent directory name of the image path is the pinyin name.
        return pinyin2hanzi[img_path.split('/')[-2]],score,img_path
if __name__ == '__main__':
    # NOTE(review): img_name2 is read from argv but never used below.
    img_name1 , img_name2 = sys.argv[1], sys.argv[2]
    recog1 = sourlab_face(12)
    sourlab_face.load_feature()
    feature1 = recog1.recognition(img_name1)
    a,b,c = recog1.find_nearest(feature1)
    # Debugger breakpoint left in for interactive inspection of (a, b, c).
    pdb.set_trace()
|
import pytest
import json
import numpy as np
import pickle
import tensorflow as tf
from google.protobuf import json_format
from tensorflow.core.framework.tensor_pb2 import TensorProto
from seldon_core.proto import prediction_pb2
from seldon_core.microservice import get_data_from_json, array_to_grpc_datadef, grpc_datadef_to_array, rest_datadef_to_array, array_to_rest_datadef
from seldon_core.microservice import SeldonMicroserviceException
def test_normal_data():
    """A tensor payload decodes to a 1x1 numpy array holding the value."""
    payload = {"data": {"tensor": {"shape": [1, 1], "values": [1]}}}
    decoded = get_data_from_json(payload)
    assert isinstance(decoded, np.ndarray)
    assert decoded.shape[0] == 1
    assert decoded.shape[1] == 1
    assert decoded[0][0] == 1
def test_bin_data():
    """binData payloads are passed through untouched, not parsed to arrays."""
    payload_bytes = pickle.dumps(np.array([1, 2, 3]))
    decoded = get_data_from_json({"binData": payload_bytes})
    assert not isinstance(decoded, np.ndarray)
    assert decoded == payload_bytes
def test_str_data():
    """strData payloads come back as the raw string."""
    decoded = get_data_from_json({"strData": "my string data"})
    assert not isinstance(decoded, np.ndarray)
    assert decoded == "my string data"
def test_bad_data():
    """An unrecognised payload key raises SeldonMicroserviceException."""
    with pytest.raises(SeldonMicroserviceException):
        get_data_from_json({"foo": "bar"})
def test_proto_array_to_tftensor():
    """A 2x3 int array maps to a tftensor with matching shape and dtype 9."""
    datadef = array_to_grpc_datadef(np.array([[1, 2, 3], [4, 5, 6]]), [], "tftensor")
    print(datadef)
    tensor_shape = datadef.tftensor.tensor_shape
    assert tensor_shape.dim[0].size == 2
    assert tensor_shape.dim[1].size == 3
    assert datadef.tftensor.dtype == 9
def test_proto_tftensor_to_array():
    """gRPC round trip: ndarray -> TensorProto datadef -> ndarray."""
    original = np.array([[1, 2], [3, 4]])
    datadef = prediction_pb2.DefaultData(
        names=["a", "b"],
        tftensor=tf.make_tensor_proto(original),
    )
    restored = grpc_datadef_to_array(datadef)
    assert original.shape == restored.shape
    assert np.array_equal(original, restored)
def test_json_tftensor_to_array():
    """JSON round trip: ndarray -> datadef -> JSON dict -> ndarray."""
    original = np.array([[1, 2], [3, 4]])
    datadef = prediction_pb2.DefaultData(
        names=["a", "b"],
        tftensor=tf.make_tensor_proto(original),
    )
    as_dict = json.loads(json_format.MessageToJson(datadef))
    restored = rest_datadef_to_array(as_dict)
    assert np.array_equal(original, restored)
def test_json_array_to_tftensor():
    """array_to_rest_datadef emits a parseable 'tftensor' payload."""
    original = np.array([[1, 2], [3, 4]])
    datadef = array_to_rest_datadef(original, [], {"tftensor": {}})
    assert "tftensor" in datadef
    proto = TensorProto()
    json_format.ParseDict(datadef.get("tftensor"), proto,
                          ignore_unknown_fields=False)
    assert np.array_equal(original, tf.make_ndarray(proto))
|
# funny.py
import re
def is_funny(s):
    """Return True if s begins with one or more 'ha' followed by '!'.

    Note: re.match anchors only at the start, so trailing text after the
    '!' still counts as funny (original behaviour, preserved).
    """
    # Idiom fixes: raw string for the regex, `is not None` instead of
    # `!= None` for the identity comparison.
    return re.match(r'(ha)+!+', s) is not None
|
from setuptools import setup
# Install python package
setup(
    name="mastl",
    # BUG FIX: version must be a string — setuptools parses and compares
    # versions as strings, and a float breaks packaging tooling.
    version="0.1",
    author="Mathias Lohne",
    author_email="mathialo@ifi.uio.no",
    # BUG FIX: "LPGLv3" was a typo for the LGPLv3 license identifier.
    license="MIT, LGPLv3",
    description="Code from my master thesis",
    install_requires=["tensorflow>=1.5", "numpy", "sigpy"],
    packages=["mastl"])
|
from morepath.request import Response
from onegov.wtfs import _
from onegov.wtfs import WtfsApp
from onegov.wtfs.forms import CreateInvoicesForm
from onegov.wtfs.layouts import InvoiceLayout
from onegov.wtfs.models import Invoice
from onegov.wtfs.security import ViewModel
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from onegov.core.request import CoreRequest
from onegov.core.types import RenderData
@WtfsApp.form(
    model=Invoice,
    template='form.pt',
    permission=ViewModel,
    form=CreateInvoicesForm
)
def create_invoices(
    self: Invoice,
    request: 'CoreRequest',
    form: CreateInvoicesForm
) -> 'Response | RenderData':
    """ Create invoices and download them as CSV. """
    layout = InvoiceLayout(self, request)
    if form.submitted(request):
        # Valid submit: apply the form's settings to the invoice and stream
        # the CSV back directly instead of rendering the template.
        form.update_model(self)
        response = Response(
            content_type='text/csv',
            content_disposition='inline; filename=rechnungen.csv'
        )
        self.export(response.body_file)
        return response
    # Initial GET or invalid submit: render the form page.
    return {
        'layout': layout,
        'form': form,
        'button_text': _("Create invoice"),
        'cancel': layout.cancel_url
    }
|
import numpy as np
import chainer
class Dataset(chainer.dataset.DatasetMixin):
    """Binary-relabelled view of a (image, label) dataset.

    Examples whose label equals `digit` get label 1, all others get 0
    (both as numpy int32 scalars, matching chainer's expectations).
    """

    def __init__(self, dataset, digit):
        super().__init__()
        self.dataset = dataset
        self.digit = digit

    def __len__(self):
        return len(self.dataset)

    def get_example(self, i):
        image, original_label = self.dataset[i]
        # np.int32(...) yields the same int32 scalar the original built via
        # np.ones/np.zeros indexing.
        binary_label = np.int32(1) if original_label == self.digit else np.int32(0)
        return image, binary_label
def load_mnist(digit, percent=0.5):
    """Build a binary ("is it `digit`?") train/test split from MNIST.

    The training set contains only the target digit; the test set mixes
    target and non-target examples so targets make up `percent` of it.
    """
    train_data, test_data = chainer.datasets.get_mnist(ndim=3)
    # Indices of examples whose label equals the target digit.
    train_key = np.where(np.array(train_data)[:, 1] == digit)[0]
    test_key1 = np.where(np.array(test_data)[:, 1] == digit)[0]
    # Take just enough non-digit test examples that the positives account
    # for `percent` of the combined test set.
    test_key2 = np.where(np.array(test_data)[:, 1] != digit)[0][:int(len(test_key1)/(1-percent)*percent)]
    train = Dataset(np.array(train_data)[train_key], digit)
    test = Dataset(chainer.datasets.ConcatenatedDataset(np.array(test_data)[test_key1], np.array(test_data)[test_key2]), digit)
    return train, test
|
# Copyright Alexander Wood 2016.
# Solutions for Coursera's Bioinformatics 1 Course.
# The following is a variation on the HammingDistance function which quits running its calculations once a bound
# on the hamming distance has been reached. So for instance if the bound is d and p and q have a hamming distance of
# anything greater than d, it will simply return d+1.
def HammingDistance_bound(p, q, d):
    """Hamming distance between p and q, capped at d + 1.

    Counts mismatched positions but stops counting once the count exceeds
    the bound d — callers only care whether the distance is <= d, so any
    value above d is reported as d + 1.  Returns the string
    'Invalid input' (original sentinel, preserved) for unequal lengths.
    """
    if len(p) != len(q):
        return 'Invalid input'
    hamming_dist = 0  # Initialize to zero
    # zip pairs the characters directly, replacing the index loop.
    for a, b in zip(p, q):
        if a != b:
            hamming_dist += 1
            if hamming_dist > d:
                # Bound exceeded: no caller distinguishes values above d.
                break
    return hamming_dist
'''
We say that a k-mer Pattern appears as a substring of Text with at most d mismatches if there is some k-mer substring Pattern'
of Text having d or fewer mismatches with Pattern, i.e., HammingDistance(Pattern, Pattern') ≤ d. Our observation that a DnaA
box may appear with slight variations leads to the following generalization of the Pattern Matching Problem.
Approximate Pattern Matching Problem: Find all approximate occurrences of a pattern in a string.
Input: Strings Pattern and Text along with an integer d.
Output: All starting positions where Pattern appears as a substring of Text with at most d mismatches.
Code Challenge: Solve the Approximate Pattern Matching Problem.
'''
def ApproximatePatternMatching(Pattern, Text, d):
    """Find all starting positions where Pattern occurs in Text with at
    most d mismatches.

    Returns the positions as strings (original behaviour, preserved —
    callers join them for the Rosalind answer format).
    """
    # Hoist the pattern length and build the result with a comprehension
    # instead of a manual append loop.
    k = len(Pattern)
    return [str(i)
            for i in range(len(Text) - k + 1)
            if HammingDistance_bound(Pattern, Text[i:i + k], d) <= d]
|
import math
# Positive infinity, produced by overflow so it also evaluates under
# translators without a float('inf') literal.
INFINITY = 1e200 * 1e200
try:
    # Running under RPython: use its exact float formatting and rounding.
    from rpython.rlib.rfloat import formatd, DTSF_ADD_DOT_0, DTSF_STR_PRECISION
    from rpython.rlib.rfloat import round_double # pylint: disable=unused-import
    def float_to_str(value):
        # Shortest 'g' formatting, forcing a trailing ".0" on integers.
        return formatd(value, "g", DTSF_STR_PRECISION, DTSF_ADD_DOT_0)
except ImportError:
    "NOT_RPYTHON"
    def float_to_str(value):
        # Plain CPython: str() already yields a round-trippable repr.
        return str(value)
    def round_double(value, _ndigits):
        # round() from libm, which is not available on all platforms!
        # This version rounds away from zero.
        abs_value = abs(value)
        rounded = math.floor(abs_value + 0.5)
        if rounded - abs_value < 1.0:
            return math.copysign(rounded, value)
        # 'abs_value' is just in the wrong range: its exponent is precisely
        # the one for which all integers are representable but not any
        # half-integer. It means that 'abs_value + 0.5' computes equal to
        # 'abs_value + 1.0', which is not equal to 'abs_value'. So 'rounded - abs_value'
        # computes equal to 1.0. In this situation, we can't return
        # 'rounded' because 'abs_value' was already an integer but 'rounded' is the next
        # integer! But just returning the original 'x' is fine.
        return value
|
#!/usr/bin/env python
import sys
import rospy
from std_msgs.msg import Float32MultiArray
from std_msgs.msg import Float32
from geometry_msgs.msg import TransformStamped
from sensor_msgs.msg import JointState
from hardware_tools import Dynamixel
import tf
# Module-level flags: servo torque is enabled lazily on the first incoming
# command.  ('global' at module scope is a no-op, kept as-is.)
global armTorqueActive
global gripperTorqueActive
gripperTorqueActive = False
armTorqueActive = False
def printRegisters(portName1, portBaud1):
    """Dump the register values of arm servos with IDs 0-6.

    Opens its own Dynamixel connection on the given serial port/baud.
    """
    dynMan1 = Dynamixel.DynamixelMan(portName1, portBaud1)
    # Same seven explicit GetRegistersValues calls as before, collapsed
    # into a loop over the servo IDs.
    for servo_id in range(7):
        dynMan1.GetRegistersValues(servo_id)
def printHelp():
    # Placeholder usage banner; no options are documented yet.
    print "LEFT ARM NODE BY MARCOSOfT. Options:"
def callbackGripper(msg):
    """Gripper goal callback: convert the angle [rad] in msg.data to servo
    ticks and command both gripper servos (IDs 7 and 8)."""
    global dynMan1
    global gripperTorqueActive
    if not gripperTorqueActive:
        # Lazily enable torque and set a moderate speed on first command.
        dynMan1.SetTorqueEnable(7, 1)
        dynMan1.SetTorqueEnable(8, 1)
        dynMan1.SetMovingSpeed(7, 50)
        dynMan1.SetMovingSpeed(8, 50)
        gripperTorqueActive = True
    gripperPos = msg.data
    # radians -> servo ticks (one tick = 360/4095 degrees); computed once
    # instead of duplicating the conversion for each servo.
    ticks = gripperPos / (360.0/4095.0*3.14159265358979323846/180.0)
    # Per-servo zero offsets (1400 / 1295 ticks) calibrate the two fingers.
    dynMan1.SetGoalPosition(7, int(ticks + 1400))
    dynMan1.SetGoalPosition(8, int(ticks + 1295))
def callbackPos(msg):
    """Drive the seven arm servos to the joint angles (radians) in *msg*.

    msg.data must carry at least 7 joint angles.  On the first call the
    servo torques are enabled and a moving speed is configured; afterwards
    each published pose is converted to Dynamixel register ticks and
    written as the goal positions.
    """
    global dynMan1
    global armTorqueActive
    # Per-joint (sign, encoder span in degrees, zero offset in ticks).
    # Servo 0 spans 251 deg over the 0-4095 register range, the rest 360 deg.
    # This table replaces seven duplicated magic-number conversion lines and
    # the oversized 9-element goal list the original allocated for 7 servos.
    servoParams = [(+1, 251.0, 2052),
                   (+1, 360.0, 2309),
                   (-1, 360.0, 1787),
                   (+1, 360.0, 1969),
                   (-1, 360.0, 2048),
                   (-1, 360.0, 1848),
                   (-1, 360.0, 2068)]
    if armTorqueActive == False:
        ### Set Servomotors Torque Enable
        for i in range(len(servoParams)):
            dynMan1.SetTorqueEnable(i, 1)
        ### Set Servomotors Speeds
        for i in range(len(servoParams)):
            dynMan1.SetMovingSpeed(i, 60)
        armTorqueActive = True
    ### Convert each joint angle (radians) to register ticks and command it.
    for i, (sign, span, offset) in enumerate(servoParams):
        radiansPerTick = span / 4095.0 * 3.14159265358979323846 / 180.0
        dynMan1.SetGoalPosition(i, int(sign * (msg.data[i] / radiansPerTick) + offset))
def main(portName1, portBaud1):
    # Node entry point: configure the left-arm Dynamixel chain on the given
    # serial port, then loop at 10 Hz publishing joint states, current pose,
    # gripper state and (every 10th iteration) battery voltage, while the
    # subscribers registered below feed goal poses to the callbacks.
    print "INITIALIZING LEFT ARM NODE BY MARCOSOFT..."
    ###Communication with dynamixels:
    global dynMan1
    dynMan1 = Dynamixel.DynamixelMan(portName1, portBaud1)
    msgCurrentPose = Float32MultiArray()
    msgCurrentPose.data = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
    msgCurrentGripper = Float32()
    msgBatery = Float32()
    msgBatery = 0.0
    curretPos = [0,0,0,0,0,0,0,0]
    # Register ticks per radian: servo 0 spans 251 deg over the 0-4095
    # register range, the remaining servos span 360 deg.
    bitsPerRadian_0 = (4095)/((251)*(3.14159265/180))
    bitsPerRadian = (4095)/((360)*(3.141592/180))
    i = 0
    ### Set controller parameters
    # Servos 0-1 use compliance-slope control; servos 2-5 use PID gains.
    dynMan1.SetCWComplianceSlope(0, 32)
    dynMan1.SetCCWComplianceSlope(0, 32)
    dynMan1.SetCWComplianceSlope(1, 32)
    dynMan1.SetCCWComplianceSlope(1, 32)
    for i in range(2, 6):
        dynMan1.SetDGain(i, 25)
        dynMan1.SetPGain(i, 16)
        dynMan1.SetIGain(i, 1)
    ### Set servos features
    # NOTE(review): ranges above/below stop at 5, so servo 6 gets no gains,
    # torque limit or temperature limit -- confirm whether that is intended.
    for i in range(0, 6):
        dynMan1.SetMaxTorque(i, 1024)
        dynMan1.SetTorqueLimit(i, 512)
        dynMan1.SetHighestLimitTemperature(i, 80)
    ###Connection with ROS
    rospy.init_node("left_arm")
    br = tf.TransformBroadcaster()
    jointStates = JointState()
    jointStates.name = ["la_1_joint", "la_2_joint", "la_3_joint", "la_4_joint", "la_5_joint", "la_6_joint", "la_7_joint"]
    jointStates.position = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
    subPos = rospy.Subscriber("/hardware/left_arm/goal_pose", Float32MultiArray, callbackPos)
    subGripper = rospy.Subscriber("/hardware/left_arm/gripper_pose", Float32, callbackGripper)
    pubJointStates = rospy.Publisher("/joint_states", JointState, queue_size = 1)
    pubArmPose = rospy.Publisher("left_arm/current_pose", Float32MultiArray, queue_size = 1)
    pubGripper = rospy.Publisher("left_arm/current_gripper", Float32, queue_size = 1)
    pubBatery = rospy.Publisher("/hardware/robot_state/left_arm_battery", Float32, queue_size = 1)
    loop = rospy.Rate(10)
    while not rospy.is_shutdown():
        # Read each servo register and convert back to radians around its
        # calibrated zero offset (signs mirror the ones used when commanding
        # goal positions in callbackPos).
        # NOTE(review): offset 2054 here vs 2052 in callbackPos for servo 0,
        # and 2048 vs 2068 for servo 6 -- confirm which values are correct.
        pos0 = float(-(2054-dynMan1.GetPresentPosition(0))/bitsPerRadian_0)
        pos1 = float((2309-dynMan1.GetPresentPosition(1))/bitsPerRadian)
        pos2 = float(-(1787-dynMan1.GetPresentPosition(2))/bitsPerRadian)
        pos3 = float(-(1969-dynMan1.GetPresentPosition(3))/bitsPerRadian)
        pos4 = float(-(2048-dynMan1.GetPresentPosition(4))/bitsPerRadian)
        pos5 = float((1848-dynMan1.GetPresentPosition(5))/bitsPerRadian)
        pos6 = float(-(2048-dynMan1.GetPresentPosition(6))/bitsPerRadian)
        #posD21 = float((1400-dynMan1.GetPresentPosition(7))/bitsPerRadian)
        #posD22 = float((1295-dynMan1.GetPresentPosition(8))/bitsPerRadian)
        jointStates.header.stamp = rospy.Time.now()
        jointStates.position[0] = pos0
        jointStates.position[1] = pos1
        jointStates.position[2] = pos2
        jointStates.position[3] = pos3
        jointStates.position[4] = pos4
        jointStates.position[5] = pos5
        jointStates.position[6] = pos6
        msgCurrentPose.data[0] = pos0
        msgCurrentPose.data[1] = pos1
        msgCurrentPose.data[2] = pos2
        msgCurrentPose.data[3] = pos3
        msgCurrentPose.data[4] = pos4
        msgCurrentPose.data[5] = pos5
        msgCurrentPose.data[6] = pos6
        #msgCurrentGripper.data = posD21
        pubJointStates.publish(jointStates)
        pubArmPose.publish(msgCurrentPose)
        pubGripper.publish(msgCurrentGripper)
        # Battery voltage (register is in tenths of a volt) is sampled only
        # once every 10 loop iterations to limit bus traffic.
        if i == 10:
            msgBatery = float(dynMan1.GetPresentVoltage(2)/10)
            pubBatery.publish(msgBatery)
            i=0
        i+=1
        loop.sleep()
if __name__ == '__main__':
    try:
        # BUG FIX: "--help"/"-h" used to print the banner and then fall
        # through to the next if/else, starting the node (or the register
        # dump) anyway.  Help is now exclusive, as is --registers.
        if "--help" in sys.argv or "-h" in sys.argv:
            printHelp()
        elif "--registers" in sys.argv:
            # Dump all servo registers and exit without starting the node.
            printRegisters("/dev/ttyUSB0", 115200)
        else:
            portName1 = "/dev/ttyUSB0"
            portBaud1 = 115200
            if "--port1" in sys.argv:
                portName1 = sys.argv[sys.argv.index("--port1") + 1]
            if "--baud1" in sys.argv:
                portBaud1 = int(sys.argv[sys.argv.index("--baud1") + 1])
            # (--port2/--baud2 were parsed into variables that were never
            # used anywhere; that dead parsing has been removed.)
            main(portName1, portBaud1)
    except rospy.ROSInterruptException:
        pass
|
'''
Settings: Controls resolution, colors, difficulties (?).
'''
class Settings():
    """Game settings object; currently only creates the display window."""
    def __init__(self):
        """Open the game window at the default 1200x800 resolution."""
        # BUG FIX: the original called self.display.set_mode(...), but
        # Settings has no 'display' attribute -- pygame.display was meant,
        # so every instantiation raised AttributeError.  pygame is imported
        # locally because this module has no top-level pygame import.
        import pygame
        # Keep the created surface so callers can reach it via settings.screen.
        self.screen = pygame.display.set_mode((1200, 800))
|
# -*- coding: utf-8 -*-
# Copyright (c) 2010 Infrae. All rights reserved.
# See also LICENSE.txt
# $Id$
import unittest
from silva.core.services.interfaces import IMemberService
from silva.security.renameusers.testing import FunctionalLayer
from zope.component import getUtility
from Products.Silva.tests.helpers import test_filename
from Products.Silva.ftesting import zmi_settings
class RenameUsersTestCase(unittest.TestCase):
    # Functional tests for the "Rename users" ZMI form: it reads a CSV
    # mapping of old->new user ids and can update member objects, local
    # roles and object ownership depending on which checkboxes are set.
    layer = FunctionalLayer
    # ZMI location of the rename form under test.
    FORM_URL = '/root/service_extensions/manage_renameUsers'
    def setUp(self):
        # Fresh application root per test, logged in as 'manager'.
        self.root = self.layer.get_application()
        self.layer.login('manager')
    def create_member_objects(self):
        # Ensure member objects exist for a fixed set of user ids.
        members = getUtility(IMemberService)
        for userid in ['member', 'author', 'chiefeditor', 'editor']:
            # Getting a member object for the first time will create
            # it.
            members.get_member(userid)
    def test_form_access_validation(self):
        # Anonymous access is refused (401); an authenticated manager can
        # open the form, and submitting it without a mapping file reports
        # validation errors.
        browser = self.layer.get_browser(zmi_settings)
        self.assertEqual(browser.open(self.FORM_URL), 401)
        browser.login('manager')
        self.assertEqual(browser.open(self.FORM_URL), 200)
        self.assertEqual(browser.inspect.zmi_title, ['Rename users'])
        form = browser.get_form('form')
        self.assertEqual(form.inspect.actions, ['Rename'])
        self.assertEqual(form.inspect.actions['rename'].click(), 200)
        self.assertEqual(browser.inspect.zmi_status, ['There were errors.'])
    def test_rename_member_objects(self):
        # With only 'update_members' checked, submitting renames.csv renames
        # the member objects ('member'->'sylvain', 'author'->'arthur' per the
        # expected ids below) without touching roles or ownership.
        self.create_member_objects()
        self.assertEqual(
            list(self.root.Members.objectIds()),
            ['author', 'chiefeditor', 'editor', 'manager'])
        browser = self.layer.get_browser(zmi_settings)
        browser.login('manager')
        self.assertEqual(browser.open(self.FORM_URL), 200)
        csv_filename = test_filename('renames.csv', globals())
        form = browser.get_form('form')
        form.get_control('form.field.update_roles').value = False
        form.get_control('form.field.update_ownership').value = False
        form.get_control('form.field.update_members').value = True
        form.get_control('form.field.mapping').value = csv_filename
        self.assertEqual(form.inspect.actions['rename'].click(), 200)
        self.assertEqual(
            browser.inspect.zmi_status,
            ['Updated 3 users, renamed 2 members objects.'])
        self.assertEqual(
            list(self.root.Members.objectIds()),
            ['arthur', 'chiefeditor', 'editor', 'sylvain'])
        self.assertEqual(
            self.root.Members.arthur.id,
            'arthur')
    def test_rename_roles(self):
        # With only 'update_roles' checked, local roles move from the old
        # user id to the new one ('manager' -> 'sylvain' here).
        get_user_roles = self.root.get_local_roles_for_userid
        self.assertEqual(get_user_roles('sylvain'), ())
        self.assertEqual(get_user_roles('manager'), ('Owner',))
        browser = self.layer.get_browser(zmi_settings)
        browser.login('manager')
        self.assertEqual(browser.open(self.FORM_URL), 200)
        csv_filename = test_filename('renames.csv', globals())
        form = browser.get_form('form')
        form.get_control('form.field.update_roles').value = True
        form.get_control('form.field.update_ownership').value = False
        form.get_control('form.field.update_members').value = False
        form.get_control('form.field.mapping').value = csv_filename
        self.assertEqual(form.inspect.actions['rename'].click(), 200)
        self.assertEqual(
            browser.inspect.zmi_status,
            ['Updated 3 users, reaffected 2 roles.'])
        self.assertEqual(get_user_roles('sylvain'), ('Owner',))
        self.assertEqual(get_user_roles('manager'), ())
    def test_change_ownership(self):
        # With only 'update_ownership' checked, object ownership is
        # transferred from the old user id to the new one.
        get_owner = lambda obj: obj.getOwner().getId()
        self.assertEqual(get_owner(self.root), 'manager')
        self.assertEqual(get_owner(self.root.index), 'manager')
        browser = self.layer.get_browser(zmi_settings)
        browser.login('manager')
        self.assertEqual(browser.open(self.FORM_URL), 200)
        csv_filename = test_filename('renames.csv', globals())
        form = browser.get_form('form')
        form.get_control('form.field.update_roles').value = False
        form.get_control('form.field.update_ownership').value = True
        form.get_control('form.field.update_members').value = False
        form.get_control('form.field.mapping').value = csv_filename
        self.assertEqual(form.inspect.actions['rename'].click(), 200)
        self.assertEqual(
            browser.inspect.zmi_status,
            ['Updated 3 users, changed 1 owners.'])
        self.assertEqual(get_owner(self.root), 'sylvain')
        self.assertEqual(get_owner(self.root.index), 'sylvain')
def test_suite():
    """Assemble this module's test cases into a single unittest suite."""
    return unittest.TestSuite([unittest.makeSuite(RenameUsersTestCase)])
|
import os
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.transforms as transforms
from model.model import Spoofing, Classifier
from dataset import faceDataset
from loss import TripletLoss
from torch.autograd import Variable
from torch.utils.data import DataLoader
from sklearn.metrics import roc_auc_score
if __name__ == '__main__':
    # Evaluate the anti-spoofing model on the OULU test set and write one
    # averaged score per video to oulu_output.csv.
    # Fix the RNG seeds so evaluation is reproducible.
    torch.manual_seed(1234)
    torch.cuda.manual_seed(1234)
    test_data = np.load('test_data.npy')
    test_dataset = faceDataset('oulu_test', './oulu/test', data=test_data, sequence=True)
    batch_size = 3
    test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False)
    model = Spoofing().cuda()
    model.load_state_dict(torch.load('spoofer.pth'))
    classifier = Classifier().cuda()
    classifier.load_state_dict(torch.load('classifier.pth'))
    model.eval()
    classifier.eval()
    # Cleanup vs the original: removed a large block of commented-out
    # cue-thresholding code, the now-unused cue_threshold constant, the
    # unused enumerate() index, and the np.zeros(1) dummy element that had
    # to be sliced off again with output[1:].
    with torch.no_grad():
        batch_scores = []
        for data in test_loader:
            batch = data.shape[0]
            # Fold the per-video frame sequence into the batch dimension.
            data = data.view(-1, 3, test_dataset.new_size, test_dataset.new_size)
            output_cue = model(data.cuda())
            # Classify the input image with the predicted spoof cue added back.
            score = classifier(model.encoder(output_cue[-1].detach() + data.cuda())[-1])
            score = torch.sigmoid(score).cpu().numpy().reshape(batch, 11)
            # One score per video: average over its 11 frames.
            batch_scores.append(np.mean(score, axis=1))
        output = np.concatenate(batch_scores)
    with open('oulu_output.csv', 'w') as f:
        f.write('video_id,label\n')
        for i, y in enumerate(output):
            f.write('{},{}\n'.format(i, y))
|
import sys, pygame
# Custom timer event that drives update() at a fixed rate (see main()).
FRAME_UPDATE_EVENT = pygame.USEREVENT+1
# Main display surface; created in main() via pygame.display.set_mode.
screen = None
# Window resolution in pixels.
RES_X = 800
RES_Y = 600
# Current pressed-state of the arrow keys, maintained by keyDown()/keyUp().
key_up = False
key_down = False
key_left = False
key_right = False
def initialiseGame():
    """One-time game setup hook; called once before the main loop starts."""
    pass
def update():
    """Per-frame game-state update hook, driven by FRAME_UPDATE_EVENT."""
    pass
def draw():
    """Per-frame render hook; called after the screen is cleared."""
    pass
def _setArrowFlag(key, pressed):
    """Set the global movement flag for *key* if it is an arrow key."""
    global key_up, key_down, key_left, key_right
    if key == pygame.K_UP:
        key_up = pressed
    elif key == pygame.K_DOWN:
        key_down = pressed
    elif key == pygame.K_LEFT:
        key_left = pressed
    elif key == pygame.K_RIGHT:
        key_right = pressed

def keyDown(key):
    """Record an arrow-key press."""
    _setArrowFlag(key, True)

def keyUp(key):
    """Record an arrow-key release."""
    _setArrowFlag(key, False)
def processEvent(event):
    """Route one pygame event to the matching handler.

    The frame-timer check is deliberately a separate `if`: a timer event
    still falls through to the keyboard/quit chain, exactly as before.
    """
    etype = event.type
    if etype == FRAME_UPDATE_EVENT:
        update()
    if etype == pygame.QUIT:
        sys.exit()
    elif etype == pygame.KEYDOWN:
        keyDown(event.key)
    elif etype == pygame.KEYUP:
        keyUp(event.key)
def main():
    """Initialise pygame, then run the event/draw loop forever."""
    global screen
    pygame.init()
    screen = pygame.display.set_mode((RES_X, RES_Y))
    # Schedule FRAME_UPDATE_EVENT roughly 60 times per second (period in ms).
    pygame.time.set_timer(FRAME_UPDATE_EVENT, 1000 // 60)
    initialiseGame()
    while True:
        for event in pygame.event.get():
            processEvent(event)
        screen.fill((0, 0, 0))
        draw()
        pygame.display.flip()
# Start the game only when run as a script, not when imported as a module.
if __name__ == '__main__':
    main()
#-*- coding:utf8 -*-
import os,re,json
import datetime
from django.http import HttpResponseRedirect
from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.db.models import Sum,Count,Max
from djangorestframework import status
from djangorestframework.response import Response,ErrorResponse
from shopback import paramconfig as pcfg
from shopback.logistics.models import LogisticsCompany
from shopback.base.views import ModelView,ListOrCreateModelView,ListModelView,FileUploadView
from shopback.base import log_action, ADDITION, CHANGE
from .service import YundaPackageService,DEFUALT_CUSTOMER_CODE
from .models import (BranchZone,
YundaCustomer,
LogisticOrder,
ParentPackageWeight,
TodaySmallPackageWeight,
TodayParentPackageWeight,
JZHW_REGEX,
YUNDA,
NORMAL,
DELETE)
from .options import get_addr_zones
class PackageByCsvFileView(FileUploadView):
    # Import a Yunda package CSV (small packages grouped under parent
    # packages) and create/update today's weight-tracking records.
    file_path = 'yunda'
    filename_save = 'package_%s.csv'
    def get(self, request, *args, **kwargs):
        # Uploads are POST-only; GET intentionally does nothing.
        pass
    def getSid(self,row):
        # Column 0: small-package waybill number.
        return row[0]
    def getParentSid(self,row):
        # Column 6: parent-package waybill number.
        return row[6]
    def getPackDestinate(self,row):
        # Column 7: destination string.
        return row[7]
    def isJZHW(self,row):
        # True when the destination matches the JZHW pattern.
        return JZHW_REGEX.match(row[7]) and True or False
    def getYundaPackageRegex(self):
        # Compile the Yunda tracking-number pattern stored on the company row.
        yunda_company = LogisticsCompany.objects.get(code=YUNDA)
        return re.compile(yunda_company.reg_mail_no)
    def createParentPackage(self,row):
        # Create/update the parent-package weight rows; parent ids are
        # expected to be 13+ digits starting with '9'.
        psid = self.getParentSid(row)
        if len(psid) < 13 or not psid.startswith('9'):
            return
        ppw,state = ParentPackageWeight.objects.get_or_create(
                                parent_package_id = psid)
        ppw.is_jzhw = self.isJZHW(row)
        ppw.destinate = self.getPackDestinate(row)
        ppw.save()
        tppw,state = TodayParentPackageWeight.objects.get_or_create(
                                parent_package_id = psid)
        tppw.is_jzhw = self.isJZHW(row)
        tppw.save()
    def createSmallPackage(self,row):
        # Create/update the small-package order and today's weight row,
        # linking the package to its parent.
        sid  = self.getSid(row)
        psid = self.getParentSid(row)
        lo,sate   = LogisticOrder.objects.get_or_create(out_sid=sid)
        lo.parent_package_id = psid
        lo.is_jzhw = self.isJZHW(row)
        lo.save()
        tspw,state = TodaySmallPackageWeight.objects.\
                    get_or_create(package_id=sid)
        tspw.parent_package_id = psid
        tspw.is_jzhw = self.isJZHW(row)
        tspw.save()
    def createTodayPackageWeight(self,row):
        # One CSV row yields both a small-package and a parent-package record.
        self.createSmallPackage(row)
        self.createParentPackage(row)
    def handle_post(self,request,csv_iter):
        # Iterate the uploaded CSV, skipping rows whose first column is not
        # a valid small-package number (parent ids start with '9').
        package_regex = self.getYundaPackageRegex()
        encoding  = self.getFileEncoding(request)
        for row in csv_iter:
            if package_regex.match(row[0]) and not row[0].startswith('9'):
                row = [r.strip().decode(encoding) for r in row]
                self.createTodayPackageWeight(row)
        return {'success':True,
                'redirect_url':reverse('admin:yunda_todaysmallpackageweight_changelist')}
class CustomerPackageImportView(FileUploadView):
    # Import a customer-supplied order CSV: the first cell encodes the
    # customer code, each following row describes one shipment (waybill,
    # receiver and address columns documented on the getters below).
    file_path = 'yunda'
    filename_save = 'cr_package_%s.csv'
    def get(self, request, *args, **kwargs):
        # Uploads are POST-only; GET intentionally does nothing.
        pass
    def getCustomer(self,code):
        try:
            return YundaCustomer.objects.get(code=code)
        except:
            raise Exception(u'未找到代码(%s)对应的客户信息'%code)
    def getSid(self,row):
        # Column 2: waybill number.
        return row[2]
    def getCusOid(self,row):
        # Column 0: the customer's own order id.
        return row[0]
    def getPackageCompany(self,row):
        # Column 1: logistics company name.
        return row[1]
    def getPackageReceiver(self,row):
        # Column 3: receiver name.
        return row[3]
    def getPackageState(self,row):
        # Column 4: receiver province.
        return row[4]
    def getPackageCity(self,row):
        # Column 5: receiver city.
        return row[5]
    def getPackageDistrict(self,row):
        # Column 6: receiver district.
        return row[6]
    def getPackageAddress(self,row):
        # Column 7: receiver street address.
        return row[7]
    def getPackageMobile(self,row):
        # Column 8: receiver mobile number.
        return row[8]
    def getPackagePhone(self,row):
        # Column 9: receiver landline number.
        return row[9]
    def isValidPackage(self,row):
        # A row is importable when its waybill matches Yunda's pattern.
        yunda_company = LogisticsCompany.objects.get(code=YUNDA)
        return re.compile(yunda_company.reg_mail_no).match(self.getSid(row))
    def createPackageOrder(self,customer,row,ware_no):
        # Create/update the logistic order for one CSV row; refuses to
        # silently overwrite a different customer order id on the same waybill.
        sid = self.getSid(row)
        lo,sate   = LogisticOrder.objects.get_or_create(out_sid=sid)
        if lo.cus_oid and lo.cus_oid != self.getCusOid(row):
            raise Exception(u'运单单号:%s,新(%s)旧(%s)客户单号不一致,请核实!'%
                            (sid,lo.cus_oid,self.getCusOid(row)))
        lo.yd_customer = customer
        lo.cus_oid    = self.getCusOid(row)
        lo.receiver_name = self.getPackageReceiver(row)
        lo.receiver_state = self.getPackageState(row)
        lo.receiver_city  = self.getPackageCity(row)
        lo.receiver_district = self.getPackageDistrict(row)
        lo.receiver_address  = self.getPackageAddress(row)
        lo.receiver_mobile   = self.getPackageMobile(row)
        lo.receiver_phone    = self.getPackagePhone(row)
        lo.wave_no  = ware_no
        lo.save()
        tspw,state = TodaySmallPackageWeight.objects.\
                    get_or_create(package_id=sid)
        tspw.is_jzhw = lo.isJZHW()
        tspw.save()
    def handle_post(self,request,csv_iter):
        # Tag this import with a timestamp "wave" number so the admin
        # redirect below can filter exactly the rows just imported.
        wave_no  = datetime.datetime.now().strftime('%Y%m%dT%H%M%S')
        encoding = self.getFileEncoding(request)
        # First CSV cell is '<customer code>-...'; look the customer up once.
        cus_code = csv_iter.next()[0].split('-')[0]
        customer = self.getCustomer(cus_code.upper())
        for row in csv_iter:
            row = [r.strip().decode(encoding) for r in row]
            if self.isValidPackage(row):
                self.createPackageOrder(customer,row,wave_no)
        return {'success':True,'redirect_url':'/admin/yunda/logisticorder/?q='+wave_no}
class DiffPackageDataView(ModelView):
    # GET recomputes today's package weights and returns aggregate
    # statistics; POST uploads the weights to Yunda after validating the
    # operator's pickup ("lanjian") id.
    def calcWeight(self,sqs,pqs):
        # Aggregate counts and weight sums for:
        #   A: all small packages, B: small packages bundled under a parent,
        #   C: parent packages.
        tspw_dict = sqs.aggregate(
                        total_num=Count('package_id'),
                        total_weight=Sum('weight'),
                        total_upload_weight=Sum('upload_weight'))
        jb_tspw_dict = sqs.exclude(parent_package_id='').aggregate(
                        total_num=Count('package_id'),
                        total_weight=Sum('weight'),
                        total_upload_weight=Sum('upload_weight'))
        tppw_dict = pqs.aggregate(
                        total_num=Count('parent_package_id'),
                        total_weight=Sum('weight'),
                        total_upload_weight=Sum('upload_weight'))
        return {'A':tspw_dict,'B':jb_tspw_dict,'C':tppw_dict}
    def calcPackageData(self):
        # Build the response: totals overall, split by JZHW flag, plus the
        # maximum single-package weights.
        small_queryset  = TodaySmallPackageWeight.objects.all()
        parent_queryset = TodayParentPackageWeight.objects.all()
        # NOTE(review): debug print left in production code -- consider
        # removing or switching to logging.
        print 'weight:',small_queryset.aggregate(max_weight=Max('weight'))
        return {'all':self.calcWeight(small_queryset,parent_queryset),
                'jzhw':self.calcWeight(small_queryset.filter(is_jzhw=True),
                                       parent_queryset.filter(is_jzhw=True)),
                'other':self.calcWeight(small_queryset.filter(is_jzhw=False),
                                        parent_queryset.filter(is_jzhw=False)),
                'max_sweight':small_queryset.aggregate(max_weight=Max('weight')).get('max_weight'),
                'max_pweight':parent_queryset.aggregate(max_weight=Max('weight')).get('max_weight')
                }
    def isValidLanjianUser(self,lanjian_id):
        # The operator must supply the pickup id of the default customer.
        yc = YundaCustomer.objects.get(code=DEFUALT_CUSTOMER_CODE)
        return yc.lanjian_id == lanjian_id
    def get(self, request, *args, **kwargs):
        # Recompute every package weight via the Yunda service; on any
        # per-package failure, collect (id, weight, message) and report the
        # failures instead of the aggregates.
        small_queryset  = TodaySmallPackageWeight.objects.all()
        parent_queryset = TodayParentPackageWeight.objects.all()
        error_packages = []
        yunda_service  = YundaPackageService()
        for tspw in small_queryset:
            try:
                weight_tuple = yunda_service.calcSmallPackageWeight(tspw)
                tspw.weight = weight_tuple[0] or tspw.weight
                tspw.upload_weight = weight_tuple[1] or tspw.upload_weight
                tspw.save()
            except Exception,exc:
                error_packages.append((tspw.package_id,tspw.weight,exc.message))
        # Parent packages are only recomputed when all small packages succeeded.
        if not error_packages:
            for tppw in parent_queryset:
                try:
                    weight_tuple = yunda_service.calcParentPackageWeight(tppw)
                    tppw.weight = weight_tuple[0] or tppw.weight
                    tppw.upload_weight = weight_tuple[1] or tppw.upload_weight
                    tppw.save()
                except Exception,exc:
                    error_packages.append((tppw.parent_package_id,tppw.weight,exc.message))
        if error_packages:
            return {'error_packages':error_packages}
        response = self.calcPackageData()
        return response
    def post(self, request, *args, **kwargs):
        # Upload weights to Yunda: children of each parent package first,
        # then the parent itself, then all remaining small packages.
        lanjian_id = request.POST.get('lanjian_id','').strip()
        try:
            if not self.isValidLanjianUser(lanjian_id):
                raise Exception(u'揽件ID不正确,重新再试!')
            parent_queryset = TodayParentPackageWeight.objects.all()
            ydpkg_service = YundaPackageService()
            for parent_package in parent_queryset:
                child_packages = TodaySmallPackageWeight.objects\
                        .filter(parent_package_id=parent_package.parent_package_id)
                ydpkg_service.uploadSmallPackageWeight(child_packages)
                ydpkg_service.uploadParentPackageWeight([parent_package])
            small_queryset  = TodaySmallPackageWeight.objects.all()
            ydpkg_service.uploadSmallPackageWeight(small_queryset)
        except Exception,exc:
            messages.error(request, u'XXXXXXXXXXXXXXXXXXXXX 包裹重量上传异常:%s XXXXXXXXXXXXXXXXXXXXX'%exc.message)
        else:
            messages.info(request,u'================ 包裹重量上传成功 ===================')
        from django.contrib.contenttypes.models import ContentType
        ct = ContentType.objects.get_for_model(TodaySmallPackageWeight)
        return HttpResponseRedirect(reverse('admin:%s_%s_changelist'%(ct.app_label, ct.model)))
class PackageWeightView(ModelView):
    """Package weighing view.

    GET resolves a scanned waybill to its order details and branch zone;
    POST stores a measured weight for the scanned waybill.
    """
    def isValidYundaId(self,package_no):
        # A scan must carry at least the 13-digit waybill number and match
        # the Yunda tracking-number pattern.
        if len(package_no) < 13:
            return False
        yunda_company = LogisticsCompany.objects.get(code=YUNDA)
        return re.compile(yunda_company.reg_mail_no).match(package_no[0:13])
    def parseYundaId(self,package_no):
        # Split a scan into (waybill id, validation code, dc barcode);
        # short scans only carry the 13-digit waybill id.
        if len(package_no) < 24:
            return package_no[0:13],'',''
        return package_no[0:13],package_no[13:17],package_no[17:23]
    def getYundaZone(self,lg_order,dc_code=None):
        # Prefer an explicit distribution-center barcode; otherwise derive
        # the zone from the receiver's address.
        if dc_code:
            bzones = BranchZone.objects.filter(barcode=dc_code)
            if bzones.count() > 0:
                return bzones[0]
        return get_addr_zones(lg_order.receiver_state,
                              lg_order.receiver_city,
                              lg_order.receiver_district,
                              address=lg_order.receiver_address)
    def get(self, request, *args, **kwargs):
        content = request.REQUEST
        package_no = content.get('package_no')
        if not self.isValidYundaId(package_no):
            return u'非法的运单号'
        package_id,valid_code,dc_code = self.parseYundaId(package_no)
        try:
            lo = LogisticOrder.objects.get(out_sid=package_id)
        except LogisticOrder.DoesNotExist:
            # Unknown waybill: only auto-create it when the scan carried a
            # dc barcode; otherwise the order was never imported.
            if not dc_code:
                return u'运单号未录入系统'
            lo,state = LogisticOrder.objects.get_or_create(out_sid=package_id)
        lo.dc_code    = dc_code
        lo.valid_code = valid_code
        lo.save()
        log_action(request.user.id,lo,ADDITION,u'扫描录单')
        try:
            yd_customer = lo.yd_customer and lo.yd_customer.name or ''
        except:
            yd_customer = ''
        return {'package_id':package_id,
                'cus_oid':lo.cus_oid,
                'yd_customer':yd_customer,
                'receiver_name':lo.receiver_name,
                'receiver_state':lo.receiver_state,
                'receiver_city':lo.receiver_city,
                'receiver_district':lo.receiver_district,
                'receiver_address':lo.receiver_address,
                'created':lo.created,
                'zone':self.getYundaZone(lo, dc_code)
                }
    def post(self, request,*args, **kwargs):
        content = request.REQUEST
        package_no = content.get('package_no')
        package_weight = content.get('package_weight')
        if not self.isValidYundaId(package_no):
            return u'非法的运单号'
        package_id,valid_code,dc_code = self.parseYundaId(package_no)
        try:
            # BUG FIX: this lookup used the raw scan string (package_no),
            # which for 24-char scans includes the valid/dc codes and can
            # never match the 13-digit out_sid stored by get() above.
            lo = LogisticOrder.objects.get(out_sid=package_id)
        except LogisticOrder.DoesNotExist:
            return u'运单号未录入系统'
        try:
            float(package_weight)
        except:
            return u'重量异常:%s'%package_weight
        lo.weight     = package_weight
        lo.valid_code = valid_code
        lo.dc_code    = dc_code
        lo.save()
        # BUG FIX: the weight row must be keyed by the 13-digit waybill id
        # (as in the CSV importers), not by the raw scan string.
        tspw,state = TodaySmallPackageWeight.objects.get_or_create(
                                package_id=package_id)
        tspw.weight = package_weight
        tspw.save()
        log_action(request.user.id,lo,CHANGE,u'扫描称重')
        return {'isSuccess':True}
class BranchZoneView(ModelView):
    """Resolve the branch/zone bundling rule for a receiver address."""
    def get(self, request, *args, **kwargs):
        params = request.REQUEST
        province = params.get('province','')
        city     = params.get('city','')
        district = params.get('district','')
        address  = params.get('address','')
        # Echo the query back alongside the resolved zone.
        result = {'province':province,
                  'city':city,
                  'district':district,
                  'address':address}
        result['branch_zone'] = get_addr_zones(province,city,district,address=address)
        return result
|
from . import osinfo
from . import visitor_info
import datetime |
from future.utils import iteritems
import pytest
import unittest
import numpy as np
import pyqg
import pickle
import os
from pyqg import diagnostic_tools as diag
def test_describe_diagnostics():
    """Smoke-test: QGModel.describe_diagnostics() runs without raising."""
    model = pyqg.QGModel(1)
    model.describe_diagnostics()
def old_qgmodel_calc_paramspec(self, dqh1, dqh2):
del1 = self.del1
del2 = self.del2
F1 = self.F1
F2 = self.F2
wv2 = self.wv2
ph = self.ph
return np.real(
(del1 / (wv2 + F1 + F2) * (-(wv2 + F2) * dqh1 - F1 * dqh2) * np.conj(ph[0])) +
(del2 / (wv2 + F1 + F2) * (-F2 * dqh1 - (wv2 + F1) * dqh2) * np.conj(ph[1])) +
(del1 * F1 / (wv2 + F1 + F2) * (dqh2 - dqh1) * np.conj(ph[0] - ph[1]))
)
def test_paramspec_decomposition(rtol=1e-10):
    """Check that QGModel's 'paramspec' diagnostic matches independent
    reimplementations and decomposes into its APE- and KE-flux subterms."""
    # Initialize a model with a parameterization, step it forward and compute paramspec
    dq = np.random.normal(size=(2,64,64))
    m = pyqg.QGModel(q_parameterization = lambda m: dq)
    m._step_forward()
    m._increment_diagnostics()

    # Compute the parameterization spectrum at least two ways
    height_ratios = (m.Hi / m.H)[:,np.newaxis,np.newaxis]
    dqh = m.fft(dq)
    # (1) direct height-weighted inner product, (2) the legacy formula,
    # (3) the model's own stored diagnostic.
    ps1 = -np.real((height_ratios * np.conj(m.ph) * dqh).sum(axis=0)) / m.M**2
    ps2 = old_qgmodel_calc_paramspec(m, dqh[0], dqh[1]) / m.M**2
    ps3 = m.get_diagnostic('paramspec')

    # Ensure they're identical
    np.testing.assert_allclose(ps1, ps2, rtol=rtol)
    np.testing.assert_allclose(ps1, ps3, rtol=rtol)

    # Now test it can be decomposed into separate KE and APE components
    apeflux_term = np.einsum("ij, jk..., k... -> i...", m.S, m.a, dqh)
    keflux_term  = np.einsum("ij..., j... -> i...", m.a, dqh)
    height_ratios = (m.Hi/m.H)[:,np.newaxis,np.newaxis]
    paramspec_apeflux = -np.real(height_ratios*m.ph.conj()*apeflux_term).sum(axis=0) / m.M**2
    paramspec_keflux = m.wv2*np.real(height_ratios*m.ph.conj()* keflux_term).sum(axis=0) / m.M**2
    ps4 = paramspec_apeflux + paramspec_keflux
    np.testing.assert_allclose(ps1, ps4, rtol=rtol)

    # Test these terms match the subterms from QGModel
    np.testing.assert_allclose(paramspec_apeflux,
            m.get_diagnostic('paramspec_APEflux'), rtol=rtol)
    np.testing.assert_allclose(paramspec_keflux,
            m.get_diagnostic('paramspec_KEflux'), rtol=rtol)
def test_paramspec_additivity(rtol=1e-10):
    """Check that the 'paramspec' diagnostic is zero without a
    parameterization, nonzero with one, and additive when both q- and
    uv-parameterizations are active."""
    # Test over multiple model classes
    for model_class in [pyqg.QGModel, pyqg.LayeredModel]:
        # Initialize four models with different (deterministic) parameterizations
        m1 = model_class()
        dq = np.random.normal(size=m1.q.shape)
        du = np.random.normal(size=m1.u.shape)
        dv = np.random.normal(size=m1.v.shape)
        m2 = model_class(q_parameterization=lambda m: dq)
        m3 = model_class(uv_parameterization=lambda m: (du,dv))
        m4 = model_class(q_parameterization=lambda m: dq,
                         uv_parameterization=lambda m: (du,dv))

        # Give them the same initial conditions
        for m in [m1,m2,m3,m4]:
            m.q = m1.q

        # Step them forward and manually increment diagnostics
        for m in [m1,m2,m3,m4]:
            m._step_forward()
            m._increment_diagnostics()

        # Unparameterized model should have 0 for its parameterization spectrum
        np.testing.assert_allclose(m1.get_diagnostic('paramspec'), 0., rtol=rtol)

        # Parameterized models should have nonzero values
        # (pytest.raises inverts the allclose check: it must NOT be all zero)
        for m in [m2,m3,m4]:
            with pytest.raises(AssertionError):
                np.testing.assert_allclose(m.get_diagnostic('paramspec'), 0., rtol=rtol)

        # Model with both parameterizations should have the sum
        np.testing.assert_allclose(
            (m2.get_diagnostic('paramspec') + m3.get_diagnostic('paramspec')),
            m4.get_diagnostic('paramspec'),
            rtol=rtol)
def test_Dissspec_diagnostics(atol=1e-20):
    """Check the 'Dissspec' diagnostic against an offline recomputation of
    the spectral filter's dissipative contribution, and verify it equals
    the filtered-minus-unfiltered RHS difference."""
    # Run model for some timesteps
    dt = 3600
    tmax = dt * 1000
    m = pyqg.QGModel(tavestart=tmax, taveint=1, tmax=tmax, dt=dt)
    m.run()

    # Need to run _calc_diagnostics() once more to use the most recent state variables
    m._calc_diagnostics()

    # Calculate spectral contribution of dissipation offline
    diss_spectrum, rhs_unfiltered = np.zeros_like(m.qh), np.zeros_like(m.qh)
    ones = np.ones_like(m.filtr)

    # Get AB coefficients
    # (must mirror the model's Adams-Bashforth level so the reconstructed
    # RHS matches the step the model actually took)
    if m.ablevel==0:
        # forward euler
        dt1 = m.dt
        dt2 = 0.0
        dt3 = 0.0
    elif m.ablevel==1:
        # AB2 at step 2
        dt1 = 1.5*m.dt
        dt2 = -0.5*m.dt
        dt3 = 0.0
    else:
        # AB3 from step 3 on
        dt1 = 23./12.*m.dt
        dt2 = -16./12.*m.dt
        dt3 = 5./12.*m.dt

    for k in range(m.nz):
        rhs_unfiltered[k] = m.qh[k] + dt1*m.dqhdt[k] + dt2*m.dqhdt_p[k] + dt3*m.dqhdt_pp[k]
        # Dissipation is exactly what the spectral filter removes from the RHS.
        diss_spectrum[k] = (m.filtr - ones) * rhs_unfiltered[k]

    diss_contribution = -np.real(np.tensordot(m.Hi, np.conj(m.ph)*diss_spectrum, axes=(0, 0)))/m.H/m.dt/m.M**2
    diss_contribution_model = m.get_diagnostic('Dissspec')

    # Ensure that the above calculation is consistent with the model's internal calculation
    np.testing.assert_allclose(diss_contribution, diss_contribution_model, atol=atol)

    # Obtain filtered contribution, which is used in the model
    qh_new = m.qh.copy()
    for k in range(m.nz):
        qh_new[k] = m.filtr * rhs_unfiltered[k]

    rhs_contribution_filtered = -np.real(np.tensordot(m.Hi, np.conj(m.ph)*qh_new, axes=(0, 0)))/m.H/m.dt/m.M**2
    rhs_contribution_unfiltered = -np.real(np.tensordot(m.Hi, np.conj(m.ph)*rhs_unfiltered, axes=(0, 0)))/m.H/m.dt/m.M**2

    # Ensure that the difference between the filtered contribution and the unfiltered contribution is
    # completely the effect of dissipation
    np.testing.assert_allclose(diss_contribution_model,
                               rhs_contribution_filtered - rhs_contribution_unfiltered,
                               atol=atol)
def test_diagnostic_magnitude():
    """Check each diagnostic has the same order of magnitude at two grid
    resolutions -- a mismatch usually means a missing 1/M**2 normalization."""
    # Load a set of pre-run fixture models from
    # examples/diagnostic_normalization.ipynb (running from scratch would take
    # a bit too long for a test)
    fixtures_path = f"{os.path.dirname(os.path.realpath(__file__))}/fixtures"

    with open(f"{fixtures_path}/LayeredModel_params.pkl", 'rb') as f:
        # Common set of parameters for each model
        params = pickle.load(f)

    m1 = pyqg.LayeredModel(nx=96, **params)
    m2 = pyqg.LayeredModel(nx=64, **params)

    m1.q = np.load(f"{fixtures_path}/LayeredModel_nx96_q.npy")
    m2.q = np.load(f"{fixtures_path}/LayeredModel_nx64_q.npy")

    for m in [m1, m2]:
        m._invert()
        m._calc_derived_fields()

    # Loop through all diagnostics
    for diagnostic in m1.diagnostics.keys():
        # Dissspec is excluded: it is not comparable with this procedure.
        if diagnostic == 'Dissspec':
            continue
        # Get the maximum-magnitude instantaneous value of each diagnostic,
        # re-evaluating the function rather than relying on any saved
        # diagnostics (gives a rough idea of order of magnitude)
        max_hi = np.abs(m1.diagnostics[diagnostic]['function'](m1)).max()
        max_lo = np.abs(m2.diagnostics[diagnostic]['function'](m2)).max()

        if max_lo == 0:
            assert max_hi == 0
        else:
            # Ensure they're the same order of magnitude -- no more than a
            # factor of 3 different. If these assertions fail for a new
            # diagnostic, you're probably missing a division by M**2.
            assert max_hi/max_lo < 3, f"{diagnostic} should be normalized"
            assert max_hi/max_lo > 0.33, f"{diagnostic} should be normalized"
|
# Django project settings file
# This file should be part of the svn repository of the project and should not
# contains any site-specific information.
# site-specific information (database name/login/password for example) should be
# in the settings_local.py file and should not be added to the svn repository
import os
SITE_ID = 1
USE_I18N = True
LANGUAGE_CODE = 'en-ca'
# Dummy gettext so language names below are marked for translation without
# importing Django's translation machinery at settings-load time.
gettext = lambda s: s
LANGUAGES = (('en-ca', gettext('English')),)
# NOTE(review): a secret API key is committed here -- per the header above it
# belongs in settings_local.py, not in version control.
AKISMET_API_KEY = '21adb0516170'
BLOG_AUTHOR_ID = 1
# List of callables that know how to import templates from various sources.
# NOTE(review): TEMPLATE_LOADERS / TEMPLATE_CONTEXT_PROCESSORS /
# MIDDLEWARE_CLASSES are settings names from old Django versions -- confirm
# the Django version pinned for this project before upgrading.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.media',
    'django.core.context_processors.request',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.middleware.doc.XViewMiddleware',
    'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
)
ROOT_URLCONF = 'urls'
# Paths are derived from this file's location so checkouts are relocatable.
PROJECT_PATH = os.path.realpath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.join(PROJECT_PATH, 'media/')
TEMPLATE_DIRS = (
    os.path.join(PROJECT_PATH, 'templates/'),
)
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.admindocs',
    'django.contrib.auth',
    'django.contrib.comments',
    'django.contrib.contenttypes',
    'django.contrib.flatpages',
    'django.contrib.humanize',
    'django.contrib.markup',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django_extensions',
    'sorl.thumbnail',
    'south',
    'tagging',
    'attachments',
    'blog',
)
# import local settings overriding the defaults
try:
    from settings_local import *
except ImportError:
    # Missing settings_local is tolerated: log a notice via mod_python when
    # running under Apache, otherwise write a warning to stderr.
    try:
        from mod_python import apache
        apache.log_error( "local settings not available", apache.APLOG_NOTICE )
    except ImportError:
        import sys
        sys.stderr.write( "local settings not available\n" )
|
#!/usr/bin/python
# -*- coding:UTF-8 -*-
import requests
import json
import sys
# Require at least one CLI argument: the bus-stop (paradero) code.
if len(sys.argv) < 2:
    sys.exit()
paradero = ' '.join(sys.argv[1:])
rs = requests.session()
url = 'http://www.transantiago.cl/predictor/prediccion?codsimt='
ser = '&codser='
url_final = '%s%s%s' % (url, paradero.upper(), ser)
# Fetch the prediction JSON. Fix: `web` was used below without ever being
# assigned (NameError); perform the HTTP GET that was clearly intended.
web = rs.get(url_final)
micro = web.json()
x = 0         # index of the service currently being reported
busesno = []  # accumulator kept for backward compatibility (unused below)
def disponibilidad(x):
if '00' == micro['servicios']['item'][x]['codigorespuesta'] and '' == micro['servicios']['item'][x]['respuestaServicio']:
recorrido(x)
sec_recorrido(x)
elif '01' == micro['servicios']['item'][x]['codigorespuesta'] and '' == micro['servicios']['item'][x]['respuestaServicio']:
recorrido(x)
elif '10' == micro['servicios']['item'][x]['codigorespuesta'] and "No hay buses que se dirijan al paradero." == micro['servicios']['item'][x]['respuestaServicio']:
print 'No hay buses que se dirijan al paradero.'
nodisponibilidad(x)
print '---------------------------------'
elif '11' == micro['servicios']['item'][x]['codigorespuesta'] and "Fuera de horario de operacion para este paradero" == micro['servicios']['item'][x]['respuestaServicio']:
frecuencia(x)
print '---------------------------------'
def nodisponibilidad(x):
print 'Servicio:',micro['servicios']['item'][x]['servicio'].encode('ascii')
return
def frecuencia(x):
print 'Servicio:',micro['servicios']['item'][x]['servicio']
print 'Frecuencia:',micro['servicios']['item'][x]['respuestaServicio']
return
def sec_recorrido(x):
print 'Aproxima micro!!!'
print 'Servicio:',micro['servicios']['item'][x]['servicio']
print 'Destino:',micro['servicios']['item'][x]['destino']
print 'Patente:',micro['servicios']['item'][x]['ppubus2']
print 'Cuando llega:',micro['servicios']['item'][x]['horaprediccionbus2']
print '--------------------------------'
return
def recorrido(x):
print 'Servicio:',micro['servicios']['item'][x]['servicio']
print 'Destino:',micro['servicios']['item'][x]['destino']
print 'Patente:',micro['servicios']['item'][x]['ppubus1']
print 'Cuando llega:',micro['servicios']['item'][x]['horaprediccionbus1']
print '--------------------------------'
return
print 'En cuanto tiempo llegara tu micro al paradero?'
if 'Paradero invalido.' == micro['respuestaParadero']:
print 'Paradero:Invalido'
else:
print 'Paradero:',micro['paradero']
print 'Ubicacion:',micro['nomett']
print 'Hora:',micro['horaprediccion']
print '************************************'
while x < len(micro['servicios']['item']):
disponibilidad(x)
x = x + 1
|
## @file MoleculeT.py
# @author Christopher Andrade
# @brief A Python file containing a single class called "MoleculeT"
# that inherits two classes called "ChemEntity" and "Equality"
# @date Monday, February 3, 2020
from ChemEntity import ChemEntity
from Equality import Equality
from ElmSet import ElmSet
## @brief A class "MoleculeT" inheriting class "ChemEntity" and class
#         "Equality" to create a chemical molecule type object from
#         a natural number and an "ElementT" object
class MoleculeT(ChemEntity, Equality):
    ## @brief A constructor that takes in two parameters "n" and "e" and
    #         creates a "MoleculeT" instance from them
    #  @param n An input parameter that is a natural number used to represent
    #         the number of atoms in the "MoleculeT" instance
    #  @param e An input parameter that is an "ElementT" object used to represent
    #         the element in the "MoleculeT" instance
    def __init__(self, n, e):
        self.__num = n
        self.__elm = e

    ## @brief A method "get_num" that returns a natural number representing the number
    #         of atoms in the "MoleculeT" instance
    #  @return Returns the state variable representing the number of atoms in the
    #          "MoleculeT" instance
    def get_num(self):
        return self.__num

    ## @brief A method "get_elm" that returns an "ElementT" object representing the
    #         element in the "MoleculeT" instance
    #  @return Returns the state variable representing the element, an "ElementT"
    #          object, in the "MoleculeT" instance
    def get_elm(self):
        return self.__elm

    ## @brief A method "num_atoms" that takes in one parameter "e" of type "ElementT"
    #         and will return a natural number
    #  @param e An input parameter of type "ElementT" to be compared to the
    #         element in the "MoleculeT" instance
    #  @return Returns a natural number representing how many atoms of the input parameter
    #          "e", an "ElementT" object, are in the "MoleculeT" instance
    def num_atoms(self, e):
        # A molecule of a single element either contains all its atoms of e
        # or none of them; a conditional expression replaces the if/else.
        return self.get_num() if e == self.get_elm() else 0

    ## @brief A method "constit_elems" that returns an "ElmSet" object
    #  @return Returns an "ElmSet" set object of "ElementT" objects that represents what
    #          elements the "MoleculeT" instance consists of
    def constit_elems(self):
        return ElmSet([self.get_elm()])

    ## @brief A method "equals" that takes in one parameter "m", a "MoleculeT" object,
    #         and checks if "m" has the same element and number of element as the
    #         "MoleculeT" instance
    #  @param m An input parameter that is a "MoleculeT" object and will
    #         be compared to the "MoleculeT" instance
    #  @return Returns True if "m" has the same "ElementT" state variable and the
    #          same number-of-atoms state variable as this instance, else False
    def equals(self, m):
        # Return the boolean expression directly instead of an if/else that
        # returns True/False.
        return (self.get_elm() == m.get_elm()) and (self.get_num() == m.get_num())
|
from datetime import datetime, timedelta
import folder_of_meanings.sup_functions as sup
def generate_answer(date, need_date, today, c):
    """Build a human-readable answer about classes scheduled on need_date.

    date is either a textual date or a numeric day offset, need_date is the
    timetable key to look up, today is today's key in the same format, and c
    is a database cursor over a `timetable` table.
    """
    rows = list(c.execute('''SELECT * FROM timetable WHERE date=? ''', (need_date,)))
    if not rows:
        # Unknown date: return an empty answer.
        return ""
    lessons = rows[0][1:]
    if sup.check_weekend(lessons):
        return "В этот день выходной"
    if type(date) == str or (type(date) != str and date > 0):
        # Named or future date: report the campus of the first real lesson.
        for lesson in lessons:
            if lesson != "---":
                return "Пары в " + sup.understand_what_campus(lesson)
        return ""
    if need_date == today:
        for lesson in lessons:
            if lesson != "---":
                return "Сегодня пары в " + sup.understand_what_campus(lesson)
        return ""
    return "Вероятно это выходной"
|
import RPi.GPIO as GPIO
import time
import requests
from num2words import num2words
from subprocess import call
# espeak command fragments; stderr is discarded to silence espeak chatter.
cmd_beg = 'espeak '
cmd_end = ' 2>/dev/null'
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
GPIO.setup(11, GPIO.IN)  # Read output from PIR motion sensor
x = time.strftime("%H")  # to obtain system time in hours ("00".."23")
y = int(x)
while True:
    i = GPIO.input(11)
    if i == 0:  # When output from motion sensor is LOW
        print ("No intruders")
        time.sleep(1)
    elif i == 1:  # When output from motion sensor is HIGH
        print ("Intruder detected")
        # Fix: re-read the clock on each detection so a long-running session
        # greets according to the current hour, not the start-up hour.
        y = int(time.strftime("%H"))
        if y < 12:
            text = "Good Morning Welcome to baker and grey"
        elif y >= 12 and y < 16:
            text = "Good afternoon Welcome to baker and grey"
        elif y >= 16 and y < 20:
            text = "Good Evening Welcome to baker and grey"
        else:
            # Fix: 20:00-23:59 previously assigned no greeting, so the
            # text.replace() below raised NameError on the first detection.
            text = "Good Night Welcome to baker and grey"
        print (text)
        text = text.replace(' ', '_')
        # Calls the Espeak TTS Engine to read aloud the greeting.
        call([cmd_beg+text+cmd_end], shell=True)
        requests.post("https://maker.ifttt.com/trigger/YOUR-EVENT-NAME/with/key/YOUR-SECRET-KEY") #replace YOUR-EVENT-NAME and YOUR-SECRET-KEY
        time.sleep(2)
|
# coding: utf8
# Ferran March Azañero
# 09/02/2018
numero1=input ("Introduce el primer numero:")
numero2=input ("Introduce el segundo numero:")
if(numero1>numero2):
print "El numero1 es mas grande que el numero2"
else:
if(numero2>numero1):
print "El numero2 es mas grande que el numero1"
else:
print "Los numeros son iguales"
|
from pandas import read_csv
from pandas import DataFrame
from pandas import concat
from matplotlib import pyplot
from pandas.plotting import lag_plot
from pandas.plotting import autocorrelation_plot
from sklearn.metrics import mean_squared_error
from math import sqrt
from statsmodels.graphics.tsaplots import plot_acf
from statsmodels.tsa.ar_model import AR
from constants import *
# NOTE(review): "\warsaw.csv" works only because "\w" is not a recognized
# string escape (the backslash stays literal); prefer os.path.join or a raw
# string to avoid surprises.
series = read_csv(DESTINATION_FOLDER + "\warsaw.csv", header=0, index_col=0, parse_dates=True, squeeze=True)
# create a lag feature
temps = DataFrame(series.values)
# dataframe = concat([temps.shift(3), temps.shift(2), temps.shift(1), temps], axis=1)
# dataframe.columns = ['t-2', 't-1', 't', 't+1']
# print(dataframe.head(5))
# lag plot of time series
# lag_plot(series)
# pyplot.show()
# correlation of lag=1
values = DataFrame(series.values)
dataframe = concat([values.shift(1), values], axis=1)
dataframe.columns = ['t-1', 't']
result = dataframe.corr()
print(result)
# autocorrelation plot of time series
# autocorrelation_plot(series)
# pyplot.show()
# plot_acf(series, lags=31)
# pyplot.show()
# evaluate a persistence model
# create lagged dataset
# dataframe = concat([temps.shift(1), temps], axis=1)
# dataframe.columns = ['t', 't+1']
# # split into train and test sets
# X = dataframe.values
# train, test = X[1:len(X) - 7], X[len(X) - 7:]
# train_X, train_y = train[:, 0], train[:, 1]
# test_X, test_y = test[:, 0], test[:, 1]
# persistence model
# def model_persistence(x):
#     return x
#
# # walk-forward validation
# predictions = list()
# for x in test_X:
#     yhat = model_persistence(x)
#     predictions.append(yhat)
# rmse = sqrt(mean_squared_error(test_y, predictions))
# print('Test RMSE: %.3f' % rmse)
#
# # plot predictions vs expected
# pyplot.plot(test_y)
# pyplot.plot(predictions, color='red')
# pyplot.show()
# create and evaluate a static autoregressive model - fatalne wyniki
# split dataset
# (this train/test split on the raw series is unused by the active code below)
X = series.values
train, test = X[1:len(X)-100], X[len(X)-100:]
# train autoregression
# model = AR(train)
# model_fit = model.fit()
# print('Lag: %s' % model_fit.k_ar)
# print('Coefficients: %s' % model_fit.params)
#
# # make predictions
# predictions = model_fit.predict(start=len(train), end=len(train)+len(test)-1, dynamic=False)
# for i in range(len(predictions)):
#     print('predicted=%f, expected=%f' % (predictions[i], test[i]))
# rmse = sqrt(mean_squared_error(test, predictions))
# print('Test RMSE: %.3f' % rmse)
#
# # plot results
# pyplot.plot(test)
# pyplot.plot(predictions, color='red')
# pyplot.show()
# calculate residual errors for a persistence forecast model Test RMSE: 2.346
# load data
# create lagged dataset
# values = DataFrame(series.values)
# dataframe = concat([values.shift(1), values], axis=1)
# dataframe.columns = ['t', 't+1']
# split into train and test sets
# NOTE: reuses the `dataframe` built in the lag-1 correlation section above
# (columns 't-1', 't'), overwriting the X/train/test split created just above.
X = dataframe.values
train_size = int(len(X) * 0.66)
train, test = X[1:train_size], X[train_size:]
train_X, train_y = train[:, 0], train[:, 1]
test_X, test_y = test[:, 0], test[:, 1]
# persistence model
predictions = [x for x in test_X]
# skill of persistence model
rmse = sqrt(mean_squared_error(test_y, predictions))
print('Test RMSE: %.3f' % rmse)
# calculate residuals
residuals = [test_y[i]-predictions[i] for i in range(len(predictions))]
residuals = DataFrame(residuals)
print(residuals.head())
print("Koniec")
|
'''
Problem Statement
Given an array with positive numbers and a target number, find all of its contiguous subarrays whose product is less than the target number.
Example 1:
Input: [2, 5, 3, 10], target=30
Output: [2], [5], [2, 5], [3], [5, 3], [10]
Explanation: There are six contiguous subarrays whose product is less than the target.
Example 2:
Input: [8, 2, 6, 5], target=50
Output: [8], [2], [8, 2], [6], [2, 6], [5], [6, 5]
Explanation: There are seven contiguous subarrays whose product is less than the target.
'''
from collections import deque
def find_subarray(nums, target):
    """Return every contiguous subarray of nums whose product is < target.

    Sliding window over positive numbers: extend the right edge, shrink from
    the left while the running product is too large, then emit each subarray
    ending at the right edge (shortest first, growing leftwards).
    """
    subarrays = []
    left = 0
    product = 1
    for right, value in enumerate(nums):
        product *= value
        # Shrink the window until its product drops below target.
        while product >= target and left < len(nums):
            product /= nums[left]
            left += 1
        # All subarrays nums[start:right+1] for start in [left, right] qualify.
        for start in range(right, left - 1, -1):
            subarrays.append(nums[start:right + 1])
    return subarrays
def main():
    """Run the two example cases from the problem statement."""
    for nums, target in ([2, 5, 3, 10], 30), ([8, 2, 6, 5], 50):
        print(find_subarray(nums, target))


if __name__ == "__main__":
    main()
|
from ctypes import *
from numpy import *
import time
import scipy
import sys
import traceback
import scipy.optimize
import os
#*************************************************************************
#* BELOW is the post-processing of acquired traces by the mathematical dll
#* Acqiris_QuantroDLLMath1, which is based on the GSL library
#* (see dll functions syntax below)
#*************************************************************************
class DLLMath1Module():
    """
    Post-processing of acquired Acqiris traces through the GSL-based
    'acqiris_QuantroDLLMath1.dll'.

    Wraps the DLL's per-segment statistics (min/max/mean/boxcar/variance/
    sdev), covariance and eigen analysis, thresholding and 1D/2D
    histogramming, caching results in per-channel attribute lists.
    Python 2 code (print statements); runs on Windows (windll).
    """
    def __init__(self, acqirisInstr): # creator
        """
        Load the 'acqiris_QuantroDLLMath1.dll' DLL and make it an attribute of the DLLMath1Module object .
        """
        self.acqirisInstr = acqirisInstr  # define the acqiris instrument as an attribute of the module
        # so that the module can access all the acqiris attributes.
        try:
            print "\nLoading GSL based Mathematical DLL 'acqiris_QuantroDLLMath1.dll'"
            # DLL is expected to sit next to this source file.
            self._dll = windll.LoadLibrary(os.path.dirname(
                os.path.abspath(__file__)) + '/acqiris_QuantroDLLMath1.dll')
            # test with C function WINAPI mathDLLVersion
            print "acqiris_QuantroDLLMath1 version is %s" % self.mathDLLVersion()
        except:
            print "Cannot load DLL 'acqiris_QuantroDLLMath1.dll'"
            raise
        # Names of the per-segment property methods exposed by this module.
        self.segmentProperties = [
            "minOfLastWaveForms",
            "maxOfLastWaveForms",
            "meanOfLastWaveForms",
            "boxcarOfLastWaveForms",
            "varianceOfLastWaveForms",
            "sdevOfLastWaveForms"]
        # For each property: a 4-element result list (one slot per channel),
        # the DLL return type, and the acquisition id of the last computation.
        self.minArray = [None] * 4
        self._dll.minArray.restype = c_double
        self.minArrayLastID = [-1, -1, -1, -1]
        self.maxArray = [None] * 4
        self._dll.maxArray.restype = c_double
        self.maxArrayLastID = [-1, -1, -1, -1]
        self.mean = [None] * 4
        self._dll.mean.restype = c_double
        self.meanLastID = [-1, -1, -1, -1]
        self.boxcarSlice = [None] * 4
        self.boxcarMean = [None] * 4
        self._dll.boxCar.restype = c_double
        self.boxcarLastID = [-1, -1, -1, -1]
        self.variance = [None] * 4
        self._dll.variance.restype = c_double
        self.varianceLastID = [-1, -1, -1, -1]
        self.sdev = [None] * 4
        self._dll.sdev.restype = c_double
        self.sdevLastID = [-1, -1, -1, -1]
        self.cov = None
        self._dll.cov.restype = c_double
        self.covMatrix = None  # None or 1D numpy array of size 4 times the number of segments
        # [ Seg1_var1, Seg1_cov12, Seg1_cov12, Seg1_var2,
        #   Seg2_var1, Seg2_cov12, Seg2_cov12, Seg2_var2,
        # ...]
        self.eigenVal = None  # None or 1D numpy array of size 2 times the number of segments
        # [ Seg1_Val1, Seg1_Val2,
        #   Seg2_Val1, Seg2_Val2,
        # ...]
        self.eigenVec = None  # None or 1D numpy array of size 4 times the number of segments
        # [ Seg1_Vec1_X, Seg1_Vec1_Y, Seg1_Vec2_X, Seg1_Vec2_Y,
        #   Seg2_Vec1_X, Seg2_Vec1_Y, Seg2_Vec2_X, Seg2_Vec2_Y,
        # ...]
        self.aboveThresholdFrequencyArray = [None] * 4
        self._dll.aboveThresholdFrequency.restype = c_double
        # List of four 1D histograms initialized to None
        self.histoArray = [None] * 4
        # List of four 1D bin center arrays initialized to None
        self.binCentersArray = [None] * 4
        # List of two 2D histograms initialized to None
        self.histo2DArray = [None] * 2
        # List of two 2D bin center arrays initialized to None
        self.binCentersXYArray = [None] * 2
    def mathDLLVersion(self):
        """
        Returns the version of the DLL.
        """
        versionString = c_char_p(" ")
        self._dll.mathDLLVersion(versionString)
        return versionString.value
    def mathDLLHelp(self, functionName=""):
        """
        Fill the passed string helpString of length helpStringLength with a help text on function functionName if it exists or on all function if functionName="".
        """
        helpString = create_string_buffer(1000)
        self._dll.mathDLLHelp(functionName, helpString, sizeof(helpString))
        return helpString.value
    def _propertyOfLastWaveForms(self, dllAndArrayName, targettedWaveform=15):
        """
        private generic function being used to compute a real property of type double per segment.
        Can be used for mean, variance, sdev, min, max, boxcar ...
        use dllAndArrayName as the name of the dll and of the array in which results are stored
        """
        lastIdentifier = self.acqirisInstr.lastWaveIdentifier
        lastTranferred = self.acqirisInstr.lastTransferredChannel
        lastAveraged = self.acqirisInstr.lastTransferAverage
        nbrSamplesPerSeg = self.acqirisInstr.lastNbrSamplesPerSeg
        # Averaged transfers collapse all segments into one per channel.
        if lastAveraged:
            nbrSegmentArray = [1, 1, 1, 1]
        else:
            nbrSegmentArray = self.acqirisInstr.lastNbrSegmentsArray
        for i in range(4):
            # Process channel i only if it was both transferred and targetted.
            if lastTranferred & targettedWaveform & (1 << i):
                nbrSegment = nbrSegmentArray[i]
                getattr(self, dllAndArrayName)[i] = zeros(nbrSegment)
                for j in range(nbrSegment):
                    # Raw ctypes pointer to the start of segment j.
                    startAddress = self.acqirisInstr.lastWaveformArray[
                        i][j * nbrSamplesPerSeg:].ctypes.data
                    f = getattr(self._dll, dllAndArrayName)
                    getattr(self, dllAndArrayName)[i][j] = f(
                        startAddress, c_long(nbrSamplesPerSeg))
                getattr(self, dllAndArrayName + "LastID")[i] = lastIdentifier
            else:
                getattr(self, dllAndArrayName)[i] = None
        return
    def minOfLastWaveForms(self, targettedWaveform=15):
        """
        minOfLastWaveForms(targettedWaveform):
        Finds the minima of the targetted lastly acquired waveforms (for each segment).
        TargettedWaveform encodes the waveforms to be processed (provided they were acquired at the last acquisition)
        TargettedWaveform is the Sum(an 2^n) for n=0 to 3 the channel number and an=1 for targetted channels and zero otherwise
        Stores the results in the 4 element python array min.
        """
        self._propertyOfLastWaveForms(
            dllAndArrayName='minArray', targettedWaveform=targettedWaveform)
        return
    def maxOfLastWaveForms(self, targettedWaveform=15):
        """
        maxOfLastWaveForms(targettedWaveform):
        Finds the maxima of the targetted lastly acquired waveforms (for each segment).
        TargettedWaveform encodes the waveforms to be processed (provided they were acquired at the last acquisition)
        TargettedWaveform is the Sum(an 2^n) for n=0 to 3 the channel number and an=1 for targetted channels and zero otherwise
        Stores the results in the 4 element python array max.
        """
        self._propertyOfLastWaveForms(
            dllAndArrayName='maxArray', targettedWaveform=targettedWaveform)
        return
    def meanOfLastWaveForms(self, targettedWaveform=15):
        """
        meanOfLastWaveForms(targettedWaveform):
        Computes the means of the targetted lastly acquired waveforms (for each segment).
        TargettedWaveform encodes the waveforms to be processed (provided they have been acquired at the last acquisition)
        TargettedWaveform is the Sum(an 2^n) for n=0 to 3 the channel number and an=1 for targetted channels and zero otherwise
        Stores the results in the 4 element python array mean.
        """
        self._propertyOfLastWaveForms(
            dllAndArrayName='mean', targettedWaveform=targettedWaveform)
        return
    # NOTE(review): mutable default argument (the slice list is shared across
    # calls); harmless here only because the method never mutates it.
    def boxcarOfLastWaveForms(self, targettedWaveform=15, sliceArray=[slice(0, -1, 1)] * 4):
        """
        boxcarOfLastWaveForms(targettedWaveform):
        Computes the boxecar means of the targetted lastly acquired waveforms (for each segment).
        TargettedWaveform encodes the waveforms to be processed (provided they have been acquired at the last acquisition)
        TargettedWaveform is the Sum(an 2^n) for n=0 to 3 the channel number and an=1 for targetted channels and zero otherwise
        Stores the results in the 4 element python array boxcarMean.
        """
        lastTranferred = self.acqirisInstr.lastTransferredChannel
        targettedWaveform = lastTranferred & targettedWaveform
        lastAveraged = self.acqirisInstr.lastTransferAverage
        nbrSamplesPerSeg = self.acqirisInstr.lastNbrSamplesPerSeg
        if lastAveraged:
            nbrSegmentArray = [1, 1, 1, 1]
        else:
            nbrSegmentArray = self.acqirisInstr.lastNbrSegmentsArray
        for i in range(4):
            if targettedWaveform & (1 << i):
                nbrSegment = nbrSegmentArray[i]
                self.boxcarMean[i] = zeros(nbrSegment)
                # Negative slice bounds count from the end of a segment.
                start = sliceArray[i].start
                if start < 0:
                    start = nbrSamplesPerSeg + start
                stop = sliceArray[i].stop
                if stop < 0:
                    stop = nbrSamplesPerSeg + stop
                if start >= 0 and start < nbrSamplesPerSeg and stop >= 0 and stop < nbrSamplesPerSeg and start <= stop:
                    self.boxcarSlice[i] = slice(start, stop, 1)
                    for j in range(nbrSegment):
                        startAddress = self.acqirisInstr.lastWaveformArray[
                            i][j * nbrSamplesPerSeg:].ctypes.data
                        self.boxcarMean[i][j] = self._dll.boxCar(
                            startAddress, c_long(start), c_long(stop))
                else:
                    # Invalid slice for this channel: store no result.
                    self.boxcarSlice[i] = None
                    self.boxcarMean[i] = None
            else:
                self.boxcarMean[i] = None
                self.boxcarSlice[i] = None
        return
    def varianceOfLastWaveForms(self, targettedWaveform=15):
        """
        varianceOfLastWaveForms(targettedWaveform):
        Computes the variances of the targetted lastly acquired waveforms (for each segment).
        TargettedWaveform encodes the waveforms to be processed (provided they have been acquired at the last acquisition)
        TargettedWaveform is the Sum(an 2^n) for n=0 to 3 the channel number and an=1 for targetted channels and zero otherwise
        Stores the results in the 4 element python array variance.
        """
        self._propertyOfLastWaveForms(
            dllAndArrayName='variance', targettedWaveform=targettedWaveform)
        return
    def sdevOfLastWaveForms(self, targettedWaveform=15):
        """
        sdevOfLastWaveForms(targettedWaveform):
        Computes the unbiased standard deviation of the targetted lastly acquired waveforms (for each segment).
        TargettedWaveform encodes the waveforms to be processed (provided they have been acquired at the last acquisition)
        TargettedWaveform is the Sum(an 2^n) for n=0 to 3 the channel number and an=1 for targetted channels and zero otherwise
        Stores the results in the 4 element python array sdev.
        """
        self._propertyOfLastWaveForms(
            dllAndArrayName='sdev', targettedWaveform=targettedWaveform)
        return
    def covarianceTwoWaveforms(self, waveform1=0, waveform2=1):
        """
        covarianceTwoWaveforms(waveform1,waveform2):
        Computes the covariance of two lastly acquired waveforms (for each segment).
        Waveform1 and waveform 2 are the channel number 0,1,2,or 3
        Stores the results in cov.
        """
        lastTranferred = self.acqirisInstr.lastTransferredChannel
        lastAveraged = self.acqirisInstr.lastTransferAverage
        nbrSamplesPerSeg = self.acqirisInstr.lastNbrSamplesPerSeg
        if lastAveraged:
            nbrSegmentArray = [1, 1, 1, 1]
        else:
            nbrSegmentArray = self.acqirisInstr.lastNbrSegmentsArray
        # Both channels must have the same segment count and have been
        # transferred at the last acquisition.
        sameLength = nbrSegmentArray[waveform1] == nbrSegmentArray[waveform2]
        waveform1Transferred = lastTranferred & (1 << waveform1)
        waveform2Transferred = lastTranferred & (1 << waveform2)
        if sameLength and waveform1Transferred and waveform2Transferred:
            nbrSegment = nbrSegmentArray[waveform1]
            self.cov = zeros(nbrSegment)
            for j in range(nbrSegment):
                startAddress1 = self.acqirisInstr.lastWaveformArray[
                    waveform1][j * nbrSamplesPerSeg:].ctypes.data
                startAddress2 = self.acqirisInstr.lastWaveformArray[
                    waveform2][j * nbrSamplesPerSeg:].ctypes.data
                self.cov[j] = self._dll.cov(
                    startAddress1, startAddress2, c_long(nbrSamplesPerSeg))
        else:
            self.cov = None
        return
    def covMatrixTwoWaveforms(self, waveform1=0, waveform2=1):
        """
        covMatrixTwoWaveforms(waveform1,waveform2):
        Computes the covariance matrices of two lastly acquired waveforms (for each segment).
        Waveform1 and waveform 2 are the channel number 0,1,2,or 3
        Stores the results in covMatrix.
        """
        lastTranferred = self.acqirisInstr.lastTransferredChannel
        lastAveraged = self.acqirisInstr.lastTransferAverage
        nbrSamplesPerSeg = self.acqirisInstr.lastNbrSamplesPerSeg
        if lastAveraged:
            nbrSegmentArray = [1, 1, 1, 1]
        else:
            nbrSegmentArray = self.acqirisInstr.lastNbrSegmentsArray
        sameLength = nbrSegmentArray[waveform1] == nbrSegmentArray[waveform2]
        waveform1Transferred = lastTranferred & (1 << waveform1)
        waveform2Transferred = lastTranferred & (1 << waveform2)
        if sameLength and waveform1Transferred and waveform2Transferred:
            nbrSegment = nbrSegmentArray[waveform1]
            # Flat layout: 4 doubles (2x2 matrix) per segment.
            self.covMatrix = zeros(4 * nbrSegment)
            for j in range(nbrSegment):
                pointer1 = self.acqirisInstr.lastWaveformArray[
                    waveform1][j * nbrSamplesPerSeg:].ctypes.data
                pointer2 = self.acqirisInstr.lastWaveformArray[
                    waveform2][j * nbrSamplesPerSeg:].ctypes.data
                pointer3 = self.covMatrix[4 * j:].ctypes.data
                self._dll.covMatrix2(pointer1, pointer2,
                                     c_long(nbrSamplesPerSeg), pointer3)
        else:
            self.covMatrix = None
        return
    def diagCovMatrix(self):
        """
        diagCovMatrix():
        Diagonalize the covariance matrices stored in covMatrix and stores the eigenvalues in eigenVal
        and the eigenvectors in eigenVec.
        """
        if self.covMatrix is not None:
            # copy the covariance matrices because the GSL function destroys
            # its source
            # NOTE(review): this is an alias, not a copy -- if the GSL routine
            # really destroys its input, self.covMatrix is corrupted here;
            # a .copy() was presumably intended. Confirm against the DLL.
            covMatrixCopy = self.covMatrix
            print covMatrixCopy
            nbrsegment = len(covMatrixCopy) / 4
            self.eigenVal = zeros(2 * nbrsegment)
            self.eigenVec = zeros(4 * nbrsegment)
            for j in range(nbrsegment):
                covMatPtr = covMatrixCopy[j * 4:].ctypes.data
                eigValPtr = self.eigenVal[j * 2:].ctypes.data
                eigVecPtr = self.eigenVec[j * 4:].ctypes.data
                self._dll.eigenSystemSym(
                    covMatPtr, c_long(4), eigValPtr, eigVecPtr)
        else:
            self.eigenVal = None
            self.eigenVec = None
        return
    def modulusTwoWaveforms(self, targettedWaveform=15):
        """
        modulusTwoWaveforms(targettedWaveform=15):
        Calculate the modulus (xi^2+yi^2)^1/2 using for x and y the first two targetted waveforms if targettedWaveform < 15,
        or using both ch1 and ch2 for modulus 1 and ch3 and ch4 for modulus 2 if targettedWaveform = 15.
        Results are overwritten in the first waveform channel of each pair.
        """
        lastTranferred = self.acqirisInstr.lastTransferredChannel
        targettedWaveform = lastTranferred & targettedWaveform
        lastAveraged = self.acqirisInstr.lastTransferAverage
        waveSizes = self.acqirisInstr.lastWaveformArraySizes
        i = 0
        # NOTE(review): the inner scans allow i to reach 4/5 ("i <= 4"), so
        # waveform1 can be out of range; the author flagged this as
        # undebugged below -- verify bounds before relying on this method.
        while i < 4:
            # Scan for the first targetted channel of the pair.
            while (not(targettedWaveform & (1 << i))and i <= 4):
                i += 1
            waveform1 = i
            i += 1
            # Scan for the second targetted channel of the pair.
            while (not(targettedWaveform & (1 << i))and i <= 4):
                i += 1
            if i < 4:
                waveform2 = i
                pointer1 = self.acqirisInstr.lastWaveformArray[
                    waveform1].ctypes.data
                pointer2 = self.acqirisInstr.lastWaveformArray[
                    waveform2].ctypes.data
                size1 = min(waveSizes[waveform1], waveSizes[waveform2])
                self._dll.modulus(pointer1, pointer2, pointer1, c_long(size1))
            i += 1
        print "modulusTwoWaveforms() not debugged yet"
        return
    def thresholderOfLastWaveForms(self, threshold="auto", targettedWaveform=15):
        """
        thresholderOfLastWaveForms(threshold='auto',targettedWaveform=15):
        Overwrite values of dataArray with 0's or 1's if value is below and strictly above the threshold, respectively
        """
        lastTranferred = self.acqirisInstr.lastTransferredChannel
        targettedWaveform = lastTranferred & targettedWaveform
        lastAveraged = self.acqirisInstr.lastTransferAverage
        waveSizes = self.acqirisInstr.lastWaveformArraySizes
        for i in range(4):
            if targettedWaveform & (1 << i):
                # NOTE(review): `waveform1` is undefined in this method (the
                # loop variable `i` was presumably intended) -- this raises
                # NameError at runtime; the author marked it undebugged below.
                pointer = self.acqirisInstr.lastWaveformArray[
                    waveform1].ctypes.data
                size = c_long(waveSizes[i])
                threshold1 = threshold
                if threshold == "auto":
                    # Auto threshold: midpoint between min and max.
                    threshold1 = (self._dll.maxArray(
                        pointer, size) + self._dll.minArray(pointer, size)) / 2
                self._dll.thresholder(
                    pointer, pointer, size, c_double(threshold1))
        print "thresholderOfLastWaveForms() not debugged yet"
        return
    def histo1DProperty(self, propertyArray='mean', mini="auto", maxi="auto", binNumber=10, targettedWaveform=15):
        """
        histo1DProperty(propertyArray=self.mean,mini='auto',maxi='auto',binNumber=10,targettedWaveform=15):
        Do a 1D histogram of the 1D array propertyArray
        """
        for i in range(4):
            if (targettedWaveform & (1 << i) and getattr(self, propertyArray)[i] is not None):
                pointerData = getattr(self, propertyArray)[i].ctypes.data
                size = c_long(len(getattr(self, propertyArray)[i]))
                min1 = mini
                if mini == "auto":
                    min1 = min(getattr(self, propertyArray)[i])
                max1 = maxi
                if maxi == "auto":
                    max1 = max(getattr(self, propertyArray)[i])
                if min1 > max1:
                    max1, min1 = min1, max1
                # Widen auto ranges by half a bin on each side so the extreme
                # values fall inside the outer bins.
                binWidth0 = (max1 - min1) / binNumber
                if mini == "auto":
                    min1 = min1 - binWidth0 / 2
                if maxi == "auto":
                    max1 = max1 + binWidth0 / 2
                binWidth = (max1 - min1) / binNumber
                # NOTE(review): `centers` is computed but never used (the same
                # expression is stored in binCentersArray on the next line).
                centers = array(
                    [min1 + (j + 0.5) * binWidth for j in range(binNumber)])
                self.binCentersArray[i] = array(
                    [min1 + (j + 0.5) * binWidth for j in range(binNumber)])
                self.histoArray[i] = zeros(binNumber)
                pointerHisto = self.histoArray[i][0:].ctypes.data
                self._dll.histo1D(pointerData, size, c_double(
                    min1), c_double(max1), c_ulong(binNumber), pointerHisto)
        return
    # NOTE(review): default `propertyArray=mean` is the bare numpy function
    # (via `from numpy import *`), but getattr() below needs an attribute-name
    # string -- the sibling methods use 'mean'; calling with the default
    # raises TypeError. Confirm and change the default to the string 'mean'.
    def thresholderProperty(self, propertyArray=mean, threshold="auto", targettedWaveform=15):
        """
        thresholderProperty(propertyArray=self.mean,threshold='auto',targettedWaveform=15):
        Overwrite values of dataArray with 0's or 1's if value is below and strictly above the threshold, respectively
        """
        for i in range(4):
            if targettedWaveform & (1 << i) and getattr(self, propertyArray)[i] is not None:
                pointer = getattr(self, propertyArray)[i].ctypes.data
                size = c_long(len(getattr(self, propertyArray)[i]))
                threshold1 = threshold
                if threshold == "auto":
                    # Auto threshold: midpoint between min and max.
                    threshold1 = (self._dll.maxArray(
                        pointer, size) + self._dll.minArray(pointer, size)) / 2
                self._dll.thresholder(
                    pointer, pointer, size, c_double(threshold1))
        return
    # NOTE(review): same bare `mean` default issue as thresholderProperty.
    def aboveThresholdFrequencyProperty(self, propertyArray=mean, threshold="auto", targettedWaveform=15):
        """
        aboveThresholdFrequencyProperty(propertyArray=self.mean,threshold='auto',targettedWaveform=15):
        Overwrite values of dataArray with 0's or 1's if value is below and strictly above the threshold, respectively
        """
        for i in range(4):
            if targettedWaveform & (1 << i) and getattr(self, propertyArray)[i] is not None:
                pointer = getattr(self, propertyArray)[i].ctypes.data
                size = c_long(len(getattr(self, propertyArray)[i]))
                threshold1 = threshold
                if threshold == "auto":
                    threshold1 = (self._dll.maxArray(
                        pointer, size) + self._dll.minArray(pointer, size)) / 2
                self.aboveThresholdFrequencyArray[i] = self._dll.aboveThresholdFrequency(
                    pointer, size, c_double(threshold1))
        return
    def histo2DProperty(self, propertyArray='mean', minMax="auto", binNumbers=[10, 10], channels=[0, 1], histo2DMemory=0):
        """
        histo2DProperty(propertyArray='mean',minMax="auto",binNumbers=[10,10],channels=[0,1],histo2DMemory=0):
        Do a 2D histogram of the two 1D arrays propertyArray[channel1],propertyArray[channel2] by calling the histo2D() function of the DLL.
        minMax is a 2d list of the form [[min1,max1],[min2,max2]] specifying the minima and maxima along the two axes, where any value or list can be replaced by "auto".
        histo2DMemory= 0 or 1 = either one of the possible 2D histogram memories.
        reshape2D shape the final 2D histograms as a 2D array instead of 1D array
        """
        if all([getattr(self, propertyArray)[channel] is not None for channel in channels]):  # first check that data exist in the two requested channels
            pointerData1, pointerData2 = [getattr(self, propertyArray)[
                channel].ctypes.data for channel in channels]  # get pointers to the x and y data
            # Take the common length of of x and y channels
            size = c_long(
                min([len(getattr(self, propertyArray)[channel]) for channel in channels]))
            if minMax == "auto":
                # Caclulate min max specifications in case "auto" has been used
                # somewhere
                minMax = ["auto", "auto"]
            if len(minMax) == 2 and len(binNumbers) == 2:
                if minMax[0] == "auto":
                    minMax[0] = ["auto", "auto"]
                if minMax[1] == "auto":
                    minMax[1] = ["auto", "auto"]
                # NOTE(review): `minMaxStart = minMax` aliases (does not copy)
                # the list, so after the entries below are overwritten with
                # numbers, the "auto" comparisons on minMaxStart are always
                # False and the half-bin widening never runs. Confirm whether
                # a deep copy was intended.
                minMaxStart = minMax
                if minMax[0][0] == "auto":
                    minMax[0][0] = min(
                        getattr(self, propertyArray)[channels[0]])
                if minMax[0][1] == "auto":
                    minMax[0][1] = max(
                        getattr(self, propertyArray)[channels[0]])
                if minMax[1][0] == "auto":
                    minMax[1][0] = min(
                        getattr(self, propertyArray)[channels[1]])
                if minMax[1][1] == "auto":
                    minMax[1][1] = max(
                        getattr(self, propertyArray)[channels[1]])
                if minMax[0][0] > minMax[0][1]:
                    minMax[0][0], minMax[0][1] = minMax[0][1], minMax[0][0]
                # NOTE(review): the condition below re-tests axis 0 but swaps
                # axis 1 -- presumably it should test minMax[1][0] > minMax[1][1].
                if minMax[0][0] > minMax[0][1]:
                    minMax[1][0], minMax[1][1] = minMax[1][1], minMax[1][0]
                binWidths = [(minMax[0][1] - minMax[0][0]) / binNumbers[0],
                             (minMax[1][1] - minMax[1][0]) / binNumbers[1]]
                if minMaxStart[0][0] == "auto":
                    minMax[0][0] = minMax[0][0] - binWidths[0] / 2
                if minMaxStart[0][1] == "auto":
                    minMax[0][1] = minMax[0][1] + binWidths[0] / 2
                if minMaxStart[1][0] == "auto":
                    minMax[1][0] = minMax[1][0] - binWidths[1] / 2
                # NOTE(review): tests [1][0] again; presumably [1][1] intended.
                if minMaxStart[1][0] == "auto":
                    minMax[1][1] = minMax[1][1] + binWidths[1] / 2
                binWidths = [(minMax[0][1] - minMax[0][0]) / binNumbers[0],
                             (minMax[1][1] - minMax[1][0]) / binNumbers[1]]
                self.binCentersXYArray[histo2DMemory] = array([[minMax[0][0] + (i + 0.5) * binWidths[0] for i in range(
                    binNumbers[0])], [minMax[1][0] + (i + 0.5) * binWidths[1] for i in range(binNumbers[1])]])
                # Initialize the 2D array as a 1 d array
                self.histo2DArray[histo2DMemory] = zeros(
                    binNumbers[0] * binNumbers[1])
                # defines the pointer to be passed to the DLL and call the DLL
                pointerHisto2D = self.histo2DArray[histo2DMemory][0:].ctypes.data
                self._dll.histo2D(pointerData1, pointerData2, size, c_double(minMax[0][0]), c_double(minMax[0][1]), c_ulong(
                    binNumbers[0]), c_double(minMax[1][0]), c_double(minMax[1][1]), c_ulong(binNumbers[1]), pointerHisto2D)
                # the histo is a 1D array with [Y1 column, Y2 column,Y3 column,...]
                # reshape it as [[X1 line],[X1 line],[X1 line],... ] if requested
                self.histo2DArray[histo2DMemory] = self.histo2DArray[
                    histo2DMemory].reshape((binNumbers[0], binNumbers[1]))
        return
'''**************************************************************************************
BELOW IS THE SYNTAX OF THE QUANTRO_DLLMATH1 FUNCTIONS
extern "C" {
//These functions are built using the GSL mathematical library (see code DLLMath1Functions.cpp)
//return the version string of the DLL
impExp void WINAPI mathDLLVersion (char* versionString);
//Fill the passed string helpString of length helpStringLength with a help text on function functionName if it exists or on all function if functionName=""
impExp void WINAPI mathDLLHelp (char* functionName,char* helpString,long helpStringLength);
//mean of a 1D array
impExp double WINAPI mean(double *array,long length);
//unbiased variance of a 1D array, the mean of which is already known
impExp double WINAPI variance_m(double *array,long length,double mean);
//unbiased variance of a 1D array
impExp double WINAPI variance(double *array,long length);
//unbiased standard deviation of a 1D array,the mean of which is already known
impExp double WINAPI sdev_m(double *array,long length,double mean);
//unbiased standard deviation of a 1D array
impExp double WINAPI sdev(double *array,long length);
//Boxcar average between two indexes (mean of a subarray). Index of first element is 0.
impExp double WINAPI boxCar(double *array,long firstIndex, long lastIndex);
//Min of a 1D array.
impExp double WINAPI minArray(double *array,long length);
//Max of a 1D array.
impExp double WINAPI maxArray(double *array,long length);
//Covariance of two 1D arrays, the means of which are already known.
impExp double WINAPI cov_m(double *array1,double *array2,long length,double mean1,double mean2);
//Covariance of two 1D arrays of same length.
impExp double WINAPI cov(double *array1,double *array2,long length);
// 2x2 covariance matrix of two 1D arrays with the same length.
impExp void WINAPI covMatrix2(double *array1,double *array2,long length,double covMatrix[2][2]);
// Sorted eigenValues and eigenVectors of a symmetric nxn matrix
impExp void WINAPI eigenSystemSym(double * array, long n,double* eigenValArray,double* eigenVecArray);
// Do a 1D histogram of a 1D array
impExp void WINAPI histo1D(double * dataArray,long lengthArray,double min,double max,size_t binNumber, double * histo );
// Do a 2D histogram of two 1D arrays representing a 2D distribution
impExp void WINAPI histo2D(double * xArray,double * yArray,long lengthArray,double xmin,double xmax,size_t binNumberX,double ymin,double ymax,size_t binNumberY, double *histo);
// Calculate and store in the modulus array the modulus values (xi^2+yi^2)^1/2 of two 1D arrays xi and yi. All array have same length. Output array can be xi or yi if needed.
impExp void WINAPI modulus(double * xArray,double * yArray,double * modulus, long lengthArray);
// Calculate and store in the binaryArray array 0's or 1's for xi values below and strictly above the threshold, respectively. All array have same length. Output array can be xi if needed.
impExp void WINAPI thresholder(double * xArray,double * binaryArray, long lengthArray, double threshold);
// returns the frequency with which values xi in array xArray are strictly above threshold.
impExp double WINAPI aboveThresholdFrequency(double * xArray, long lengthArray, double threshold);
}'''
|
# Generated by Django 3.0.3 on 2020-02-13 16:12
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 3.0.3): creates the RatingProduct
    model, a 1-5 star rating linking a user to a product."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('Product', '0002_auto_20200205_0716'),
    ]

    operations = [
        migrations.CreateModel(
            name='RatingProduct',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # rating constrained to the inclusive range 1..5 by validators
                ('rating', models.IntegerField(validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(5)])),
                # deleting a product or a user cascades to its ratings
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Product.Product')),
                ('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, threading, pyodbc, logging, ConfigParser
from pdu import pdu
# Python 2 only: reload(sys) restores setdefaultencoding (removed by site.py)
# so implicit str<->unicode conversions use UTF-8 instead of ASCII.
reload(sys)
sys.setdefaultencoding('utf-8')
# 生成数据库连接
def connect_to_mssql():
    """Open a pyodbc connection to the SQL Server described in conf/sd.conf.

    The [mssql] section must define: server, dbname, username, passwd.
    """
    cfg = ConfigParser.ConfigParser()
    cfg.read('conf/sd.conf')
    server = cfg.get('mssql', 'server')
    dbname = cfg.get('mssql', 'dbname')
    username = cfg.get('mssql', 'username')
    passwd = cfg.get('mssql', 'passwd')
    # assemble the ODBC connection string piece by piece
    conn_str = u"DRIVER={SQL SERVER};SERVER=" + server + u";DATABASE=" + dbname + u";UID=" + username + u";PWD=" + passwd
    return pyodbc.connect(conn_str)
# 生成用户信息
def get_persons():
    """Load every row of the persons table plus the meter ids each person supervises.

    Returns a list of dicts with keys: name, mobile, right (comma-delimited
    sms-type whitelist string), usertype, meters (tuple of yibiaoindex values).
    On any database error, prints it and exits the process.
    """
    try:
        conn = connect_to_mssql()
        cursor = conn.cursor()
        persons = []
        for row in cursor.execute(u"select * from persons").fetchall():
            person = {}
            person["name"] = row[0]
            person["mobile"] = row[1]
            person["right"] = row[2]
            person["usertype"] = row[3]
            person["meters"] = ()
            # earlier string-concatenation approach, kept for reference:
            # yibiaoindexes = cursor.execute(u"declare @yibiaoindexes nvarchar(1000);set @yibiaoindexes = ',';select @yibiaoindexes = @yibiaoindexes + yibiaoindex + ',' from supervise where mobile = ?;select @yibiaoindexes;", row[1]).fetchall()
            yibiaoindexes = cursor.execute(u"select yibiaoindex from supervise where mobile = ?;", row[1]).fetchall()
            # tuple of meter ids this person receives SMS about
            if not yibiaoindexes == []:
                # Python 2: zip() returns a list, so [0] indexing is valid here
                person["meters"] = zip(*yibiaoindexes)[0]
            persons.append(person)
        return persons
    except Exception, e:  # Python 2 except syntax; broad catch, then hard exit
        print(e)
        sys.exit()
# person有无发送某短信的权限
def has_right(person, sms_type, yibiaoindex):
    """Return whether *person* may receive an SMS of *sms_type* about meter *yibiaoindex*."""
    # person['right'] is a comma-delimited whitelist such as ',1,3,7,'
    if ',' + str(sms_type) + ',' not in person['right']:
        return False
    # heating-management staff receive every whitelisted message type
    if person['usertype'] == '供热管理':
        return True
    # end users only receive messages about meters they supervise
    return person['usertype'] == '用热用户' and yibiaoindex in person['meters']
# 生成短信接口参数
def get_sms_parameters(**kw):
    """Build the SMS body and recipient list for one outgoing message.

    Expected keyword arguments: sms_type (int), yibiaoindex, name, riqi,
    plus message-dependent param1/param2/param3.

    Returns a dict with:
      'content' - unicode message text (only set for recognised sms_type values)
      'phones'  - mobile numbers of every person entitled to this message
    """
    parameters = {}
    parameters['phones'] = []
    # notification
    if kw['sms_type'] == 0:
        parameters['content'] = u'{0}'.format(kw['param1'])
    # remote recharge succeeded
    elif kw['sms_type'] == 1:
        parameters['content'] = u'{0}于{1} 远程充值{2}'.format(kw['name'], kw['riqi'], kw['param1'])
    # remote recharge failed
    elif kw['sms_type'] == 2:
        parameters['content'] = u'{0}于{1} 远程充值失败'.format(kw['name'], kw['riqi'])
    # meter box opened
    elif kw['sms_type'] == 3:
        parameters['content'] = u'{0}于{1} 开箱'.format(kw['name'], kw['riqi'])
    # meter power lost
    elif kw['sms_type'] == 4:
        parameters['content'] = u'{0}于{1} 断电'.format(kw['name'], kw['riqi'])
    # IC card recharge
    elif kw['sms_type'] == 5:
        parameters['content'] = u'{0}于{1} IC充值{2}'.format(kw['name'], kw['riqi'], kw['param1'])
    # recharge after a low-balance condition
    elif kw['sms_type'] == 6:
        parameters['content'] = u'{0}于{1} 充值成功,当前剩余流量{2}KG'.format(kw['name'], kw['riqi'], kw['param1'])
    # low-balance alarm
    elif kw['sms_type'] == 7:
        parameters['content'] = u'{0}于{1} 余额不足,请及时充值。当前剩余流量{2}KG'.format(kw['name'], kw['riqi'], kw['param1'])
    # valve-closed alarm
    elif kw['sms_type'] == 8:
        parameters['content'] = u'{0}于{1} 阀门关闭,请及时充值。当前剩余流量{2}KG'.format(kw['name'], kw['riqi'], kw['param1'])
    # daily low-balance alarm
    elif kw['sms_type'] == 9:
        # BUGFIX: was '.foramt(...)', which raised AttributeError at runtime
        parameters['content'] = u'{0}于{1} 余额不足日常报警,当前剩余流量{2}KG'.format(kw['name'], kw['riqi'], kw['param1'])
    # daily valve-closed alarm
    elif kw['sms_type'] == 10:
        parameters['content'] = u'{0}于{1} 阀门关闭日常报警,当前剩余流量{2}KG'.format(kw['name'], kw['riqi'], kw['param1'])
    # remaining flow deducted
    elif kw['sms_type'] == 11:
        parameters['content'] = u'{0}于{1} 扣除剩余流量{2}'.format(kw['name'], kw['riqi'], kw['param1'])
    # remaining-flow deduction failed
    elif kw['sms_type'] == 12:
        parameters['content'] = u'{0}于{1} 扣除剩余流量失败'.format(kw['name'], kw['riqi'])
    # unit price set
    elif kw['sms_type'] == 13:
        parameters['content'] = u'{0}于{1} 单价成功设置为:{2}'.format(kw['name'], kw['riqi'], kw['param1'])
    # unit-price setting failed
    elif kw['sms_type'] == 14:
        parameters['content'] = u'{0}于{1} 设置单价失败'.format(kw['name'], kw['riqi'])
    # alarm / valve-close thresholds set
    elif kw['sms_type'] == 15:
        parameters['content'] = u'{0}于{1} 设置成功,报警值为:{2},关阀值为:{3}'.format(kw['name'], kw['riqi'], kw['param1'], kw['param2'])
    # threshold setting failed
    elif kw['sms_type'] == 16:
        parameters['content'] = u'{0}于{1} 设置报警值关阀值失败'.format(kw['name'], kw['riqi'])
    # recharge flow set
    elif kw['sms_type'] == 17:
        parameters['content'] = u'{0}于{1} 设置成功,充值流量设置为:{2}'.format(kw['name'], kw['riqi'], kw['param1'])
    # recharge-flow setting failed
    elif kw['sms_type'] == 18:
        parameters['content'] = u'{0}于{1} 设置充值流量失败'.format(kw['name'], kw['riqi'])
    # recharge amount set
    elif kw['sms_type'] == 19:
        parameters['content'] = u'{0}于{1} 设置成功,充值金额设置为:{2}'.format(kw['name'], kw['riqi'], kw['param1'])
    # recharge-amount setting failed
    elif kw['sms_type'] == 20:
        parameters['content'] = u'{0}于{1} 设置充值金额失败'.format(kw['name'], kw['riqi'])
    # remaining flow set
    elif kw['sms_type'] == 21:
        parameters['content'] = u'{0}于{1} 设置成功,剩余流量设置为:{2}'.format(kw['name'], kw['riqi'], kw['param1'])
    # remaining-flow setting failed
    elif kw['sms_type'] == 22:
        parameters['content'] = u'{0}于{1} 设置剩余流量失败'.format(kw['name'], kw['riqi'])
    # initial flow set
    elif kw['sms_type'] == 23:
        parameters['content'] = u'{0}于{1} 设置成功,初始流量设置为:{2}'.format(kw['name'], kw['riqi'], kw['param1'])
    # initial-flow setting failed
    elif kw['sms_type'] == 24:
        parameters['content'] = u'{0}于{1} 设置初始流量失败'.format(kw['name'], kw['riqi'])
    # valve type set
    elif kw['sms_type'] == 25:
        parameters['content'] = u'{0}于{1} 设置成功,阀门类型设置为:{2}'.format(kw['name'], kw['riqi'], kw['param1'])
    # valve-type setting failed
    elif kw['sms_type'] == 26:
        parameters['content'] = u'{0}于{1} 设置阀门类型失败'.format(kw['name'], kw['riqi'])
    # flow continuously above the upper limit
    elif kw['sms_type'] == 34:
        parameters['content'] = u'{0}于{1} 流量持续超过{2},当前流量为{3}'.format(kw['name'], kw['riqi'], kw['param1'], kw['param2'])
    # collect the mobile number of every person entitled to this message
    for person in get_persons():
        if has_right(person, kw['sms_type'], kw['yibiaoindex']):
            parameters['phones'].append(person['mobile'])
    return parameters
# 读取表sms,发送短信
def send_sms_from_db():
    """Poll the sms table and deliver every pending message, then re-arm.

    Rows with smsable=1 are joined to yibiao for the meter name (y.tip);
    for each row, content and recipients come from get_sms_parameters and
    the message is sent through the GSM modem wrapper (pdu). Each handled
    row is flagged smsable=0 and committed. Re-schedules itself with a 5 s
    Timer; on any error, logs it and exits the process.
    """
    try:
        p = pdu()
        conn = connect_to_mssql()
        cursor = conn.cursor()
        for row in cursor.execute(u"select s.autoid, s.yibiaoindex, y.tip, s.updatedate, s.smstype, s.smsable, isnull(s.param_1, ' ') param1, \
            isnull(s.param_2, ' ') param2, isnull(s.param_3, ' ') param3 from sms s, yibiao y where s.yibiaoindex = \
            y.yibiaoindex and s.smsable = 1 order by s.smstype").fetchall():
            d = get_sms_parameters(yibiaoindex=row[1],name=row[2],riqi=row[3],sms_type=row[4],param1=row[6],param2=row[7],param3=row[8])
            for phone in d['phones']:
                p.send_message(phone, d['content'])
            # mark this row handled so it is not re-sent on the next poll
            cursor.execute(u'update sms set smsable = 0 where autoid = ?', row[0])
            conn.commit()
        # re-arm: run again in 5 seconds (module-level t keeps the Timer alive)
        global t
        t = threading.Timer(5.0, send_sms_from_db)
        t.start()
    except Exception, e:  # Python 2 except syntax; log, then hard exit
        logging.error(e)
        sys.exit()
def say_hello():
    """Demo heartbeat: print a greeting, then re-arm to run again in 5 s (forever)."""
    global t
    print('hello world!\n')
    # keep the Timer in the module-level t so it is not garbage-collected
    t = threading.Timer(5.0, say_hello)
    t.start()
# demo loop, disabled:
#t = threading.Timer(5.0, say_hello)
#t.start()
# Kick off the polling loop after 1 s; send_sms_from_db re-arms itself every 5 s.
t = threading.Timer(1.0, send_sms_from_db)
t.start()
#print get_persons()
import math

# Read an angle in radians from stdin and print the nearest whole degree.
angle_rad = float(input())
angle_deg = round(angle_rad * 180 / math.pi)
print(angle_deg)
|
import numpy as np
from time import time
from collections import Counter
import networkx as nx
# import gmatch4py as gm
from grakel import graph_from_networkx, RandomWalk
import pandas as pd
import os
from copy import deepcopy
from mvmm.multi_view.block_diag.graph.linalg import get_adjmat_bp
from mvmm.multi_view.base import MultiViewMixtureModelMixin
from mvmm.multi_view.MVMMGridSearch import MVMMGridSearch
from mvmm.multi_view.BlockDiagMVMM import BlockDiagMVMM
from mvmm.multi_view.TwoStage import TwoStage
# from mvmm.multi_view.SpectralPenSearchMVMM import SpectralPenSearchMVMM
from mvmm.multi_view.SpectralPenSearchByBlockMVMM import \
SpectralPenSearchByBlockMVMM
def is_mvmm(estimator):
    """Return True iff *estimator* is a multi-view mixture model or a
    wrapper (grid search / two-stage / spectral-pen search) around one."""
    # isinstance with a tuple replaces the chained or + if/else True/False boilerplate
    # (SpectralPenSearchMVMM intentionally excluded; its import is commented out above)
    return isinstance(estimator, (MultiViewMixtureModelMixin,
                                  MVMMGridSearch,
                                  TwoStage,
                                  SpectralPenSearchByBlockMVMM))
def is_block_diag_mvmm(estimator):
    """Return True iff *estimator* is (or wraps) a BlockDiagMVMM.

    MVMMGridSearch is unwrapped via base_estimator and TwoStage via
    base_final, recursively, before the final type check.
    """
    if isinstance(estimator, MVMMGridSearch):
        return is_block_diag_mvmm(estimator.base_estimator)
    if isinstance(estimator, TwoStage):
        return is_block_diag_mvmm(estimator.base_final)
    # direct boolean result replaces the if/else True/False boilerplate
    return isinstance(estimator, BlockDiagMVMM)
def clf_fit_and_score(clf, X_tr, y_tr, X_tst, y_tst):
    """Fit a classifier on the training split and score both splits.

    Parameters
    ----------
    clf:
        A sklearn compatible classifier.
    X_tr, y_tr: training data and true labels.
    X_tst, y_tst: test data and true labels.

    Output
    ------
    results: dict
        Keys 'tr' and 'tst' each map to {'acc': float}; 'runtime' is the
        wall-clock time in seconds.
    """
    t0 = time()

    def _metrics(y_true, y_pred):
        # fraction of exact label matches
        return {'acc': np.mean(y_true == y_pred)}

    clf.fit(X_tr, y_tr)
    results = {'tr': _metrics(y_tr, clf.predict(X_tr)),
               'tst': _metrics(y_tst, clf.predict(X_tst)),
               'runtime': time() - t0}
    return results
def get_pi_acc(Pi_est, Pi_true, method='random_walk', **kwargs):
    """Similarity between the sparsity patterns of two Pi matrices.

    Builds the bipartite adjacency matrix of each matrix's non-zero support
    (via get_adjmat_bp) and compares the two graphs with graph_similarity.

    NOTE(review): the original docstring said "graph edit distance", but the
    value returned is a random-walk kernel similarity — confirm which is meant.
    """
    A_est = get_adjmat_bp(Pi_est > 0)
    A_true = get_adjmat_bp(Pi_true > 0)
    G_est = nx.from_numpy_array(A_est)
    G_true = nx.from_numpy_array(A_true)
    sim = graph_similarity(G_est, G_true, method=method, **kwargs)
    return sim
def graph_similarity(G, H, method='random_walk', **kwargs):
    """Kernel similarity between two networkx graphs.

    Only the GraKeL random-walk kernel is currently supported; extra kwargs
    are forwarded to RandomWalk. Returns the off-diagonal kernel entry.

    Parameters
    ----------
    G, H: nx.Graph
    """
    assert method in ['random_walk']
    kernel = RandomWalk(**kwargs)
    gram = kernel.fit_transform(graph_from_networkx([G, H]))
    return gram[0, 1]
def get_n_comp_seq(true_n_components, pm):
    """Candidate component counts: the true value +/- pm, clipped below at 1
    (upper end exclusive, matching np.arange)."""
    lo = max(1, true_n_components - pm)
    hi = true_n_components + pm
    return np.arange(lo, hi)
def get_empirical_pi(Y, shape, scale='prob'):
    """Empirical joint distribution of the first two label columns of Y.

    Each row of Y is treated as a (row-index, col-index) pair; the result is
    a *shape* array of co-occurrence counts, normalized to probabilities
    when scale='prob'.
    """
    assert scale in ['prob', 'counts']
    out = np.zeros(shape)
    # count how often each label pair occurs
    for pair, cnt in Counter(map(tuple, Y)).items():
        out[pair[0], pair[1]] = cnt
    if scale == 'prob':
        out = out / out.sum()
    return out
def extract_tuning_param_vals(df):
    """Pull the single tuning-parameter value out of each row.

    Every entry of df['tuning_param_values'] must be a one-key dict; the
    shared key becomes the name of the returned Series.

    Returns (pd.Series indexed like df, parameter name).
    """
    collected = []
    for entry in df['tuning_param_values']:
        keys = list(entry.keys())
        assert len(keys) == 1
        pname = keys[0]
        collected.append(entry[pname])
    return pd.Series(collected, index=df.index, name=pname), pname
def add_pi_config(pi_name='motivating_ex', config=None):
    """Fill in the 'pi_dist' / 'pi_config' entries of a config dict.

    Parameters
    ----------
    pi_name: str
        One of 'sparse_pi', 'sparse_pi_2', 'diagonal_<n>', 'indep_<n>',
        'lollipop_<stick>_<bulb>', 'beads_<size>_<n>', 'motivating_ex'.
    config: dict or None
        Dict to update in place; a fresh dict is created when None.
        BUGFIX: the old mutable default (config={}) made every default call
        share and clobber the same dict.

    Raises
    ------
    ValueError for an unrecognised pi_name.
    """
    if config is None:
        config = {}
    if pi_name == 'sparse_pi':
        config['pi_dist'] = 'sparse'
        config['pi_config'] = {'n_rows_base': 5,
                               'n_cols_base': 8,
                               'density': .6,
                               'random_state': 78923}
    elif pi_name == 'sparse_pi_2':
        config['pi_dist'] = 'sparse'
        config['pi_config'] = {'n_rows_base': 10,
                               'n_cols_base': 10,
                               'density': .6,
                               'random_state': 94009}
    elif 'diagonal' in pi_name:
        # n_blocks 1x1 blocks on the diagonal
        config['pi_dist'] = 'block_diag'
        n_blocks = int(pi_name.split('_')[1])
        block_shapes = [(1, 1)] * n_blocks
        config['pi_config'] = {'block_shapes': block_shapes}
    elif 'indep' in pi_name:
        config['pi_dist'] = 'indep'
        n_comp = int(pi_name.split('_')[1])
        config['pi_config'] = {'n_comp': n_comp}
    elif 'lollipop' in pi_name:
        # a chain of 1x1 blocks ("stick") ending in one large block ("bulb")
        config['pi_dist'] = 'block_diag'
        _, stick, bulb = pi_name.split('_')
        stick = int(stick)
        bulb = int(bulb)
        block_shapes = [(1, 1)] * stick + [(bulb, bulb)]
        block_weights = [1] * stick + [1 / bulb]
        config['pi_config'] = {'block_shapes': block_shapes,
                               'block_weights': block_weights}
    elif 'beads' in pi_name:
        # n_beads equally sized square blocks
        config['pi_dist'] = 'block_diag'
        _, size, n_beads = pi_name.split('_')
        block_shapes = [(int(size), int(size))] * int(n_beads)
        config['pi_config'] = {'block_shapes': block_shapes}
    elif pi_name == 'motivating_ex':
        config['pi_dist'] = 'motivating_ex'
        config['pi_config'] = {}
    else:
        raise ValueError('bad argument for pi_name, {}'.format(pi_name))
    return config
def safe_drop(x, val):
    """Return a copy of *x* with every occurrence of *val* removed.

    Accepts a list (returns a list) or a numpy array (returns an array).
    Element order is NOT preserved (set-based de-duplicating removal, as in
    the original implementation).

    Raises
    ------
    ValueError for any other input type.

    BUGFIX: the original tested ``type(x) == np.array`` — np.array is a
    factory function, not a type, so every ndarray wrongly raised
    ValueError. isinstance against np.ndarray fixes this (and also accepts
    list subclasses).
    """
    if isinstance(x, list):
        ret_list = True
    elif isinstance(x, np.ndarray):
        ret_list = False
    else:
        raise ValueError('bad x input')
    remaining = list(set(x).difference([val]))
    return remaining if ret_list else np.array(remaining)
def safe_drop_list(x, to_drop):
    """Return a copy of *x* (list or numpy array) with every value in
    *to_drop* removed, by repeated safe_drop calls."""
    result = deepcopy(x)
    for unwanted in to_drop:
        result = safe_drop(x=result, val=unwanted)
    return result
def format_mini_experiment(args):
    """Shrink an experiment's settings for a quick smoke-test run.

    No-op unless args.mini is truthy; otherwise renames the simulation,
    caps every *max_n_steps* attribute at 5, and forces minimal
    initializations / penalty-sequence lengths. Mutates and returns *args*.
    """
    if not args.mini:
        return args

    args.sim_name = 'mini'

    # cap every step budget (any attribute whose name contains 'max_n_steps')
    for attr in args.__dict__:
        if 'max_n_steps' in attr:
            setattr(args, attr, 5)

    # single initialization, shortest penalty sequences
    args.n_init = 1
    args.bd_n_pen_tries = 1
    args.sp_n_pen_seq = 2
    args.log_pen_n_pen_seq = 2
    return args
def make_and_get_dir(*args):
    """Join *args* into one path, ensure that directory exists, return the path."""
    target = os.path.join(*args)
    os.makedirs(target, exist_ok=True)
    return target
|
# -*- coding: utf-8 -*-
# Odoo addon manifest: metadata only, no models/views ('data' is empty).
{
    'name': 'Purchase Invoice Create Hooks',
    'version': '1.0',
    'category': 'Purchase Management',
    'description': '''
Add hook point in purchase.action_invoice_create()
''',
    'author': "Ecosoft",
    'website': 'http://ecosoft.co.th',
    'depends': ['purchase'],
    'data': [],
    'installable': True,
}
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
import xml.etree.ElementTree as ET
import uuid, hashlib
from datetime import datetime
import random
import time
import pdb
def object2dict(obj):
    """Recursively convert an object graph to plain dicts / lists / scalars.

    Instances are flattened via __dict__; dicts and lists are converted
    element-wise; everything else is returned unchanged.

    NOTE(review): the instance test relies on 'class' appearing in the repr
    of the type — that distinguishes instances from builtins on Python 2
    only (on Python 3 every type's repr contains 'class'). Confirm this
    module is run under Python 2 only.
    """
    t = type(obj)
    if 'class' in str(t):
        # treat as an instance: recurse over its attribute dict
        d = {}
        for (key, value) in obj.__dict__.items():
            d[key] = object2dict(value)
        return d
    elif t is dict:
        d = {}
        for (key, value) in obj.items():
            d[key] = object2dict(value)
        return d
    elif t is list:
        l = []
        for _ in obj:
            l.append(object2dict(_))
        return l
    else:
        # scalar (str / int / ...): returned as-is
        return obj
def dict2object(d, moduleName, className):
    """Instantiate moduleName.className and copy each dict item onto it as an attribute."""
    module = __import__(moduleName)
    if module is None:
        # defensive: __import__ raises ImportError rather than returning None
        raise Exception('Moudle not found')
    cls = getattr(module, className)
    instance = cls()
    for (attr, value) in d.items():
        setattr(instance, attr, value)
    return instance
def CDATA(text):
    """Wrap *text* in an XML CDATA section."""
    wrapped = '<![CDATA[%s]]>' % text
    return wrapped
def createOrderId():
    """Generate a 26-digit order id: an 18-digit timestamp prefix
    (%Y%m%d%H%M plus microseconds) followed by an 8-digit zero-padded
    random suffix."""
    stamp = datetime.now().strftime('%Y%m%d%H%M%f')
    suffix = '%08d' % random.randint(1, 99999999)
    return stamp + suffix
def mapToXml(d):
    """Serialize a flat dict as <xml><key>value</key>...</xml>.

    Returns utf-8 encoded bytes (ET.tostring with an explicit encoding
    includes the XML declaration).
    """
    root = ET.Element('xml')
    for (tag, text) in d.items():
        child = ET.SubElement(root, tag)
        child.text = text
    return ET.tostring(root, encoding='utf-8')
def generateSign(d, key):
    """WeChat-pay style MD5 signature of a parameter dict.

    Steps: drop entries whose value is None or blank after strip, format
    each as 'k=v', sort, join with '&', append '&key=<key>', MD5 the utf-8
    bytes and return the uppercase hex digest.

    IMPROVEMENT: '&'.join replaces the Python-2-only reduce builtin
    (removed from builtins in Python 3) and, unlike reduce, also handles an
    empty/fully-filtered dict without raising.
    """
    kept = [t for t in d.items() if t[1] is not None and len(t[1].strip()) > 0]
    parts = sorted('%s=%s' % t for t in kept)
    stringSignTemp = '&'.join(parts)
    stringSignTemp = '%s&key=%s' % (stringSignTemp, key)
    stringSignTemp = stringSignTemp.encode('utf8')
    md5Sign = hashlib.md5(stringSignTemp).hexdigest()
    return md5Sign.upper()
def generateSHA1Sign(d):
    """SHA1 signature of a parameter dict (lowercase hex digest).

    Same pipeline as generateSign but without the trailing key and without
    uppercasing: drop None/blank values, 'k=v' format, sort, '&'-join,
    SHA1 over the utf-8 bytes.

    IMPROVEMENT: '&'.join replaces the Python-2-only reduce builtin and
    also handles an empty/fully-filtered dict without raising.
    """
    kept = [t for t in d.items() if t[1] is not None and len(t[1].strip()) > 0]
    parts = sorted('%s=%s' % t for t in kept)
    stringSignTemp = '&'.join(parts).encode('utf8')
    return hashlib.sha1(stringSignTemp).hexdigest()
def xmlToMap(text):
    """Parse a flat <xml>...</xml> document into a {tag: text} dict."""
    root = ET.fromstring(text.encode('utf8'))
    return {child.tag: child.text for child in root}
def now():
    """Current local time as an integer Unix timestamp (whole seconds)."""
    local_tuple = datetime.now().timetuple()
    return int(time.mktime(local_tuple))
def nonceStr():
    """Random 32-character lowercase-hex nonce (UUID1 without hyphens)."""
    return uuid.uuid1().hex
|
# Read "n k" on the first line and a list of numbers on the second;
# print the sum of the first k numbers.
header = input().split()
numbers = input().split()
n = int(header[0])  # declared count of numbers (not otherwise used here)
k = int(header[1])
# Built-in sum() replaces the manual accumulator, which shadowed the `sum`
# builtin. Indexing via range(k) preserves the original IndexError behavior
# when fewer than k numbers are supplied.
print(sum(int(numbers[j]) for j in range(k)))
|
'''
Find a number in an array of numbers by adding K th elemment with K th element and return the K th largest number.
Note:
1. The K th largest number must be an absolute number.
2. If the sum is already present in the array then add the number to K th value
3. Negative values will also be passed as input.
Example: [4, 1, 2, 12, 7, 3],2
Root element is 1(minimum),
Here K=2 (It was the index value)
The number K th i.e (2-1)th largest number: (2-1 is the index and 2 is the position in array)
1+1 = 2
2 is already present so let's add 2+1=3
3 is already present so let's add 3+1=4
4 is already present so let's add 4+1=5
Hence, the answer is 5
Input Format
Computer generated input of array A[i] and position K (Getting using map)
Constraints
0 ≤ k ≤ 100
Output Format:
Absolute number (important point)
Sample Input 0
-4 -1 -2 -12 -7 -3 4 1 2 12 7 3
2
Sample Output 0
5
Sample Input 1
3 2 1 4 5 6
1
Sample Output 1
9
'''
class Solution():
    """Find the first multiple-chain value (derived from the K-th element)
    that is absent from the array.

    Starting from twice the K-th element, the K-th element is added
    repeatedly while the running total is still present in the array; the
    absolute value of the first total not found is the answer.
    """

    def k_largest(self, arr, k):
        """Pure logic, separated from stdin so it is testable.

        Parameters
        ----------
        arr: list of int
        k: 1-based index of the seed element

        Returns the absolute value of the first chained sum not in arr.
        """
        k_val = arr[k - 1]
        candidate = k_val + k_val
        # set membership: O(1) per probe instead of O(n) list scans
        members = set(arr)
        while candidate in members:
            candidate += k_val
        return abs(candidate)

    def sibling(self):
        """Read the space-separated array and the index K from stdin and
        return the answer (interface unchanged)."""
        arr = list(map(int, input().split()))
        k = int(input())
        return self.k_largest(arr, k)
#main
# Script entry point: read input, solve, print.
if __name__ == '__main__':
    # create the solver
    sol = Solution()
    # run the computation and print the result
    print(sol.sibling())
|
import pyotp
import time
from twilio.rest import Client
from django.shortcuts import render
from rest_framework.views import APIView
from rest_framework import viewsets
from rest_framework.response import Response
from rest_framework import status
from .serializers import (SiteGroupSerializer, CatalogItemDeptSerializer,
ItemRangeSerializer, StockSerializer, StockRetailSerializer, StockIdSerializer,
OtpRequestSerializer, OtpValidationSerializer, ResetPasswordSerializer, CustomerSignSerializer,
TreatmentAccountSerializer, TopupSerializer, TreatmentDoneSerializer, TopupproductSerializer,
TopupprepaidSerializer,TreatmentReversalSerializer,ShowBalanceSerializer,ReverseTrmtReasonSerializer,
VoidSerializer,PosDaudDetailsSerializer,VoidReasonSerializer,TreatmentAccSerializer,
DashboardSerializer,CreditNoteSerializer,ProductAccSerializer,PrepaidAccSerializer,PrepaidacSerializer,
CreditNoteAdjustSerializer,BillingSerializer,CreditNotePaySerializer,PrepaidPaySerializer,VoidListSerializer,
CartPrepaidSerializer, VoidCancelSerializer,HolditemdetailSerializer,HolditemSerializer,HolditemupdateSerializer)
from cl_table.serializers import PostaudSerializer, TmpItemHelperSerializer
from .models import (SiteGroup, ItemSitelist, ReverseTrmtReason, VoidReason)
from cl_table.models import (Employee, Fmspw, ItemClass, ItemDept, ItemRange, Stock, ItemUomprice,
PackageDtl, ItemDiv, PosDaud, PosTaud, Customer, GstSetting, ControlNo, TreatmentAccount, DepositAccount,
PrepaidAccount, Treatment,PosHaud,TmpItemHelper,Appointment,Source,PosHaud,ReverseDtl,ReverseHdr,
CreditNote,Multistaff,ItemHelper,ItemUom,Treatment_Master,Holditemdetail,PrepaidAccountCondition,
CnRefund,ItemBrand,Title,ItemBatch,Stktrn)
from custom.models import ItemCart, Room, Combo_Services,VoucherRecord
from datetime import date, timedelta
from datetime import datetime
import datetime
from rest_framework.authentication import TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from django.http import Http404
from django.utils import timezone
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from custom.views import response, get_client_ip
from django.db.models import Q
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from rest_framework.permissions import AllowAny
from Cl_beautesoft.settings import SMS_SECRET_KEY, SMS_ACCOUNT_SID, SMS_AUTH_TOKEN, SMS_SENDER
from custom.services import GeneratePDF
from .permissions import authenticated_only
from rest_framework.decorators import action
from cl_table.views import get_in_val
from rest_framework import generics
from django.db.models import Sum
from rest_framework import exceptions
from django.shortcuts import get_object_or_404
from custom.serializers import ComboServicesSerializer
from .utils import general_error_response
from cl_table.authentication import ExpiringTokenAuthentication
from django.template.loader import get_template
from Cl_beautesoft.settings import BASE_DIR
from fpdf import FPDF
from pyvirtualdisplay import Display
import pdfkit
import os
import math
import os.path
from Cl_beautesoft import settings
from django.template.defaulttags import register
# Voucher-prefixed transaction-type labels and their plain counterparts;
# the two lists are index-aligned (type_ex[i] corresponds to type_tx[i]).
type_ex = ['VT-Deposit','VT-Top Up','VT-Sales']
type_tx = ['Deposit','Top Up','Sales']
# Create your views here.
class SalonViewset(viewsets.ModelViewSet):
    """CRUD API over active SiteGroup records (soft delete on destroy).

    Listing supports ?search=<text> (description icontains) and
    ?sortKey=description&sortValue=asc|desc. Creation allocates the group
    code from the per-site "SiteGroup" ControlNo counter.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    queryset = SiteGroup.objects.filter(is_active=True).order_by('-id')
    serializer_class = SiteGroupSerializer

    def get_queryset(self):
        # Base: active groups, newest first; search takes precedence over sorting.
        queryset = SiteGroup.objects.filter(is_active=True).order_by('-id')
        q = self.request.GET.get('search', None)
        value = self.request.GET.get('sortValue', None)
        key = self.request.GET.get('sortKey', None)
        if q is not None:
            queryset = SiteGroup.objects.filter(is_active=True, description__icontains=q).order_by('-id')
        elif value and key is not None:
            if value == "asc":
                if key == 'description':
                    queryset = SiteGroup.objects.filter(is_active=True).order_by('description')
            elif value == "desc":
                if key == 'description':
                    queryset = SiteGroup.objects.filter(is_active=True).order_by('-description')
        return queryset

    def list(self, request):
        """Return all matching groups; 204-style payload when none match."""
        try:
            queryset = self.filter_queryset(self.get_queryset())
            if queryset:
                serializer = self.get_serializer(queryset, many=True)
                result = {'status': status.HTTP_200_OK, "message": "Listed Successfully", 'error': False, 'data': serializer.data}
            else:
                serializer = self.get_serializer()
                result = {'status': status.HTTP_204_NO_CONTENT, "message": "No Content", 'error': False, 'data': []}
            # NOTE: always HTTP 200; the effective status travels in the body.
            return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)

    # @authenticated_only
    def create(self, request):
        """Create a group, assigning its code from the site's ControlNo counter."""
        try:
            queryset = None
            serializer_class = None
            total = None
            serializer = self.get_serializer(data=request.data)
            # the login site of the calling user decides which counter is used
            fmspw = Fmspw.objects.filter(user=self.request.user, pw_isactive=True)
            site = fmspw[0].loginsite
            if serializer.is_valid():
                self.perform_create(serializer)
                control_obj = ControlNo.objects.filter(control_description__icontains="SiteGroup", Site_Codeid__id=site.id).first()
                if not control_obj:
                    result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Customer Control No does not exist!!", 'error': True}
                    return Response(result, status=status.HTTP_400_BAD_REQUEST)
                code = str(control_obj.control_no)
                k = serializer.save(code=code)
                if k.id:
                    # bump the counter only after a successful save
                    control_obj.control_no = int(control_obj.control_no) + 1
                    control_obj.save()
                state = status.HTTP_201_CREATED
                message = "Created Succesfully"
                error = False
                data = serializer.data
                result = response(self, request, queryset, total, state, message, error, serializer_class, data, action=self.action)
                return Response(result, status=status.HTTP_201_CREATED)
            state = status.HTTP_400_BAD_REQUEST
            message = "Invalid Input"
            error = True
            data = serializer.errors
            result = response(self, request, queryset, total, state, message, error, serializer_class, data, action=self.action)
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)

    def get_object(self, pk):
        """Fetch an active group by pk or raise Http404."""
        try:
            return SiteGroup.objects.get(pk=pk, is_active=True)
        except SiteGroup.DoesNotExist:
            raise Http404

    def retrieve(self, request, pk=None):
        """Return one group by pk."""
        try:
            queryset = None
            total = None
            serializer_class = None
            site_group = self.get_object(pk)
            serializer = SiteGroupSerializer(site_group)
            data = serializer.data
            state = status.HTTP_200_OK
            message = "Listed Succesfully"
            error = False
            result = response(self, request, queryset, total, state, message, error, serializer_class, data, action=self.action)
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)

    def update(self, request, pk=None):
        """Full update of one group."""
        try:
            queryset = None
            total = None
            serializer_class = None
            site_group = self.get_object(pk)
            serializer = SiteGroupSerializer(site_group, data=request.data)
            if serializer.is_valid():
                serializer.save()
                state = status.HTTP_200_OK
                message = "Updated Succesfully"
                error = False
                data = serializer.data
                result = response(self, request, queryset, total, state, message, error, serializer_class, data, action=self.action)
                return Response(result, status=status.HTTP_200_OK)
            state = status.HTTP_400_BAD_REQUEST
            message = "Invalid Input"
            error = True
            data = serializer.errors
            result = response(self, request, queryset, total, state, message, error, serializer_class, data, action=self.action)
            # BUGFIX: previously returned raw serializer.errors with HTTP 200,
            # discarding the result envelope built above; now consistent with create().
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)

    def destroy(self, request, pk=None):
        """Soft-delete one group (is_active=False) and detach its sites."""
        try:
            queryset = None
            total = None
            serializer_class = None
            data = None
            state = status.HTTP_204_NO_CONTENT
            try:
                instance = self.get_object(pk)
                self.perform_destroy(instance)
                message = "Deleted Succesfully"
                error = False
                result = response(self, request, queryset, total, state, message, error, serializer_class, data, action=self.action)
                return Response(result, status=status.HTTP_200_OK)
            except Http404:
                pass
            # unknown pk: report "No Content" rather than a 404
            message = "No Content"
            error = True
            result = response(self, request, queryset, total, state, message, error, serializer_class, data, action=self.action)
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)

    def perform_destroy(self, instance):
        # soft delete: flag inactive and detach member sites from the group
        instance.is_active = False
        site = ItemSitelist.objects.filter(Site_Groupid=instance).update(Site_Groupid=None)
        instance.save()
class CatalogItemDeptViewset(viewsets.ModelViewSet):
    """List catalog departments/brands selected by the Item_Dept query param.

    SERVICE and PACKAGE map to active service ItemDept rows; RETAIL,
    PREPAID and VOUCHER map to the matching ItemBrand flag. Retrieval is
    by ItemDept pk only.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = CatalogItemDeptSerializer

    def list(self, request):
        # NOTE(review): the SERVICE and PACKAGE branches are byte-identical —
        # confirm PACKAGE is intentionally the same filter as SERVICE.
        try:
            if not request.GET.get('Item_Dept', None) is None:
                if request.GET.get('Item_Dept', None) == 'SERVICE':
                    queryset = ItemDept.objects.filter(is_service=True, itm_status=True).order_by('itm_seq')
                elif request.GET.get('Item_Dept', None) == 'PACKAGE':
                    queryset = ItemDept.objects.filter(is_service=True, itm_status=True).order_by('itm_seq')
                elif request.GET.get('Item_Dept', None) == 'RETAIL':
                    queryset = ItemBrand.objects.filter(retail_product_brand=True, itm_status=True).order_by('itm_seq')
                elif request.GET.get('Item_Dept', None) == 'PREPAID':
                    queryset = ItemBrand.objects.filter(prepaid_brand=True, itm_status=True).order_by('itm_seq')
                elif request.GET.get('Item_Dept', None) == 'VOUCHER':
                    queryset = ItemBrand.objects.filter(voucher_brand=True, itm_status=True).order_by('itm_seq')
                else:
                    # unrecognised Item_Dept value
                    result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Dept id does not exist!!", 'error': True}
                    return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            else:
                # Item_Dept param missing entirely
                result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Dept id does not exist!!", 'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if queryset:
                serializer = self.get_serializer(queryset, many=True)
                result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False, 'data': serializer.data}
                return Response(data=result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)

    def get_object(self, pk):
        # retrieval works only for ItemDept rows (not ItemBrand)
        try:
            return ItemDept.objects.get(pk=pk, itm_status=True)
        except ItemDept.DoesNotExist:
            raise Http404

    def retrieve(self, request, pk=None):
        """Return one active ItemDept by pk."""
        try:
            itemdept = self.get_object(pk)
            serializer = CatalogItemDeptSerializer(itemdept)
            result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False, 'data': serializer.data}
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class CatalogItemRangeViewset(viewsets.ModelViewSet):
    """List ItemRange rows belonging to a department (Item_Deptid param).

    The id is first resolved as a service ItemDept; failing that, as an
    ItemBrand; if neither matches, a 400 is returned.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = ItemRangeSerializer

    def list(self, request):
        try:
            if not request.GET.get('Item_Deptid', None) is None:
                # try the id as a service department first
                item_id = ItemDept.objects.filter(pk=request.GET.get('Item_Deptid', None), itm_status=True).first()
                if item_id:
                    queryset = ItemRange.objects.filter(itm_dept=item_id.itm_code, isservice=True).order_by('pk')
                if item_id is None:
                    # fall back to treating the id as a brand
                    branditem_id = ItemBrand.objects.filter(pk=request.GET.get('Item_Deptid', None), itm_status=True).first()
                    if branditem_id:
                        queryset = ItemRange.objects.filter(itm_brand=branditem_id.itm_code).order_by('pk')
                # short-circuit keeps branditem_id unevaluated when item_id matched
                if not item_id and not branditem_id:
                    result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Dept id does not exist!!", 'error': True}
                    return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            else:
                result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Dept id does not exist!!", 'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if queryset:
                serializer = self.get_serializer(queryset, many=True)
                result = {'status': status.HTTP_200_OK, "message": "Listed Succesfully", 'error': False, 'data': serializer.data}
                return Response(data=result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT, "message": "No Content", 'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class ServiceStockViewset(viewsets.ModelViewSet):
    """List/retrieve active single-service stock items (item_type SINGLE,
    item_div "3").

    Optional ``list`` query params: ``Item_Deptid`` (service department pk),
    ``Item_Rangeid`` (service range pk), ``search`` (name/description
    substring). Responses use HTTP 200 with the real status embedded in the
    payload, matching the rest of this module.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = StockSerializer
    def list(self, request):
        """Paginated listing via the shared ``response`` helper; each row's
        item_price is re-formatted to two decimals."""
        try:
            queryset = Stock.objects.filter(item_isactive=True, item_type="SINGLE", item_div="3").order_by('pk')
            if request.GET.get('Item_Deptid',None):
                # Redundant: the outer truthiness check already excludes None.
                if not request.GET.get('Item_Deptid',None) is None:
                    item_dept = ItemDept.objects.filter(pk=request.GET.get('Item_Deptid',None), is_service=True, itm_status=True).first()
                    if not item_dept:
                        result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Dept id does not exist!!",'error': True}
                        return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                    queryset = Stock.objects.filter(item_isactive=True, item_type="SINGLE", item_dept=item_dept.itm_code).order_by('pk')
            # else:
            #     result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Dept id does not exist!!",'error': True}
            #     return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if request.GET.get('Item_Rangeid',None):
                if not request.GET.get('Item_Rangeid',None) is None:
                    if request.GET.get('Item_Rangeid',None):
                        # Range filter overrides any dept filter applied above.
                        itemrange = ItemRange.objects.filter(pk=request.GET.get('Item_Rangeid',None), isservice=True).first()
                        if not itemrange:
                            result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Range Id does not exist!!",'error': True}
                            return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                        queryset = Stock.objects.filter(item_isactive=True, item_type="SINGLE", item_range=itemrange.itm_code).order_by('pk')
                    else:
                        # NOTE(review): unreachable — the same GET value was
                        # already required truthy two levels up; if it could
                        # run it would NameError on item_dept when Item_Deptid
                        # was not supplied.
                        queryset = Stock.objects.filter(item_isactive=True, item_type="SINGLE", item_dept=item_dept.itm_code).order_by('pk')
            if request.GET.get('search',None):
                if not request.GET.get('search',None) is None:
                    queryset = queryset.filter(Q(item_name__icontains=request.GET.get('search',None)) | Q(item_desc__icontains=request.GET.get('search',None)))
            serializer_class = StockSerializer
            total = len(queryset)
            state = status.HTTP_200_OK
            message = "Listed Succesfully"
            error = False
            data = None
            result=response(self,request, queryset, total, state, message, error, serializer_class, data, action=self.action)
            v = result.get('data')
            d = v.get("dataList")
            for dat in d:
                # Normalise price display to two decimals.
                dat["item_price"] = "{:.2f}".format(float(dat['item_price']))
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def get_object(self, pk):
        """Fetch one active SINGLE stock item or raise Http404."""
        try:
            return Stock.objects.get(pk=pk, item_isactive=True, item_type="SINGLE")
        except Stock.DoesNotExist:
            raise Http404
    def retrieve(self, request, pk=None):
        """Serialize one service item; Stock_PIC is rewritten to an absolute
        http URL and item_price formatted to two decimals."""
        try:
            stock = self.get_object(pk)
            serializer = StockSerializer(stock)
            result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False, 'data': serializer.data}
            v = result.get('data')
            if v['Stock_PIC']:
                v['Stock_PIC'] = str("http://"+request.META['HTTP_HOST']) + str(v['Stock_PIC'])
            if v['item_price']:
                v['item_price'] = "{:.2f}".format(float(v['item_price']))
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class RetailStockListViewset(viewsets.ModelViewSet):
    """List/retrieve active retail stock items (item_div "1") with per-UOM
    prices attached.

    Optional ``list`` query params: ``Item_Deptid`` (despite the name, a
    retail *brand* pk), ``Item_Rangeid`` (product range pk), ``search``
    (name/description substring). Responses use HTTP 200 with the real status
    embedded in the payload, matching the rest of this module.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = StockRetailSerializer
    def _uom_prices(self, item_code):
        """Build the ``uomprice`` entries for one stock item.

        Each active ItemUomprice row is paired with its active ItemUom (looked
        up by uom code); when no ItemUom matches, itemuom_id/itemuom_desc are
        None instead of raising. Previously ``retrieve`` looked ItemUom up by
        uom_desc and crashed on a missing match (AttributeError swallowed into
        a generic error); both endpoints now share this None-safe lookup.
        """
        uomlst = []
        itemuomprice = ItemUomprice.objects.filter(isactive=True, item_code=item_code).order_by('id')
        for i in itemuomprice:
            itemuom = ItemUom.objects.filter(uom_isactive=True, uom_code=i.item_uom).order_by('id').first()
            itemuom_id = None
            itemuom_desc = None
            if itemuom:
                itemuom_id = int(itemuom.id)
                itemuom_desc = itemuom.uom_desc
            uomlst.append({
                "itemuomprice_id": int(i.id),
                "item_uom": i.item_uom,
                "uom_desc": i.uom_desc,
                "item_price": "{:.2f}".format(float(i.item_price)),
                "itemuom_id": itemuom_id,
                "itemuom_desc": itemuom_desc})
        return uomlst
    def list(self, request):
        """Paginated listing via the shared ``response`` helper; every row is
        augmented with its ``uomprice`` list."""
        try:
            queryset = Stock.objects.filter(item_isactive=True, item_div="1").order_by('pk')
            if request.GET.get('Item_Deptid', None):
                item_brand = ItemBrand.objects.filter(pk=request.GET.get('Item_Deptid', None), retail_product_brand=True, itm_status=True).first()
                if not item_brand:
                    result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Brand id does not exist!!",'error': True}
                    return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                queryset = Stock.objects.filter(item_isactive=True, item_brand=item_brand.itm_code).order_by('pk')
            if request.GET.get('Item_Rangeid', None):
                # The range filter overrides any brand filter applied above.
                itemrange = ItemRange.objects.filter(pk=request.GET.get('Item_Rangeid', None), isproduct=True).first()
                if not itemrange:
                    result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Range Id does not exist!!",'error': True}
                    return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                queryset = Stock.objects.filter(item_isactive=True, item_range=itemrange.itm_code).order_by('pk')
            if request.GET.get('search', None):
                queryset = queryset.filter(Q(item_name__icontains=request.GET.get('search', None)) | Q(item_desc__icontains=request.GET.get('search', None)))
            serializer_class = StockRetailSerializer
            total = len(queryset)
            state = status.HTTP_200_OK
            message = "Listed Succesfully"
            error = False
            data = None
            result = response(self, request, queryset, total, state, message, error, serializer_class, data, action=self.action)
            v = result.get('data')
            d = v.get("dataList")
            lst = []
            for dat in d:
                q = dict(dat)
                stock = Stock.objects.filter(item_isactive=True, pk=q['id']).first()
                q.update({'uomprice': self._uom_prices(stock.item_code)})
                lst.append(q)
            v['dataList'] = lst
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def get_object(self, pk):
        """Fetch one active stock item or raise Http404."""
        try:
            return Stock.objects.get(pk=pk, item_isactive=True)
        except Stock.DoesNotExist:
            raise Http404
    def retrieve(self, request, pk=None):
        """Serialize one retail item with its ``uomprice`` list and an
        absolute Stock_PIC URL.

        Bug fix: the absolute-URL prefix is now applied to the copy that is
        actually returned — the old code mutated the serializer dict *after*
        copying it, so the rewritten Stock_PIC never reached the response.
        """
        try:
            ip = get_client_ip(request)  # kept for parity with the original code
            stock = self.get_object(pk)
            serializer = StockRetailSerializer(stock)
            uomlst = self._uom_prices(stock.item_code)
            result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False, 'data': serializer.data, 'Item_Price': uomlst}
            q = dict(result.get('data'))
            if q['Stock_PIC']:
                q['Stock_PIC'] = str("http://" + request.META['HTTP_HOST']) + str(q['Stock_PIC'])
            q.update({'uomprice': uomlst})
            result['data'] = q
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class PackageStockViewset(viewsets.ModelViewSet):
    """List/retrieve active PACKAGE stock items (item_div "3").

    Same structure as ServiceStockViewset but restricted to
    item_type="PACKAGE". Optional ``list`` query params: ``Item_Deptid``,
    ``Item_Rangeid``, ``search``.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = StockSerializer
    def list(self, request):
        """Paginated listing via the shared ``response`` helper; item_price is
        re-formatted to two decimals."""
        try:
            queryset = Stock.objects.filter(item_isactive=True, item_type="PACKAGE", item_div="3").order_by('pk')
            if request.GET.get('Item_Deptid',None):
                # Redundant: the outer truthiness check already excludes None.
                if not request.GET.get('Item_Deptid',None) is None:
                    item_dept = ItemDept.objects.filter(pk=request.GET.get('Item_Deptid',None), is_service=True, itm_status=True).first()
                    if not item_dept:
                        result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Dept id does not exist!!",'error': True}
                        return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                    queryset = Stock.objects.filter(item_isactive=True, item_type="PACKAGE", item_dept=item_dept.itm_code).order_by('pk')
            # else:
            #     result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Dept id does not exist!!",'error': True}
            #     return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if request.GET.get('Item_Rangeid',None):
                if not request.GET.get('Item_Rangeid',None) is None:
                    if request.GET.get('Item_Rangeid',None):
                        itemrange = ItemRange.objects.filter(pk=request.GET.get('Item_Rangeid',None), isservice=True).first()
                        if not itemrange:
                            result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Range Id does not exist!!",'error': True}
                            return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                        queryset = Stock.objects.filter(item_isactive=True, item_type="PACKAGE", item_range=itemrange.itm_code).order_by('pk')
                    else:
                        # NOTE(review): unreachable — the same GET value was
                        # already required truthy two levels up; would NameError
                        # on item_dept when Item_Deptid was not supplied.
                        queryset = Stock.objects.filter(item_isactive=True, item_type="PACKAGE", item_dept=item_dept.itm_code).order_by('pk')
            if request.GET.get('search',None):
                if not request.GET.get('search',None) is None:
                    queryset = queryset.filter(Q(item_name__icontains=request.GET.get('search',None)) | Q(item_desc__icontains=request.GET.get('search',None)))
            serializer_class = StockSerializer
            total = len(queryset)
            state = status.HTTP_200_OK
            message = "Listed Succesfully"
            error = False
            data = None
            result=response(self,request, queryset, total, state, message, error, serializer_class, data, action=self.action)
            v = result.get('data')
            d = v.get("dataList")
            for dat in d:
                dat["item_price"] = "{:.2f}".format(float(dat['item_price']))
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def get_object(self, pk):
        """Fetch one active PACKAGE stock item or raise Http404."""
        try:
            return Stock.objects.get(pk=pk, item_isactive=True, item_type="PACKAGE")
        except Stock.DoesNotExist:
            raise Http404
    def retrieve(self, request, pk=None):
        """Serialize one package item; Stock_PIC becomes an absolute http URL
        and item_price is formatted to two decimals."""
        try:
            stock = self.get_object(pk)
            serializer = StockSerializer(stock)
            result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False, 'data': serializer.data}
            v = result.get('data')
            if v['Stock_PIC']:
                v['Stock_PIC'] = str("http://"+request.META['HTTP_HOST']) + str(v['Stock_PIC'])
            if v['item_price']:
                v['item_price'] = "{:.2f}".format(float(v['item_price']))
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class PackageDtlViewset(viewsets.ModelViewSet):
    """Return the description lines (PackageDtl rows) and picture for one
    package stock item, selected by the ``stock_id`` query param.

    The pk filter yields at most one Stock row, so the loop below effectively
    runs once; ``result`` holds whatever the (single) iteration produced.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = StockIdSerializer
    def list(self, request):
        try:
            stock = Stock.objects.filter(pk=request.GET.get('stock_id',None), item_isactive=True)
            if not stock:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Stock Id does not exist!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            for s in stock:
                # Absolute image URL, or None when no picture is stored.
                if s.Stock_PIC:
                    image = {"STOCK_PIC" : str("http://"+request.META['HTTP_HOST'])+str(s.Stock_PIC.url)}
                else:
                    image = None
                detail = []; package = {}
                package_dtl = PackageDtl.objects.filter(package_code=s.item_code)
                if package_dtl:
                    for p in package_dtl:
                        package = {
                            "stock_id": s.pk,
                            "id": p.id,
                            "Description": p.description}
                        detail.append(package)
                    package_data = {"package_description": detail,
                "image" : image}
                    result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False, 'data': package_data }
                else:
                    # NOTE(review): this serializer is never used.
                    serializer = self.get_serializer()
                    result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
            return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class PrepaidStockViewset(viewsets.ModelViewSet):
    """List/retrieve active prepaid stock items (item_div "5").

    Optional ``list`` query params: ``Item_Deptid`` (despite the name, a
    prepaid *brand* pk), ``Item_Rangeid`` (prepaid range pk), ``search``.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = StockSerializer
    def list(self, request):
        """Paginated listing via the shared ``response`` helper; item_price is
        re-formatted to two decimals."""
        try:
            queryset = Stock.objects.filter(item_isactive=True, item_div="5").order_by('pk')
            if request.GET.get('Item_Deptid',None):
                # Redundant: the outer truthiness check already excludes None.
                if not request.GET.get('Item_Deptid',None) is None:
                    item_brand = ItemBrand.objects.filter(pk=request.GET.get('Item_Deptid',None), prepaid_brand=True).first()
                    if not item_brand:
                        result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Dept id does not exist!!",'error': True}
                        return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                    queryset = Stock.objects.filter(item_isactive=True, item_brand=item_brand.itm_code).order_by('pk')
            # else:
            #     result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Dept id does not exist!!",'error': True}
            #     return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if request.GET.get('Item_Rangeid',None):
                if not request.GET.get('Item_Rangeid',None) is None:
                    if request.GET.get('Item_Rangeid',None):
                        itemrange = ItemRange.objects.filter(pk=request.GET.get('Item_Rangeid',None), isprepaid=True).first()
                        if not itemrange:
                            result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Range Id does not exist!!",'error': True}
                            return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                        queryset = Stock.objects.filter(item_isactive=True, item_range=itemrange.itm_code).order_by('pk')
                    else:
                        # NOTE(review): unreachable — same GET value already
                        # required truthy two levels up; would NameError on
                        # item_brand when Item_Deptid was not supplied.
                        queryset = Stock.objects.filter(item_isactive=True, item_brand=item_brand.itm_code).order_by('pk')
            if request.GET.get('search',None):
                if not request.GET.get('search',None) is None:
                    queryset = queryset.filter(Q(item_name__icontains=request.GET.get('search',None)) | Q(item_desc__icontains=request.GET.get('search',None)))
            serializer_class = StockSerializer
            total = len(queryset)
            state = status.HTTP_200_OK
            message = "Listed Succesfully"
            error = False
            data = None
            result=response(self,request, queryset, total, state, message, error, serializer_class, data, action=self.action)
            v = result.get('data')
            d = v.get("dataList")
            for dat in d:
                dat["item_price"] = "{:.2f}".format(float(dat['item_price']))
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def get_object(self, pk):
        """Fetch one active stock item or raise Http404."""
        try:
            return Stock.objects.get(pk=pk, item_isactive=True)
        except Stock.DoesNotExist:
            raise Http404
    def retrieve(self, request, pk=None):
        """Serialize one prepaid item; Stock_PIC becomes an absolute http URL
        and item_price is formatted to two decimals."""
        try:
            stock = self.get_object(pk)
            serializer = StockSerializer(stock)
            result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False, 'data': serializer.data}
            v = result.get('data')
            if v['Stock_PIC']:
                v['Stock_PIC'] = str("http://"+request.META['HTTP_HOST']) + str(v['Stock_PIC'])
            if v['item_price']:
                v['item_price'] = "{:.2f}".format(float(v['item_price']))
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class VoucherStockViewset(viewsets.ModelViewSet):
    """List/retrieve active voucher stock items (item_div "4").

    Optional ``list`` query params: ``Item_Deptid`` (despite the name, a
    voucher *brand* pk), ``Item_Rangeid`` (voucher range pk), ``search``.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = StockSerializer
    def list(self, request):
        """Paginated listing via the shared ``response`` helper; item_price is
        re-formatted to two decimals."""
        try:
            queryset = Stock.objects.filter(item_isactive=True, item_div="4").order_by('pk')
            if request.GET.get('Item_Deptid',None):
                # Redundant: the outer truthiness check already excludes None.
                if not request.GET.get('Item_Deptid',None) is None:
                    item_brand = ItemBrand.objects.filter(pk=request.GET.get('Item_Deptid',None), voucher_brand=True).first()
                    if not item_brand:
                        result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Dept id does not exist!!",'error': True}
                        return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                    queryset = Stock.objects.filter(item_isactive=True, item_brand=item_brand.itm_code).order_by('pk')
            # else:
            #     result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Dept id does not exist!!",'error': True}
            #     return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if request.GET.get('Item_Rangeid',None):
                if not request.GET.get('Item_Rangeid',None) is None:
                    if request.GET.get('Item_Rangeid',None):
                        itemrange = ItemRange.objects.filter(pk=request.GET.get('Item_Rangeid',None), isvoucher=True).first()
                        if not itemrange:
                            result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Range Id does not exist!!",'error': True}
                            return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                        queryset = Stock.objects.filter(item_isactive=True, item_range=itemrange.itm_code).order_by('pk')
                    else:
                        # NOTE(review): unreachable — same GET value already
                        # required truthy two levels up; would NameError on
                        # item_brand when Item_Deptid was not supplied.
                        queryset = Stock.objects.filter(item_isactive=True, item_brand=item_brand.itm_code).order_by('pk')
            if request.GET.get('search',None):
                if not request.GET.get('search',None) is None:
                    queryset = queryset.filter(Q(item_name__icontains=request.GET.get('search',None)) | Q(item_desc__icontains=request.GET.get('search',None)))
            serializer_class = StockSerializer
            total = len(queryset)
            state = status.HTTP_200_OK
            message = "Listed Succesfully"
            error = False
            data = None
            result=response(self,request, queryset, total, state, message, error, serializer_class, data, action=self.action)
            v = result.get('data')
            d = v.get("dataList")
            for dat in d:
                dat["item_price"] = "{:.2f}".format(float(dat['item_price']))
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def get_object(self, pk):
        """Fetch one active stock item or raise Http404."""
        try:
            return Stock.objects.get(pk=pk, item_isactive=True)
        except Stock.DoesNotExist:
            raise Http404
    def retrieve(self, request, pk=None):
        """Serialize one voucher item; Stock_PIC becomes an absolute http URL
        and item_price is formatted to two decimals."""
        try:
            stock = self.get_object(pk)
            serializer = StockSerializer(stock)
            result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False, 'data': serializer.data}
            v = result.get('data')
            if v['Stock_PIC']:
                v['Stock_PIC'] = str("http://"+request.META['HTTP_HOST']) + str(v['Stock_PIC'])
            if v['item_price']:
                v['item_price'] = "{:.2f}".format(float(v['item_price']))
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class CatalogSearchViewset(viewsets.ModelViewSet):
    """Free-text search across all active stock items.

    ``list`` matches the ``search`` query param against item name or
    description; ``retrieve`` returns a single active item with an absolute
    picture URL and a two-decimal price string.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = StockSerializer
    def get_queryset(self):
        """Active stock filtered by ``search``; empty queryset when absent."""
        term = self.request.GET.get('search', None)
        if not term:
            return Stock.objects.none()
        active = Stock.objects.filter(item_isactive=True).order_by('pk')
        return active.filter(Q(item_name__icontains=term) | Q(item_desc__icontains=term))
    def list(self, request, *args, **kwargs):
        try:
            matches = self.filter_queryset(self.get_queryset())
            result = response(self, request, matches, len(matches),
                              status.HTTP_200_OK, "Listed Succesfully", False,
                              StockSerializer, None, action=self.action)
            # Normalise price display to two decimals on every row.
            for row in result.get('data').get("dataList"):
                row["item_price"] = "{:.2f}".format(float(row['item_price']))
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            return general_error_response(str(e))
    def get_object(self, pk):
        """Fetch one active stock item or raise Http404."""
        try:
            return Stock.objects.get(pk=pk, item_isactive=True)
        except Stock.DoesNotExist:
            raise Http404
    def retrieve(self, request, pk=None):
        try:
            record = StockSerializer(self.get_object(pk)).data
            if record['Stock_PIC']:
                record['Stock_PIC'] = str("http://" + request.META['HTTP_HOST']) + str(record['Stock_PIC'])
            if record['item_price']:
                record['item_price'] = "{:.2f}".format(float(record['item_price']))
            result = {'status': status.HTTP_200_OK, "message": "Listed Succesfully", 'error': False, 'data': record}
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            return general_error_response(str(e))
class CatalogFavoritesViewset(viewsets.ModelViewSet):
    """'Favorites': stock items sold (dt_status 'SA') at the user's login site
    during the current calendar month whose total quantity exceeds the
    threshold below.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = StockSerializer
    def get_queryset(self):
        """Collect qualifying item pks, then return the matching active Stock.

        Improvements over the previous version: the unused second per-row
        query (``qdaudids``) is gone, and the aggregate is skipped for items
        that already qualified — same result, far fewer DB hits.
        """
        today = timezone.now().date()
        month = today.month
        # NOTE(review): filtering on month only also matches this month of
        # *previous* years; probably needs created_at__date__year too — confirm.
        fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
        site = fmspw[0].loginsite
        daud_ids = PosDaud.objects.filter(ItemSite_Codeid__pk=site.pk,created_at__date__month=month,
            dt_qty__gt = 0,dt_status='SA').only('itemsite_code','created_at','dt_qty','dt_status').order_by('-pk')
        pro_ids = set()
        for d in daud_ids:
            if d.dt_itemnoid.pk in pro_ids:
                continue  # already qualified; no need to aggregate again
            total = PosDaud.objects.filter(ItemSite_Codeid__pk=site.pk,created_at__date__month=month,
                dt_itemnoid=d.dt_itemnoid,dt_qty__gt = 0,dt_status='SA').aggregate(Sum('dt_qty'))
            #client qty > 10 need to change later
            if float(total['dt_qty__sum']) > 1:
                pro_ids.add(d.dt_itemnoid.pk)
        if pro_ids:
            return Stock.objects.filter(pk__in=pro_ids,item_isactive=True).order_by('pk')
        return Stock.objects.none()
    def list(self, request, *args, **kwargs):
        """Paginated listing via the shared ``response`` helper; item_price is
        re-formatted to two decimals."""
        try:
            serializer_class = StockSerializer
            queryset = self.filter_queryset(self.get_queryset())
            total = len(queryset)
            state = status.HTTP_200_OK
            message = "Listed Succesfully"
            error = False
            data = None
            result=response(self,request, queryset, total, state, message, error, serializer_class, data, action=self.action)
            v = result.get('data')
            d = v.get("dataList")
            for dat in d:
                dat["item_price"] = "{:.2f}".format(float(dat['item_price']))
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def get_object(self, pk):
        """Fetch one active stock item or raise Http404."""
        try:
            return Stock.objects.get(pk=pk, item_isactive=True)
        except Stock.DoesNotExist:
            raise Http404
    def retrieve(self, request, pk=None):
        """Serialize one item; Stock_PIC becomes an absolute http URL and
        item_price is formatted to two decimals."""
        try:
            stock = self.get_object(pk)
            serializer = StockSerializer(stock)
            result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False, 'data': serializer.data}
            v = result.get('data')
            if v['Stock_PIC']:
                v['Stock_PIC'] = str("http://"+request.META['HTTP_HOST']) + str(v['Stock_PIC'])
            if v['item_price']:
                v['item_price'] = "{:.2f}".format(float(v['item_price']))
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class SalonProductSearchViewset(viewsets.ModelViewSet):
    """Search active SALON PRODUCT stock by name/description substring."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = StockSerializer
    def get_queryset(self):
        """Matching salon-product stock; empty queryset when ``search`` is
        absent (an empty string still searches, unlike the other viewsets)."""
        q = self.request.GET.get('search',None)
        if q is None:
            return Stock.objects.none()
        itm_div = ItemDiv.objects.filter(itm_isactive=True, itm_code=2, itm_desc="SALON PRODUCT").first()
        return Stock.objects.filter(item_isactive=True, Item_Divid=itm_div).filter(Q(item_name__icontains=q) | Q(item_desc__icontains=q)).order_by('pk')
    def list(self, request):
        """Unpaginated listing; item_price formatted to two decimals.

        Cleanup: the previous empty branch instantiated an unused serializer;
        that dead call has been removed.
        """
        try:
            queryset = self.filter_queryset(self.get_queryset())
            if queryset:
                serializer = self.get_serializer(queryset, many=True)
                result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False, 'data': serializer.data}
            else:
                result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False, 'data': []}
            for i in result.get('data'):
                i["item_price"] = "{:.2f}".format(float(i['item_price']))
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class ForgotPswdRequestOtpAPIView(APIView):
    """Request a password-reset OTP.

    Generates a TOTP value from the shared SMS_SECRET_KEY, stores it on the
    Employee record, and texts it to the employee's phone via Twilio.

    NOTE(review): ``Employee.objects.get``/``Fmspw.objects.get`` raise when
    the name is unknown; the broad except returns that message to the caller,
    which can leak whether an account exists. "Sended" in the success message
    is a runtime string left untouched here.
    """
    permission_classes = (AllowAny,)
    serializer_class = OtpRequestSerializer
    def post(self, request):
        try:
            serializer = self.serializer_class(data=request.data)
            serializer.is_valid(raise_exception=True)
            request_data = serializer.validated_data
            emp_name = request_data['emp_name']
            employee = Employee.objects.get(emp_name=emp_name)
            fmspw = Fmspw.objects.get(Emp_Codeid=employee, pw_isactive=True)
            if fmspw:
                # Time-based OTP; the current value is persisted for later
                # comparison by ForgotPswdOtpValidationAPIView.
                totp = pyotp.TOTP(SMS_SECRET_KEY)
                otp = totp.now()
                employee.otp = otp
                employee.save()
                client = Client(SMS_ACCOUNT_SID, SMS_AUTH_TOKEN)
                receiver = employee.emp_phone1
                message = client.messages.create(
                    body='Your change password request otp is {}'.format(otp),
                    from_=SMS_SENDER,
                    to=receiver
                )
                result = {'status': status.HTTP_200_OK, "message": "OTP Sended Successfully", 'error': False}
            else:
                result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Failed to send OTP", 'error': False}
            return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class ForgotPswdOtpValidationAPIView(APIView):
    """Validate a previously requested password-reset OTP.

    ``emp_name`` comes from the query string while ``otp`` comes from the
    request body (asymmetric, but that is what callers send).

    NOTE(review): the stored OTP is not cleared after a successful check, so
    it can be replayed until a new one is requested — confirm intent.
    """
    permission_classes = (AllowAny,)
    serializer_class = OtpValidationSerializer
    def post(self, request):
        try:
            serializer = self.serializer_class(data=request.data)
            serializer.is_valid(raise_exception=True)
            request_data = serializer.validated_data
            emp_name = self.request.GET.get('emp_name',None)
            otp = request_data['otp']
            employee = Employee.objects.get(emp_name=emp_name)
            fmspw = Fmspw.objects.get(Emp_Codeid=employee, pw_isactive=True)
            if fmspw and employee.otp == otp:
                result = {'status': status.HTTP_200_OK, "message": "OTP Verified Successfully", 'error': False}
            else:
                result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Failed...! Please enter a valid OTP", 'error': False}
            return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class ResetPasswordAPIView(APIView):
    """Set a new password for the employee named in the ``emp_name`` query
    param, updating Fmspw, the Django User, and the Employee record.

    NOTE(review,security): the new password is stored in plaintext on both
    ``fmspw.pw_password`` and ``employee.pw_password`` (only the Django User
    gets a hash via ``set_password``). Also, this endpoint is AllowAny and
    does not re-verify the OTP, so anyone knowing an emp_name can reset its
    password — confirm an upstream guard exists.
    """
    permission_classes = (AllowAny,)
    serializer_class = ResetPasswordSerializer
    def post(self, request):
        try:
            serializer = self.serializer_class(data=request.data)
            serializer.is_valid(raise_exception=True)
            request_data = serializer.validated_data
            emp_name = self.request.GET.get('emp_name',None)
            new_password = request_data['new_password']
            employee = Employee.objects.get(emp_name=emp_name)
            fmspw = Fmspw.objects.get(Emp_Codeid=employee, pw_isactive=True)
            user = User.objects.get(username=emp_name)
            if fmspw:
                fmspw.pw_password = new_password
                fmspw.save()
                user.set_password(new_password)
                user.save()
                employee.pw_password = new_password
                employee.save()
                result = {'status': status.HTTP_200_OK, "message": "Password Changed Successfully", 'error': False}
            else:
                result = {'status': status.HTTP_400_BAD_REQUEST, 'message': "Failed to change Password", 'error': False}
            return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
# class UpdateStockAPIView(APIView):
# authentication_classes = [ExpiringTokenAuthentication]
# permission_classes = [IsAuthenticated & authenticated_only]
# queryset = Stock.objects.filter().order_by('-pk')
# serializer_class = StockSerializer
# def post(self, request):
# queryset = Stock.objects.filter().order_by('-pk')
# for s in queryset:
# print(s.pk,"PK")
# divobj = ItemDiv.objects.filter(itm_code=s.item_div).first()
# deptobj = ItemDept.objects.filter(itm_code=s.item_dept).first()
# classobj = ItemClass.objects.filter(itm_code=s.item_class).first()
# rangeobj = ItemRange.objects.filter(itm_code=s.item_range).first()
# typeobj = ItemType.objects.filter(itm_name=s.item_type).first()
# Stock.objects.filter(pk=s.pk).update(Item_Divid=divobj,Item_Deptid=deptobj,Item_Classid=classobj,Item_Rangeid=rangeobj,Item_Typeid=typeobj)
# print(s.Item_Divid,s.Item_Deptid,s.Item_Classid,s.Item_Rangeid,s.Item_Typeid,"kkk")
# return True
class ReceiptPdfSendSMSAPIView(APIView):
    """Generate a receipt PDF for a sale transaction and SMS the link to the
    customer (respecting the customer's custallowsendsms flag).

    ``sa_transacno`` is required; the transaction must exist in PosHaud for
    the requesting user's login site.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    def post(self, request, format=None):
        try:
            if request.GET.get('sa_transacno',None) is None:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Please give sa_transacno!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True).first()
            site = fmspw.loginsite
            sa_transacno = request.GET.get('sa_transacno',None)
            hdr = PosHaud.objects.filter(sa_transacno=sa_transacno,
                ItemSite_Codeid__pk=site.pk).order_by("pk")
            if not hdr:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Sa Transacno Does not exist in Poshaud!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            # Renders the receipt and returns a shareable link (helper defined
            # elsewhere in this module).
            pdf_link = GeneratePDF(self,request, sa_transacno)
            if not pdf_link:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Pdf link not generated",'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            Pos_daud = PosDaud.objects.filter(sa_transacno=sa_transacno,
                ItemSite_Codeid__pk=site.pk).first()
            if not Pos_daud.itemcart.cust_noid.cust_phone2:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Please give customer mobile number!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            allow_sms = Pos_daud.itemcart.cust_noid.custallowsendsms
            if allow_sms:
                cust_name = Pos_daud.itemcart.cust_noid.cust_name
                client = Client(SMS_ACCOUNT_SID, SMS_AUTH_TOKEN)
                receiver = Pos_daud.itemcart.cust_noid.cust_phone2
                # Inner try so a Twilio failure is reported distinctly from
                # the lookup errors above.
                try:
                    message = client.messages.create(
                        body='''Dear {0},\nYour receipt bill no {1}.Please check your bill in this link {2}.\nThank You,'''.format(cust_name,sa_transacno,pdf_link),
                        from_=SMS_SENDER,
                        to=receiver
                    )
                    result = {'status': status.HTTP_200_OK,"message":"SMS sent succesfully",'error': False}
                except Exception as e:
                    invalid_message = str(e)
                    return general_error_response(invalid_message)
            else:
                result = {'status': status.HTTP_400_BAD_REQUEST, 'message': "Customer doesn't wish to send SMS", 'error': False}
            return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class CustomerSignatureAPIView(APIView):
    """Store a customer's signature (UTF-8 bytes) on the Customer record.

    ``cust_code`` comes from the query string; the signature payload is
    validated by CustomerSignSerializer. Previously the customer row was
    fetched twice (filter for existence, then get); a single ``first()``
    lookup now serves both purposes and also tolerates duplicate rows that
    would have made ``get`` raise MultipleObjectsReturned.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = CustomerSignSerializer
    def post(self, request):
        try:
            cust_code = self.request.GET.get('cust_code',None)
            customer = Customer.objects.filter(cust_code=cust_code,cust_isactive=True).first()
            if customer is None:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Please give customer code!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            serializer = self.serializer_class(data=request.data)
            serializer.is_valid(raise_exception=True)
            request_data = serializer.validated_data
            customersign = request_data['customersign']
            if customersign is not None:
                # Signature arrives as text; persisted as UTF-8 bytes.
                customer.customersign = bytes(customersign, encoding='utf8')
                customer.save()
                result = {'status': status.HTTP_200_OK, "message": "Customer Signature updated Successfully", 'error': False}
            else:
                result = {'status': status.HTTP_400_BAD_REQUEST, 'message': "Failed to update customer signature", 'error': False}
            return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class TopupViewset(viewsets.ModelViewSet):
    """Top-up listing endpoint.

    Lists a customer's open 'Deposit' treatment-account rows (outstanding > 0)
    at the logged-in user's site, plus a header summary with the total
    outstanding amount.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = TreatmentAccountSerializer

    def list(self, request):
        """GET ?cust_id=<pk> -> {'header_data': ..., 'data': [...]}.

        Returns 200 with error=True when cust_id is missing/unknown, and 400
        on authentication or site-mapping problems.
        """
        try:
            cust_id = self.request.GET.get('cust_id', None)
            cust_obj = Customer.objects.filter(pk=cust_id, cust_isactive=True).first()
            if cust_obj is None:
                result = {'status': status.HTTP_200_OK,"message":"Please give customer id!!",'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            fmspw = Fmspw.objects.filter(user=self.request.user, pw_isactive=True)
            if not self.request.user.is_authenticated:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Unauthenticated Users are not allowed!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if not fmspw:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Unauthenticated Users are not Permitted!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            site = fmspw[0].loginsite
            if not site:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Users Item Site is not mapped!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            queryset = TreatmentAccount.objects.filter(Cust_Codeid=cust_id, Site_Codeid=site, type='Deposit', outstanding__gt=0).order_by('pk')
            total_outstanding = 0  # renamed from `sum`, which shadowed the builtin
            lst = []
            header_data = {"customer_name" : cust_obj.cust_name,"old_outstanding" : "0.00",
                           "topup_amount" : "0.00","new_outstanding" : "0.00"}
            if queryset:
                for q in queryset:
                    # The newest row in the same deposit chain carries the
                    # current balance/outstanding figures.
                    acc_ids = TreatmentAccount.objects.filter(ref_transacno=q.sa_transacno,
                            treatment_parentcode=q.treatment_parentcode, Site_Codeid=site).order_by('id').last()
                    acc = TreatmentAccount.objects.filter(pk=acc_ids.pk)
                    serializer = self.get_serializer(acc, many=True)
                    if acc_ids.outstanding > 0.0:
                        for data in serializer.data:
                            pos_haud = PosHaud.objects.filter(sa_custnoid=cust_id, ItemSite_Codeid__pk=site.pk,
                                    sa_transacno_type="Receipt", sa_transacno=q.sa_transacno).first()
                            total_outstanding += data['outstanding']
                            if pos_haud:
                                if pos_haud.sa_date:
                                    # Reformat "YYYY-MM-DD ..." to "DD-MM-YYYY" for display.
                                    splt = str(pos_haud.sa_date).split(" ")
                                    data['sa_date'] = datetime.datetime.strptime(str(splt[0]), "%Y-%m-%d").strftime("%d-%m-%Y")
                            data['TreatmentAccountid'] = q.pk
                            data["pay_amount"] = None
                            if data['sa_transacno']:
                                data['sa_transacno'] = pos_haud.sa_transacno_ref
                            if data['treatment_parentcode']:
                                data['treatment_parentcode'] = q.treatment_parentcode
                            if data["description"]:
                                trmt = Treatment.objects.filter(treatment_account=q.pk).last()
                                if trmt:
                                    data["description"] = trmt.course
                                    data['stock_id'] = trmt.Item_Codeid.pk
                            if data["balance"]:
                                data["balance"] = "{:.2f}".format(float(data['balance']))
                            else:
                                data["balance"] = "0.00"
                            if data["outstanding"]:
                                data["outstanding"] = "{:.2f}".format(float(data['outstanding']))
                            else:
                                data["outstanding"] = "0.00"
                            lst.append(data)
                if lst != []:
                    header_data = {"customer_name" : cust_obj.cust_name,"old_outstanding" : "{:.2f}".format(float(total_outstanding)),
                                   "topup_amount" : None,"new_outstanding" : "{:.2f}".format(float(total_outstanding))}
                    result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False, 'header_data':header_data, 'data': lst}
                    return Response(result, status=status.HTTP_200_OK)
                else:
                    result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False,'header_data':header_data, 'data': []}
                    return Response(result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False,'header_data':header_data, 'data': []}
                return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class TreatmentDoneViewset(viewsets.ModelViewSet):
    """Lists a customer's open treatments and keeps helper allocations tidy."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = TreatmentDoneSerializer

    @action(methods=['get'], detail=False, permission_classes=[IsAuthenticated & authenticated_only],
            authentication_classes=[ExpiringTokenAuthentication])
    def Year(self, request):
        """Return selectable years 2010..current plus 'All', newest first."""
        try:
            today = timezone.now()
            # "All" is appended last so it leads after the [::-1] reversal.
            # (Dropped the original's unused `year = today.year` local.)
            res = list(range(2010, today.year + 1))
            res.append("All")
            result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False, 'data': res[::-1]}
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)

    def list(self, request):
        """GET ?cust_id=<pk>[&year=YYYY|All] -> open treatments at this site.

        Side effects: prunes TmpItemHelper rows whose timing fields are
        incomplete when no appointment exists today, and rebuilds each
        treatment's helper_ids m2m from the remaining rows.
        """
        try:
            cust_id = self.request.GET.get('cust_id',None)
            cust_obj = Customer.objects.filter(pk=request.GET.get('cust_id',None),cust_isactive=True).first()
            if cust_obj is None:
                result = {'status': status.HTTP_200_OK,"message":"Please give customer id!!",'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
            if not self.request.user.is_authenticated:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Unauthenticated Users are not allowed!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if not fmspw:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Unauthenticated Users are not Permitted!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            site = fmspw[0].loginsite
            if not site:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Users Item Site is not mapped!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            queryset = Treatment.objects.filter(cust_code=cust_obj.cust_code, site_code=site.itemsite_code,
                    status="Open").order_by('pk')
            if request.GET.get('year',None):
                year = request.GET.get('year',None)
                if year != "All":
                    queryset = Treatment.objects.filter(cust_code=cust_obj.cust_code, site_code=site.itemsite_code,
                            status="Open", treatment_date__year=year).order_by('pk')
                    # Keep only the latest treatment per parent code for that year.
                    par_lst = list(set([e.treatment_parentcode for e in queryset if e.treatment_parentcode]))
                    id_lst = []
                    for p in par_lst:
                        query = Treatment.objects.filter(treatment_parentcode=p, cust_code=cust_obj.cust_code, site_code=site.itemsite_code,
                                status="Open", treatment_date__year=year).order_by('pk').last()
                        id_lst.append(query.pk)
                    queryset = Treatment.objects.filter(pk__in=id_lst,cust_code=cust_obj.cust_code,site_code=site.itemsite_code, status="Open", treatment_date__year=year).order_by('pk')
            if queryset:
                serializer = self.get_serializer(queryset, many=True)
                lst = []
                for i in serializer.data:
                    splt = str(i['treatment_date']).split('T')
                    trmt_obj = Treatment.objects.filter(pk=i['id']).first()
                    tmp_ids = TmpItemHelper.objects.filter(item_code=trmt_obj.treatment_code)
                    for emp in tmp_ids:
                        appt = Appointment.objects.filter(cust_no=trmt_obj.cust_code,appt_date=date.today(),
                                itemsite_code=fmspw[0].loginsite.itemsite_code,emp_no=emp.helper_code)
                        if not appt:
                            # No appointment today: drop helper rows whose timing
                            # fields were never filled in.
                            tmpids = TmpItemHelper.objects.filter(item_code=trmt_obj.treatment_code,helper_code=emp.helper_code,
                                    site_code=site.itemsite_code).filter(Q(appt_fr_time__isnull=True) | Q(appt_to_time__isnull=True) | Q(add_duration__isnull=True))
                            if tmpids:
                                emp.delete()
                    # Rebuild the m2m so it mirrors the surviving helper rows.
                    for existing in trmt_obj.helper_ids.all():
                        trmt_obj.helper_ids.remove(existing)
                    for t in TmpItemHelper.objects.filter(treatment=trmt_obj,site_code=site.itemsite_code):
                        trmt_obj.helper_ids.add(t)
                    pos_haud = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,itemsite_code=site.itemsite_code,
                            sa_transacno_type__in=('Receipt', 'Non Sales'),sa_transacno=i["sa_transacno"]).first()
                    # item_code carries a 4-char suffix; strip it to find the Stock row.
                    item_code = str(trmt_obj.item_code)
                    itm_code = item_code[:-4]
                    stockobj = Stock.objects.filter(item_code=itm_code,item_isactive=True).order_by('pk').first()
                    if pos_haud and stockobj:
                        acc_obj = TreatmentAccount.objects.filter(treatment_parentcode=trmt_obj.treatment_parentcode,
                                site_code=site.itemsite_code).order_by('pk').first()
                        i['treatment_date'] = datetime.datetime.strptime(str(splt[0]), "%Y-%m-%d").strftime("%d-%m-%Y")
                        i['TreatmentAccountid'] = acc_obj.pk
                        i['stockid'] = stockobj.pk if stockobj else ""
                        i["transacno_ref"] = pos_haud.sa_transacno_ref if pos_haud.sa_transacno_ref else ""
                        if i["unit_amount"]:
                            i["unit_amount"] = "{:.2f}".format(float(i['unit_amount']))
                        i["rev"] = False
                        i["limit"] = None
                        if trmt_obj.helper_ids.all().exists():
                            i["sel"] = True
                            i["staff"] = ','.join([v.helper_id.emp_name for v in trmt_obj.helper_ids.all() if v.helper_id.emp_name])
                        else:
                            i["sel"] = None
                            i["staff"] = None
                        lst.append(i)
                result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False, 'data': lst}
                return Response(data=result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False, 'data': []}
                return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class TrmtTmpItemHelperViewset(viewsets.ModelViewSet):
authentication_classes = [ExpiringTokenAuthentication]
permission_classes = [IsAuthenticated & authenticated_only]
queryset = TmpItemHelper.objects.filter().order_by('-id')
serializer_class = TmpItemHelperSerializer
# def get_permissions(self):
# if self.request.GET.get('treatmentid',None) is None:
# msg = {'status': status.HTTP_204_NO_CONTENT,"message":"Please give Treatment Record ID",'error': False}
# raise exceptions.AuthenticationFailed(msg)
# else:
# self.permission_classes = [permissions.IsAuthenticated,]
# return self.permission_classes
    def list(self, request):
        """Return helper-allocation details for one open treatment.

        GET ?treatmentid=<pk>. Validates the treatment exists and is 'Open',
        rejects when the latest account row's balance cannot cover the unit
        amount, then returns a `value` summary plus serialized TmpItemHelper
        rows.
        """
        try:
            if request.GET.get('treatmentid',None) is None:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Please give Treatment Record ID",'error': False}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            trmt_obj = Treatment.objects.filter(status="Open",pk=request.GET.get('treatmentid',None)).first()
            if not trmt_obj:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Treatment ID does not exist/Status Should be in Open only!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            # The newest account row in this treatment's chain holds the balance.
            acc_ids = TreatmentAccount.objects.filter(ref_transacno=trmt_obj.sa_transacno,
                    treatment_parentcode=trmt_obj.treatment_parentcode,site_code=trmt_obj.site_code).order_by('id').last()
            if acc_ids and acc_ids.balance:
                if acc_ids.balance < trmt_obj.unit_amount:
                    result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Insufficient Amount in Treatment Account. Please Top Up!!",'error': True}
                    return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            # item_code carries a 4-char suffix; strip it to locate the Stock row.
            item_code = str(trmt_obj.item_code)
            itm_code = item_code[:-4]
            stockobj = Stock.objects.filter(item_code=itm_code,item_isactive=True).order_by('pk').first()
            if stockobj.workcommpoints == None or stockobj.workcommpoints == 0.0:
                workcommpoints = 0.0
            else:
                workcommpoints = stockobj.workcommpoints
            # Default service duration is 60 min; 30 min is always added on top.
            if stockobj.srv_duration is None or stockobj.srv_duration == 0.0:
                srvduration = 60
            else:
                srvduration = stockobj.srv_duration
            stkduration = int(srvduration) + 30
            hrs = '{:02d}:{:02d}'.format(*divmod(stkduration, 60))
            h_obj = TmpItemHelper.objects.filter(treatment=trmt_obj).first()
            # NOTE(review): 'source_id' defaults to trmt_obj.times here, which
            # looks inconsistent with the Source override below — confirm intent.
            value = {'Item':trmt_obj.course,'Price':"{:.2f}".format(float(trmt_obj.unit_amount)),
                    'work_point':"{:.2f}".format(float(workcommpoints)),'room_id':None,'room_name':None,
                    'source_id': trmt_obj.times if trmt_obj.times else "",'source_name':None,'new_remark':None,
                    'times':trmt_obj.times if trmt_obj.times else "",'add_duration':hrs}
            if h_obj:
                # Existing helper rows override the defaults in `value`.
                if not h_obj.Room_Codeid is None:
                    value['room_id'] = h_obj.Room_Codeid.pk
                    value['room_name'] = h_obj.Room_Codeid.displayname
                if not h_obj.Source_Codeid is None:
                    value['source_id'] = h_obj.Source_Codeid.pk
                    value['source_name'] = h_obj.Source_Codeid.source_desc
                if not h_obj.new_remark is None:
                    value['new_remark'] = h_obj.new_remark
                if h_obj.times:
                    value['times'] = trmt_obj.times
                if h_obj.workcommpoints:
                    # Total work points = sum of wp1 across all helper rows.
                    sumwp1 = TmpItemHelper.objects.filter(treatment=trmt_obj.pk).aggregate(Sum('wp1'))
                    value['work_point'] = "{:.2f}".format(float(sumwp1['wp1__sum']))
            queryset = TmpItemHelper.objects.filter(treatment=trmt_obj).order_by('id')
            serializer = self.get_serializer(queryset, many=True)
            final = []
            if queryset:
                for t in serializer.data:
                    s = dict(t)
                    s['wp1'] = "{:.2f}".format(float(s['wp1']))
                    s['appt_fr_time'] = get_in_val(self, s['appt_fr_time'])
                    s['appt_to_time'] = get_in_val(self, s['appt_to_time'])
                    s['add_duration'] = get_in_val(self, s['add_duration'])
                    final.append(s)
            result = {'status': status.HTTP_200_OK,"message": "Listed Succesfully",'error': False,
                    'value': value,'data': final}
            return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def create(self, request):
        """Attach one employee (helper) to an open treatment.

        GET params: treatmentid (required), workcommpoints (optional).
        Body: validated by the viewset serializer; must contain 'helper_id'.
        Work points (wp1) are split evenly across all helpers on the
        treatment, and every existing helper row is updated to the new split.
        """
        try:
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
            site = fmspw[0].loginsite
            if request.GET.get('treatmentid',None) is None:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Please give Treatment Record ID",'error': False}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            trmt_obj = Treatment.objects.filter(status="Open",pk=request.GET.get('treatmentid',None)).first()
            if not trmt_obj:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Treatment ID does not exist / not in open status!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            # item_code carries a 4-char suffix; strip it to locate the Stock row.
            item_code = str(trmt_obj.item_code)
            itm_code = item_code[:-4]
            stockobj = Stock.objects.filter(item_code=itm_code,item_isactive=True).order_by('pk').first()
            # Newest account row in the chain carries the current balance.
            tracc_obj = TreatmentAccount.objects.filter(treatment_parentcode=trmt_obj.treatment_parentcode,
                    site_code=site.itemsite_code).order_by('pk').first()
            acc_ids = TreatmentAccount.objects.filter(ref_transacno=tracc_obj.ref_transacno,
                    treatment_parentcode=tracc_obj.treatment_parentcode,site_code=site.itemsite_code,).order_by('id').last()
            if acc_ids and acc_ids.balance:
                if acc_ids.balance < trmt_obj.unit_amount:
                    msg = "Treatment Account Balance is SS {0} is not less than Treatment Price {1}.".format(str(acc_ids.balance),str(trmt_obj.unit_amount))
                    result = {'status': status.HTTP_400_BAD_REQUEST,"message":msg,'error': True}
                    return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if request.GET.get('workcommpoints',None) is None or float(request.GET.get('workcommpoints',None)) == 0.0:
                workcommpoints = 0.0
            else:
                workcommpoints = request.GET.get('workcommpoints',None)
            tmp = []
            h_obj = TmpItemHelper.objects.filter(treatment__pk=trmt_obj.pk).order_by('pk')
            count = 1;Source_Codeid=None;Room_Codeid=None;new_remark=None;appt_fr_time=None;appt_to_time=None;add_duration=None
            # Default service duration is 60 min; 30 min is always added on top.
            if stockobj.srv_duration is None or float(stockobj.srv_duration) == 0.0:
                stk_duration = 60
            else:
                stk_duration = int(stockobj.srv_duration)
            stkduration = int(stk_duration) + 30
            hrs = '{:02d}:{:02d}'.format(*divmod(stkduration, 60))
            duration = hrs
            add_duration = duration
            helper_obj = Employee.objects.filter(emp_isactive=True,pk=request.data['helper_id']).first()
            if not helper_obj:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Employee ID does not exist!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            # Reject duplicates: the same employee may appear only once.
            alemp_ids = TmpItemHelper.objects.filter(treatment__pk=trmt_obj.pk,
                    helper_code=helper_obj.emp_code,site_code=site.itemsite_code).order_by('pk')
            if alemp_ids:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"This Employee already selected!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if h_obj:
                # Inherit room/source/remark from the first existing row and
                # schedule this helper right after the last one's end time.
                count = int(h_obj.count()) + 1
                Source_Codeid = h_obj[0].Source_Codeid
                Room_Codeid = h_obj[0].Room_Codeid
                new_remark = h_obj[0].new_remark
                last = h_obj.last()
                start_time = get_in_val(self, last.appt_to_time); endtime = None
                if start_time:
                    starttime = datetime.datetime.strptime(start_time, "%H:%M")
                    end_time = starttime + datetime.timedelta(minutes = stkduration)
                    endtime = datetime.datetime.strptime(str(end_time), "%Y-%m-%d %H:%M:%S").strftime("%H:%M")
                appt_fr_time = starttime if start_time else None
                appt_to_time = endtime if endtime else None
            # Even split of the treatment's work points across all helpers.
            wp1 = float(workcommpoints) / float(count)
            serializer = self.get_serializer(data=request.data)
            if serializer.is_valid():
                temph = serializer.save(item_name=stockobj.item_desc,helper_id=helper_obj,
                        helper_name=helper_obj.display_name,helper_code=helper_obj.emp_code,Room_Codeid=Room_Codeid,
                        site_code=site.itemsite_code,times=trmt_obj.times,treatment_no=trmt_obj.treatment_no,
                        wp1=wp1,wp2=0.0,wp3=0.0,itemcart=None,treatment=trmt_obj,Source_Codeid=Source_Codeid,
                        new_remark=new_remark,appt_fr_time=appt_fr_time,appt_to_time=appt_to_time,
                        add_duration=add_duration,workcommpoints=workcommpoints)
                tmp.append(temph.id)
                # Re-apply the new even split to every existing helper row.
                for h in TmpItemHelper.objects.filter(treatment__pk=trmt_obj.pk,site_code=site.itemsite_code).order_by('pk'):
                    TmpItemHelper.objects.filter(id=h.id).update(wp1=wp1)
            else:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Invalid Input",'error': True,
                        'data': serializer.errors}
                return Response(result, status=status.HTTP_400_BAD_REQUEST)
            if tmp != []:
                value = {'Item':stockobj.item_desc,'Price':"{:.2f}".format(float(trmt_obj.unit_amount)),
                        'work_point':"{:.2f}".format(float(workcommpoints)),'Room':None,'Source':None,'new_remark':None,
                        'times':trmt_obj.times}
                tmp_h = TmpItemHelper.objects.filter(id__in=tmp)
                serializer_final = self.get_serializer(tmp_h, many=True)
                final = []
                for t in serializer_final.data:
                    s = dict(t)
                    s['wp1'] = "{:.2f}".format(float(s['wp1']))
                    s['appt_fr_time'] = get_in_val(self, s['appt_fr_time'])
                    s['appt_to_time'] = get_in_val(self, s['appt_to_time'])
                    s['add_duration'] = get_in_val(self, s['add_duration'])
                    final.append(s)
                result = {'status': status.HTTP_201_CREATED,"message": "Created Succesfully",'error': False,
                        'value':value,'data': final}
                return Response(result, status=status.HTTP_201_CREATED)
            result = {'status': status.HTTP_400_BAD_REQUEST,"message": "Invalid Input",'error': False,
                    'data': serializer.errors}
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
def get_object(self, pk):
try:
return TmpItemHelper.objects.get(pk=pk)
except TmpItemHelper.DoesNotExist:
raise Http404
def retrieve(self, request, pk=None):
try:
queryset = TmpItemHelper.objects.filter().order_by('pk')
tmpitm = get_object_or_404(queryset, pk=pk)
serializer = TmpItemHelperSerializer(tmpitm)
result = {'status': status.HTTP_200_OK,"message": "Listed Succesfully",'error': False,
'data': serializer.data}
return Response(data=result, status=status.HTTP_200_OK)
except Exception as e:
invalid_message = str(e)
return general_error_response(invalid_message)
    def partial_update(self, request, pk=None):
        """Patch one TmpItemHelper row (timing, room, source, wp1).

        When both appt_fr_time and add_duration are supplied, recomputes this
        row's end time and cascades recalculated start/end times onto later
        rows at the same site. When wp1 is supplied, re-totals workcommpoints
        across every helper row on the treatment.
        """
        try:
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
            site = fmspw[0].loginsite
            if request.GET.get('Room_Codeid',None):
                room_ids = Room.objects.filter(id=request.GET.get('Room_Codeid',None),isactive=True).first()
                if not room_ids:
                    result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Room Id does not exist!!",'error': True}
                    return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if request.GET.get('Source_Codeid',None):
                source_ids = Source.objects.filter(id=request.GET.get('Source_Codeid',None),source_isactive=True).first()
                if not source_ids:
                    result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Source ID does not exist!!",'error': True}
                    return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if not request.GET.get('Room_Codeid',None):
                room_ids = None
            if not request.GET.get('Source_Codeid',None):
                source_ids = None
            if request.GET.get('workcommpoints',None) is None or float(request.GET.get('workcommpoints',None)) == 0.0:
                workcommpoints = 0.0
            else:
                workcommpoints = request.GET.get('workcommpoints',None)
            tmpobj = self.get_object(pk)
            # item_code carries a 4-char suffix; strip it to locate the Stock row.
            item_code = str(tmpobj.treatment.item_code)
            itm_code = item_code[:-4]
            stockobj = Stock.objects.filter(item_code=itm_code,item_isactive=True).order_by('pk').first()
            serializer = self.get_serializer(tmpobj, data=request.data, partial=True)
            if serializer.is_valid():
                if ('appt_fr_time' in request.data and not request.data['appt_fr_time'] == None):
                    if ('add_duration' in request.data and not request.data['add_duration'] == None):
                        # Default service duration is 60 min; 30 min added on top.
                        if stockobj.srv_duration is None or float(stockobj.srv_duration) == 0.0:
                            stk_duration = 60
                        else:
                            stk_duration = int(stockobj.srv_duration)
                        stkduration = int(stk_duration) + 30
                        # Convert the 'HH:MM' duration string to minutes.
                        t1 = datetime.datetime.strptime(str(request.data['add_duration']), '%H:%M')
                        t2 = datetime.datetime(1900,1,1)
                        addduration = (t1-t2).total_seconds() / 60.0
                        hrs = '{:02d}:{:02d}'.format(*divmod(stkduration, 60))
                        start_time = get_in_val(self, request.data['appt_fr_time'])
                        starttime = datetime.datetime.strptime(start_time, "%H:%M")
                        end_time = starttime + datetime.timedelta(minutes = addduration)
                        endtime = datetime.datetime.strptime(str(end_time), "%Y-%m-%d %H:%M:%S").strftime("%H:%M")
                        duration = hrs
                        serializer.save(appt_fr_time=starttime,appt_to_time=endtime,add_duration=request.data['add_duration'],
                                Room_Codeid=room_ids,Source_Codeid=source_ids,new_remark=request.GET.get('new_remark',None))
                        # Cascade new times onto subsequent rows at this site.
                        # NOTE(review): this filters by id/site only, not by
                        # treatment — confirm rows from other treatments should
                        # really be rescheduled too.
                        next_recs = TmpItemHelper.objects.filter(id__gte=tmpobj.pk,site_code=site.itemsite_code).order_by('pk')
                        for t in next_recs:
                            start_time = get_in_val(self, t.appt_to_time)
                            if start_time:
                                starttime = datetime.datetime.strptime(str(start_time), "%H:%M")
                                end_time = starttime + datetime.timedelta(minutes = stkduration)
                                endtime = datetime.datetime.strptime(str(end_time), "%Y-%m-%d %H:%M:%S").strftime("%H:%M")
                                idobj = TmpItemHelper.objects.filter(id__gt=t.pk,site_code=site.itemsite_code).order_by('pk').first()
                                if idobj:
                                    TmpItemHelper.objects.filter(id=idobj.pk).update(appt_fr_time=starttime,
                                            appt_to_time=endtime,add_duration=duration)
                if 'wp1' in request.data and not request.data['wp1'] == None:
                    serializer.save(wp1=float(request.data['wp1']))
                    # Re-total workcommpoints across all helper rows on the treatment.
                    tmpids = TmpItemHelper.objects.filter(treatment=tmpobj.treatment,site_code=site.itemsite_code).order_by('pk').aggregate(Sum('wp1'))
                    value ="{:.2f}".format(float(tmpids['wp1__sum']))
                    tmpl_ids = TmpItemHelper.objects.filter(treatment=tmpobj.treatment,site_code=site.itemsite_code).order_by('pk')
                    for t in tmpl_ids:
                        TmpItemHelper.objects.filter(id=t.pk).update(workcommpoints=value)
                result = {'status': status.HTTP_200_OK,"message":"Updated Succesfully",'error': False}
                return Response(result, status=status.HTTP_200_OK)
            result = {'status': status.HTTP_400_BAD_REQUEST,"message":serializer.errors,'error': True}
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    @action(methods=['get'], detail=False, permission_classes=[IsAuthenticated & authenticated_only],
            authentication_classes=[ExpiringTokenAuthentication])
    def confirm(self, request):
        """Finalize helper allocation for an open treatment.

        Rejects helpers that have no appointment today AND incomplete timing
        fields, then rebuilds the treatment's helper_ids m2m from the
        remaining TmpItemHelper rows.
        """
        try:
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
            site = fmspw[0].loginsite
            if request.GET.get('treatmentid',None) is None:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Please give Treatment Record ID",'error': False}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            trmt_obj = Treatment.objects.filter(status="Open",pk=request.GET.get('treatmentid',None)).first()
            if not trmt_obj:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Treatment ID does not exist/Status Should be in Open only!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            # Re-query as a queryset; subsequent code indexes trmt_obj[0].
            trmt_obj = Treatment.objects.filter(status="Open",pk=request.GET.get('treatmentid',None))
            if trmt_obj:
                tmp_ids = TmpItemHelper.objects.filter(treatment=trmt_obj[0],site_code=site.itemsite_code)
                if not tmp_ids:
                    result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Without employee cant do confirm!!",'error': False}
                    return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                for emp in tmp_ids:
                    appt = Appointment.objects.filter(cust_no=trmt_obj[0].cust_code,appt_date=date.today(),
                            itemsite_code=fmspw[0].loginsite.itemsite_code,emp_no=emp.helper_code)
                    if not appt:
                        # No appointment today: timing fields become mandatory.
                        tmpids = TmpItemHelper.objects.filter(treatment=trmt_obj[0],
                                helper_code=emp.helper_code,site_code=site.itemsite_code).filter(Q(appt_fr_time__isnull=True) | Q(appt_to_time__isnull=True) | Q(add_duration__isnull=True))
                        if tmpids:
                            amsg = "Appointment is not available today, please give Start Time & Add Duration for employee {0} ".format(emp.helper_name)
                            result = {'status': status.HTTP_400_BAD_REQUEST,"message": amsg,'error': True}
                            return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                # Rebuild the m2m from scratch so it mirrors TmpItemHelper.
                for existing in trmt_obj[0].helper_ids.all():
                    trmt_obj[0].helper_ids.remove(existing)
                for t in tmp_ids:
                    trmt_obj[0].helper_ids.add(t)
            result = {'status': status.HTTP_200_OK , "message": "Confirmed Succesfully", 'error': False}
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
@action(detail=False, methods=['delete'], name='delete', permission_classes=[IsAuthenticated & authenticated_only],
authentication_classes=[ExpiringTokenAuthentication])
def delete(self, request):
try:
fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
site = fmspw[0].loginsite
if self.request.GET.get('clear_all',None) is None:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Please give clear all/line in parms!!",'error': True}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
if request.GET.get('treatmentid',None) is None:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Please give Treatment Record ID",'error': False}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
trmt_obj = Treatment.objects.filter(status="Open",pk=request.GET.get('treatmentid',None)).first()
if not trmt_obj:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Treatment ID does not exist!!",'error': True}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
state = status.HTTP_204_NO_CONTENT
try:
tmp_ids = TmpItemHelper.objects.filter(treatment=trmt_obj,site_code=site.itemsite_code).values_list('id')
if not tmp_ids:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Tmp Item Helper records is not present for this Treatment record id!!",'error': True}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
if self.request.GET.get('clear_all',None) == "1":
queryset = TmpItemHelper.objects.filter(treatment=trmt_obj,site_code=site.itemsite_code).order_by('id').delete()
elif self.request.GET.get('clear_all',None) == "0":
queryset = TmpItemHelper.objects.filter(treatment=trmt_obj,site_code=site.itemsite_code).order_by('id').first().delete()
result = {'status': status.HTTP_200_OK , "message": "Deleted Succesfully", 'error': False}
return Response(result, status=status.HTTP_200_OK)
except Http404:
pass
result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': True}
return Response(result,status=status.HTTP_200_OK)
except Exception as e:
invalid_message = str(e)
return general_error_response(invalid_message)
class TopupproductViewset(viewsets.ModelViewSet):
    """List a customer's open 'Deposit' treatment-product accounts at the login
    site, with header totals used by the top-up screen."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = TopupproductSerializer
    def list(self, request):
        """Return deposit accounts with outstanding > 0 for GET param `cust_id`.

        For every base Deposit row, the latest account row sharing its
        ref_transacno/treat_code is serialized and decorated with the related
        Receipt transaction number and stock id, amounts formatted to 2 dp.
        """
        try:
            cust_id = self.request.GET.get('cust_id',None)
            cust_obj = Customer.objects.filter(pk=request.GET.get('cust_id',None),cust_isactive=True).first()
            if cust_obj is None:
                result = {'status': status.HTTP_200_OK,"message":"Please give customer id!!",'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            # NOTE(review): the auth/site guards below run after the Customer
            # query above — harmless here, but order looks accidental.
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
            if not self.request.user.is_authenticated:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Unauthenticated Users are not allowed!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if not fmspw:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Unauthenticated Users are not Permitted!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            site = fmspw[0].loginsite
            if not site:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Users Item Site is not mapped!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            # Base deposits still carrying an outstanding balance.
            queryset = DepositAccount.objects.filter(Cust_Codeid=cust_id, Site_Codeid=site, type='Deposit',
                            outstanding__gt=0).order_by('pk')
            # `sum` shadows the builtin; it accumulates total outstanding.
            sum = 0; lst = []
            header_data = {"customer_name" : cust_obj.cust_name,"old_outstanding" : "0.00",
                            "topup_amount" : "0.00","new_outstanding" : "0.00"}
            if queryset:
                for q in queryset:
                    # ,type__in=('Deposit', 'Top Up')
                    # Latest account movement for this deposit's transaction.
                    acc_ids = DepositAccount.objects.filter(ref_transacno=q.sa_transacno,
                                ref_productcode=q.treat_code,Site_Codeid=site).order_by('id').last()
                    acc = DepositAccount.objects.filter(pk=acc_ids.pk)
                    serializer = self.get_serializer(acc, many=True)
                    if acc_ids.outstanding > 0.0:
                        for data in serializer.data:
                            # Only rows backed by a Receipt header are included.
                            pos_haud = PosHaud.objects.filter(sa_custnoid=cust_id,ItemSite_Codeid__pk=site.pk,
                                        sa_transacno_type="Receipt",sa_transacno=q.sa_transacno).first()
                            if pos_haud:
                                # Decorate the serialized dict in place before
                                # the string formatting below rewrites amounts.
                                sum += data['outstanding']
                                data['DepositAccountid'] = q.pk
                                data["pay_amount"] = None
                                data['transaction_code'] = pos_haud.sa_transacno_ref
                                data['stock_id'] = acc_ids.Item_Codeid.pk
                                if data["balance"]:
                                    data["balance"] = "{:.2f}".format(float(data['balance']))
                                else:
                                    data["balance"] = "0.00"
                                if data["outstanding"]:
                                    data["outstanding"] = "{:.2f}".format(float(data['outstanding']))
                                else:
                                    data["outstanding"] = "0.00"
                                lst.append(data)
                if lst != []:
                    header_data = {"customer_name" : cust_obj.cust_name,"old_outstanding" : "{:.2f}".format(float(sum)),
                                    "topup_amount" : None,"new_outstanding" : "{:.2f}".format(float(sum))}
                    result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False, 'header_data':header_data, 'data': lst}
                    return Response(result, status=status.HTTP_200_OK)
                else:
                    result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False, 'header_data':header_data, 'data': []}
                    return Response(result, status=status.HTTP_200_OK)
            else:
                # No open deposits: 204 in the envelope but HTTP 200 on the wire.
                result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False,'header_data':header_data, 'data': []}
                return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class TopupprepaidViewset(viewsets.ModelViewSet):
    """List a customer's prepaid accounts (outstanding > 0) at the login site,
    derived from their Receipt transactions, for the prepaid top-up screen."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = TopupprepaidSerializer
    def list(self, request):
        """Return prepaid account rows for GET param `cust_id`.

        Walks every Receipt header of the customer, then each detail line;
        packages (item_div == 3, item_type 'PACKAGE') are matched on
        package_code, everything else on Item_Codeid. Amounts are formatted
        to 2 dp and expiry dates to d-Mon-yy.
        """
        try:
            cust_id = self.request.GET.get('cust_id',None)
            cust_obj = Customer.objects.filter(pk=request.GET.get('cust_id',None),cust_isactive=True).first()
            if cust_obj is None:
                result = {'status': status.HTTP_200_OK,"message":"Customer ID does not exist!!",'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
            if not self.request.user.is_authenticated:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Unauthenticated Users are not allowed!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if not fmspw:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Unauthenticated Users are not Permitted!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            site = fmspw[0].loginsite
            if not site:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Users Item Site is not mapped!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            # All Receipt headers for this customer at the login site.
            queryset = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,sa_transacno_type="Receipt",
                        ItemSite_Codeid__pk=site.pk)
            # `sum` shadows the builtin; accumulates total outstanding.
            sum = 0; lst = []
            header_data = {"customer_name" : cust_obj.cust_name,"old_outstanding" : "0.00",
                            "topup_amount" : "0.00","new_outstanding" : "0.00"}
            if queryset:
                for q in queryset:
                    daud = PosDaud.objects.filter(sa_transacno=q.sa_transacno,
                                ItemSite_Codeid__pk=site.pk)
                    for d in daud:
                        # Latest open prepaid-account row for this detail line.
                        if int(d.dt_itemnoid.item_div) == 3 and d.dt_itemnoid.item_type == 'PACKAGE':
                            acc_ids = PrepaidAccount.objects.filter(pp_no=d.sa_transacno,package_code=d.dt_combocode,
                                Site_Codeid=d.ItemSite_Codeid,pos_daud_lineno=d.dt_lineno,outstanding__gt = 0).order_by('id').last()
                        else:
                            acc_ids = PrepaidAccount.objects.filter(pp_no=d.sa_transacno,Item_Codeid=d.dt_itemnoid,
                                Site_Codeid=d.ItemSite_Codeid,pos_daud_lineno=d.dt_lineno,outstanding__gt = 0).order_by('id').last()
                        if acc_ids:
                            acc = PrepaidAccount.objects.filter(pk=acc_ids.pk)
                            serializer = self.get_serializer(acc, many=True)
                            for data in serializer.data:
                                pos_haud = PosHaud.objects.filter(sa_custnoid=cust_obj,ItemSite_Codeid__pk=site.pk,
                                            sa_transacno_type="Receipt",sa_transacno=q.sa_transacno).first()
                                if pos_haud:
                                    # Decorate/format the serialized dict in place.
                                    sum += data['outstanding']
                                    splt = str(data['exp_date']).split('T')
                                    if data['exp_date']:
                                        data['exp_date'] = datetime.datetime.strptime(str(splt[0]), "%Y-%m-%d").strftime("%d-%b-%y")
                                    data['transaction_code'] = pos_haud.sa_transacno_ref
                                    data['prepaid_id'] = acc_ids.pk
                                    if int(d.dt_itemnoid.item_div) == 3 and d.dt_itemnoid.item_type == 'PACKAGE':
                                        data['stock_id'] = acc_ids.Item_Codeid.pk
                                    else:
                                        data['stock_id'] = d.dt_itemnoid.pk
                                    data["pay_amount"] = None
                                    if data["remain"]:
                                        data["remain"] = "{:.2f}".format(float(data['remain']))
                                    if data["outstanding"]:
                                        data["outstanding"] = "{:.2f}".format(float(data['outstanding']))
                                    lst.append(data)
                # Header reflects the final outstanding total (0.00 if no rows).
                header_data = {"customer_name" : cust_obj.cust_name,"old_outstanding" : "{:.2f}".format(float(sum)),
                                "topup_amount" : None,"new_outstanding" : "{:.2f}".format(float(sum))}
                if lst != []:
                    result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False, 'header_data':header_data, 'data': lst}
                else:
                    result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False,'header_data':header_data, 'data': []}
                return Response(result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False,'header_data':header_data, 'data': []}
                return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class ReversalListViewset(viewsets.ModelViewSet):
    """Treatment reversal: `list` previews the reversal (totals + next reverse
    number), `create` cancels the treatments, writes reversal/credit-note
    records and renders a credit-note PDF."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = TreatmentReversalSerializer
    def list(self, request):
        """Preview a reversal for GET param `treatment_id` (comma-separated pks).

        All ids must be Open treatments at the login site; duplicated
        treatment_parentcodes contribute to balance/credit totals only once.
        """
        try:
            treatment_id = self.request.GET.get('treatment_id',None)
            if treatment_id is None:
                result = {'status': status.HTTP_200_OK,"message":"Please give Treatment id!!",'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
            if not fmspw:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Unauthenticated Users are not Permitted!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            site = fmspw[0].loginsite
            if not site:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Users Item Site is not mapped!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            treat_id = treatment_id.split(',')
            # `sum` shadows the builtin; running total of unit amounts.
            sum = 0; lst = []; count = 0 ; tot_balance = 0 ; tot_credit = 0; checklst = []
            for i in treat_id:
                count +=1
                queryset = Treatment.objects.filter(pk=i,status='Open',site_code=site.itemsite_code).order_by('-pk')
                if queryset:
                    # type__in=('Deposit', 'Top Up','CANCEL')
                    # Latest account movement for this treatment's transaction.
                    acc_ids = TreatmentAccount.objects.filter(ref_transacno=queryset[0].sa_transacno,
                        treatment_parentcode=queryset[0].treatment_parentcode,Site_Codeid=queryset[0].Site_Codeid).order_by('id').last()
                    serializer = self.get_serializer(queryset, many=True)
                    for data in serializer.data:
                        # Count each parentcode's balance/credit once; the
                        # creditable amount is capped at the account balance.
                        if queryset[0].treatment_parentcode not in checklst:
                            checklst.append(queryset[0].treatment_parentcode)
                            if acc_ids:
                                tot_balance += acc_ids.balance
                                if float(acc_ids.balance) > float(queryset[0].unit_amount):
                                    tot_credit += queryset[0].unit_amount
                                elif float(acc_ids.balance) <= float(queryset[0].unit_amount):
                                    tot_credit += acc_ids.balance
                        data['no'] = count
                        sum += data['unit_amount']
                        data['unit_amount'] = "{:.2f}".format(float(data['unit_amount']))
                        lst.append(data)
                else:
                    result = {'status': status.HTTP_200_OK,"message":"Treatment ID does not exist/Not in Open Status!!",'error': True}
                    return Response(data=result, status=status.HTTP_200_OK)
            if lst != []:
                # Preview the next reverse number without consuming it.
                control_obj = ControlNo.objects.filter(control_description__iexact="Reverse No",Site_Codeid=site).first()
                if not control_obj:
                    result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Reverse Control No does not exist!!",'error': True}
                    return Response(result, status=status.HTTP_400_BAD_REQUEST)
                rev_code = str(control_obj.control_prefix)+str(control_obj.Site_Codeid.itemsite_code)+str(control_obj.control_no)
                header_data = {"reverse_no" : rev_code, "total" : "{:.2f}".format(float(sum)),
                    "total_depobalance" : "{:.2f}".format(float(tot_balance)),"total_credit" : "{:.2f}".format(float(tot_credit))}
                # if self.request.GET.get('adjustment',None) is not None:
                #     header_data["creditnote_after_adjustment"] = "Null"
                result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False, 'header_data':header_data, 'data': lst}
                return Response(result, status=status.HTTP_200_OK)
            else:
                serializer = self.get_serializer()
                result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def create(self, request):
        """Execute the reversal for GET param `treatment_id`.

        Per treatment: mark it Cancel, write a ReverseDtl, a CANCEL
        TreatmentAccount movement (amount capped at the account balance) and a
        CreditNote; a ReverseHdr is (re)written each iteration. Optional GET
        params: adjustment_value, reason_id, remark. Finally renders the
        credit-note PDF under settings.PDF_ROOT and returns its URL.
        """
        try:
            if not self.request.user.is_authenticated:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Unauthenticated Users are not allowed!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
            if not fmspw:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Unauthenticated Users are not Permitted!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            site = fmspw[0].loginsite
            if not site:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Users Item Site is not mapped!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            treatment_id = self.request.GET.get('treatment_id',None)
            if treatment_id is None:
                result = {'status': status.HTTP_200_OK,"message":"Please give Treatment id!!",'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            # Pre-compute the reverse and credit-note numbers; the counters are
            # incremented further down only after successful saves.
            recontrol_obj = ControlNo.objects.filter(control_description__iexact="Reverse No",Site_Codeid=site).first()
            if not recontrol_obj:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Reverse Control No does not exist!!",'error': True}
                return Response(result, status=status.HTTP_400_BAD_REQUEST)
            rev_code = str(recontrol_obj.control_prefix)+str(recontrol_obj.Site_Codeid.itemsite_code)+str(recontrol_obj.control_no)
            control_obj = ControlNo.objects.filter(control_description__iexact="Reference Credit Note No",Site_Codeid=site).first()
            if not control_obj:
                # NOTE(review): message says "Reverse" but this is the credit-note counter.
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Reverse Control No does not exist!!",'error': True}
                return Response(result, status=status.HTTP_400_BAD_REQUEST)
            credit_code = str(control_obj.control_prefix)+str(control_obj.Site_Codeid.itemsite_code)+str(control_obj.control_no)
            fmspw = fmspw.first()
            treat_id = treatment_id.split(',')
            # `sum` shadows the builtin and is never used; `total` is the capped
            # credit total, `total_r` the raw unit-amount total for the PDF.
            sum = 0; lst = [];total = 0;trm_lst = [];total_r = 0.0;rea_obj = False
            if treat_id:
                for i in treat_id:
                    queryset = Treatment.objects.filter(pk=i,status='Open',site_code=site.itemsite_code).order_by('-pk')
                    if not queryset:
                        result = {'status': status.HTTP_200_OK,"message":"Treatment ID does not exist/Not in Open Status!!",'error': True}
                        return Response(data=result, status=status.HTTP_200_OK)
                    # type__in=('Deposit', 'Top Up','CANCEL')
                    acc_ids = TreatmentAccount.objects.filter(ref_transacno=queryset[0].sa_transacno,
                        treatment_parentcode=queryset[0].treatment_parentcode,Site_Codeid=queryset[0].Site_Codeid).order_by('id').last()
                    # if acc_ids.balance == 0.0:
                    #     result = {'status': status.HTTP_200_OK,"message":"Treatment Account for this customer is Zero so cant create Credit Note!!",'error': True}
                    #     return Response(data=result, status=status.HTTP_200_OK)
                    j = queryset.first()
                    #treatment update
                    j.status = 'Cancel'
                    j.transaction_time = timezone.now()
                    j.save()
                    cust_obj = Customer.objects.filter(cust_code=j.cust_code,cust_isactive=True).first()
                    pos_haud = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,itemsite_code=site.itemsite_code,
                        sa_transacno=j.sa_transacno).first()
                    # Line item for the rendered credit-note PDF.
                    val = {'invoice': "Credit for Invoice Number : "+str(pos_haud.sa_transacno_ref),
                        'desc':j.course,'amount':j.unit_amount}
                    trm_lst.append(val)
                    total_r += j.unit_amount
                    #reversedtl creation
                    reversedtl = ReverseDtl(treatment_no=j.treatment_code,treatment_desc=j.course,
                        treatment_price=j.unit_amount,transac_no=j.sa_transacno,reverse_no=rev_code,
                        site_code=j.site_code)
                    reversedtl.save()
                    desc = "CANCEL" +" "+ str(j.course)+" "+str(j.times)+"/"+str(j.treatment_no)
                    #treatment Account creation
                    # Credit at most the remaining account balance.
                    if acc_ids.balance > queryset[0].unit_amount:
                        balance = acc_ids.balance - queryset[0].unit_amount
                        tamount = queryset[0].unit_amount
                        total += j.unit_amount
                    elif acc_ids.balance <= queryset[0].unit_amount:
                        balance = acc_ids.balance - acc_ids.balance
                        tamount = acc_ids.balance
                        total += acc_ids.balance
                    treatacc = TreatmentAccount(Cust_Codeid=cust_obj,cust_code=cust_obj.cust_code,
                        description=desc,ref_no=j.treatment_parentcode,type='CANCEL',amount=-float("{:.2f}".format(float(tamount))) if tamount else 0,
                        balance="{:.2f}".format(float(balance)),User_Nameid=fmspw,user_name=fmspw.pw_userlogin,ref_transacno=j.sa_transacno,
                        sa_transacno="",qty=1,outstanding="{:.2f}".format(float(acc_ids.outstanding)),deposit=None,treatment_parentcode=j.treatment_parentcode,
                        treatment_code=None,sa_status="VT",cas_name=fmspw.pw_userlogin,sa_staffno=acc_ids.sa_staffno,sa_staffname=acc_ids.sa_staffname,
                        next_paydate=None,hasduedate=0,dt_lineno=j.dt_lineno,Site_Codeid=site,
                        site_code=j.site_code,treat_code=j.treatment_parentcode)
                    treatacc.save()
                    #creditnote creation
                    creditnote = CreditNote(treatment_code=j.treatment_parentcode,treatment_name=j.course,
                        treatment_parentcode=j.treatment_parentcode,type="CANCEL",cust_code=j.cust_code,
                        cust_name=j.cust_name,sa_transacno=j.sa_transacno,status="OPEN",
                        credit_code=credit_code,deposit_type="TREATMENT",site_code=j.site_code,
                        treat_code=j.treatment_parentcode)
                    creditnote.save()
                    if creditnote.pk:
                        # Consume the credit-note counter only once a note saved.
                        control_obj.control_no = int(control_obj.control_no) + 1
                        control_obj.save()
                        if creditnote.pk not in lst:
                            lst.append(creditnote.pk)
                    #reversehdr creation
                    # NOTE(review): a ReverseHdr is created on every loop
                    # iteration (same rev_code), so multiple treatments yield
                    # multiple header rows — confirm this is intended.
                    reversehdr = ReverseHdr(reverse_no=rev_code,staff_code="",staff_name="",
                        cust_code=j.cust_code,cust_name=j.cust_name,site_code=j.site_code,
                        ref_creditnote=creditnote.credit_code,total_balance=total)
                    reversehdr.save()
                    if reversehdr.pk:
                        recontrol_obj.control_no = int(recontrol_obj.control_no) + 1
                        recontrol_obj.save()
                    if self.request.GET.get('adjustment_value',None) and float(self.request.GET.get('adjustment_value',None)) != 0.0:
                        amount = self.request.GET.get('adjustment_value',None)
                        reversehdr.has_adjustment = True
                        reversehdr.adjustment_value = amount
                        split = str(amount).split('-')
                        # NOTE(review): `'-' in split` is always False —
                        # str.split('-') never yields '-' as an element — so the
                        # else branch always runs. Since float(amount) carries
                        # its own sign, `total + float(amount)` still behaves
                        # sensibly for negative adjustments; confirm before
                        # "fixing" the condition.
                        if '-' in split:
                            reversehdr.credit_note_amt = total - float(amount)
                            creditnote.amount = total - float(amount)
                            creditnote.balance = total - float(amount)
                        else:
                            reversehdr.credit_note_amt = total + float(amount)
                            creditnote.amount = total + float(amount)
                            creditnote.balance = total + float(amount)
                        if creditnote.amount == 0.0 and creditnote.balance == 0.0:
                            creditnote.status = "CLOSE"
                        creditnote.save()
                        if not self.request.GET.get('reason_id',None) is None:
                            rea_obj = ReverseTrmtReason.objects.filter(id=self.request.GET.get('reason_id',None),
                                is_active=True)
                            if not rea_obj:
                                result = {'status': status.HTTP_200_OK,"message":"Reason ID does not exist!!",'error': True}
                                return Response(data=result, status=status.HTTP_200_OK)
                            reversehdr.reason = rea_obj[0].rev_desc
                        if not self.request.GET.get('remark',None) is None:
                            reversehdr.remark = self.request.GET.get('remark',None)
                        # NOTE(review): if reason_id was not supplied, rea_obj is
                        # still False here and rea_obj[0] raises (caught by the
                        # outer except) — likely an unguarded path.
                        if rea_obj[0].rev_no == '100001':
                            if rea_obj:
                                reversehdr.reason1 = rea_obj[0].rev_desc
                                if amount:
                                    reversehdr.reason_adj_value1 = amount
                        reversehdr.save()
                    else:
                        # No adjustment: credit note equals the capped total.
                        creditnote.amount = total
                        creditnote.balance = total
                        if creditnote.amount == 0.0 and creditnote.balance == 0.0:
                            creditnote.status = "CLOSE"
                        creditnote.save()
                        reversehdr.credit_note_amt = total
                        reversehdr.save()
            if lst != [] and trm_lst != []:
                # Render the credit note to PDF and hand back a media URL.
                title = Title.objects.filter(product_license=site.itemsite_code).first()
                credit_ids = CreditNote.objects.filter(pk__in=lst).order_by('pk')
                path = None
                if title and title.logo_pic:
                    path = BASE_DIR + title.logo_pic.url
                split = str(credit_ids[0].sa_date).split(" ")
                date = datetime.datetime.strptime(str(split[0]), '%Y-%m-%d').strftime("%d/%m/%Y")
                adjustamt = self.request.GET.get('adjustment_value',None)
                remark = self.request.GET.get('remark',None)
                if adjustamt:
                    total_credit = float(total_r) + float(adjustamt)
                else:
                    total_credit = float(total_r)
                data = {'name': title.trans_h1 if title and title.trans_h1 else '',
                    'address': title.trans_h2 if title and title.trans_h2 else '',
                    'footer1':title.trans_footer1 if title and title.trans_footer1 else '',
                    'footer2':title.trans_footer2 if title and title.trans_footer2 else '',
                    'footer3':title.trans_footer3 if title and title.trans_footer3 else '',
                    'footer4':title.trans_footer4 if title and title.trans_footer4 else '',
                    'credit_ids': credit_ids, 'date':date,'total':total_r,'adjustamt':adjustamt if adjustamt else "",
                    'reason':rea_obj[0] if rea_obj else "",'remark':remark if remark else "",'total_credit':total_credit,
                    'credit': trm_lst,'cust': cust_obj,'creditno': credit_ids[0].credit_code,'fmspw':fmspw,'adjustamtstr': "0.00",
                    'path':path if path else '','title':title if title else None,
                    }
                template = get_template('creditnote.html')
                # Headless X display for wkhtmltopdf rendering.
                display = Display(visible=0, size=(800, 600))
                display.start()
                html = template.render(data)
                options = {
                    'margin-top': '.25in',
                    'margin-right': '.25in',
                    'margin-bottom': '.25in',
                    'margin-left': '.25in',
                    'encoding': "UTF-8",
                    'no-outline': None,
                }
                dst ="creditnote_" + str(str(credit_ids[0].credit_code)) + ".pdf"
                p=pdfkit.from_string(html,False,options=options)
                PREVIEW_PATH = dst
                # A placeholder file is written first so the path exists, then
                # overwritten with the real pdfkit bytes below.
                pdf = FPDF()
                pdf.add_page()
                pdf.set_font("Arial", size = 15)
                file_path = os.path.join(settings.PDF_ROOT, PREVIEW_PATH)
                pdf.output(file_path)
                if p:
                    file_path = os.path.join(settings.PDF_ROOT, PREVIEW_PATH)
                    report = os.path.isfile(file_path)
                    if report:
                        file_path = os.path.join(settings.PDF_ROOT, PREVIEW_PATH)
                        with open(file_path, 'wb') as fh:
                            fh.write(p)
                display.stop()
                ip_link = "http://"+request.META['HTTP_HOST']+"/media/pdf/creditnote_"+str(credit_ids[0].credit_code)+".pdf"
                result = {'status': status.HTTP_200_OK, "message": "Created Successfully", 'error': False,
                    'data': ip_link}
            else:
                result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Failed to create ", 'error': False}
            return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class ShowBalanceViewset(viewsets.ModelViewSet):
    """Show the latest TreatmentAccount balance row for each distinct
    treatment_parentcode among the requested Open treatments."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = ShowBalanceSerializer
    def list(self, request):
        """Return balances for GET param `treatment_id` (comma-separated pks).

        One serialized row per distinct parentcode; when several requested
        treatments share a parentcode, their accumulated unit amounts are
        written back onto that row as `reverse_price`.
        """
        try:
            treatment_id = self.request.GET.get('treatment_id',None)
            if treatment_id is None:
                result = {'status': status.HTTP_200_OK,"message":"Please give Treatment id!!",'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True).first()
            site = fmspw.loginsite
            treat_id = treatment_id.split(',')
            # `sum` shadows the builtin and is never used below.
            checklst = []; lst = []; sum = 0
            for i in treat_id:
                q = Treatment.objects.filter(pk=i,status='Open',site_code=site.itemsite_code)
                if not q:
                    result = {'status': status.HTTP_200_OK,"message":"Treatment ID does not exist!!",'error': True}
                    return Response(data=result, status=status.HTTP_200_OK)
                # 'Deposit', 'Top Up','CANCEL')
                # Latest account movement for this treatment's transaction.
                acc_ids = TreatmentAccount.objects.filter(ref_transacno=q[0].sa_transacno,
                    treatment_parentcode=q[0].treatment_parentcode,Site_Codeid=q[0].Site_Codeid).order_by('id').last()
                if q[0].treatment_parentcode not in checklst:
                    # First occurrence: start the reversal accumulator and emit
                    # the serialized balance row.
                    reverse_amt = 0
                    reverse_amt += q[0].unit_amount
                    checklst.append(q[0].treatment_parentcode)
                    queryset = TreatmentAccount.objects.filter(pk=acc_ids.pk)
                    if queryset:
                        serializer = self.get_serializer(queryset, many=True)
                        for data in serializer.data:
                            if data['balance']:
                                data['balance'] = "{:.2f}".format(float(data['balance']))
                            if data["outstanding"]:
                                data["outstanding"] = "{:.2f}".format(float(data['outstanding']))
                            dict_v = dict(data)
                            lst.append(dict_v)
                else:
                    # Duplicate parentcode: keep accumulating and back-fill the
                    # already-emitted row. NOTE(review): rows whose parentcode
                    # appears only once never receive `reverse_price`.
                    if q[0].treatment_parentcode in checklst:
                        reverse_amt += q[0].unit_amount
                        for l in lst:
                            if str(l['treatment_parentcode']) == q[0].treatment_parentcode:
                                l['reverse_price'] = "{:.2f}".format(float(reverse_amt))
            if lst != []:
                result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False, 'data': lst}
                return Response(result, status=status.HTTP_200_OK)
            else:
                serializer = self.get_serializer()
                result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class ReverseTrmtReasonAPIView(viewsets.ModelViewSet):
    """Read-only listing of the active treatment-reversal reasons."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    queryset = ReverseTrmtReason.objects.filter(is_active=True).order_by('id')
    serializer_class = ReverseTrmtReasonSerializer

    def list(self, request):
        """Return all active reversal reasons in the standard envelope."""
        try:
            reasons = self.filter_queryset(self.get_queryset())
            if reasons:
                payload = self.get_serializer(reasons, many=True).data
            else:
                # Mirror the original's no-arg serializer call on the empty path.
                self.get_serializer()
                payload = []
            result = {'status': status.HTTP_200_OK, "message": "Listed Succesfully",
                      'error': False, 'data': payload}
            return Response(data=result, status=status.HTTP_200_OK)
        except Exception as exc:
            return general_error_response(str(exc))
class VoidViewset(viewsets.ModelViewSet):
authentication_classes = [ExpiringTokenAuthentication]
permission_classes = [IsAuthenticated & authenticated_only]
queryset = PosHaud.objects.filter(isvoid=False).order_by('-pk')
serializer_class = VoidSerializer
def get_queryset(self):
fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True).first()
site = fmspw.loginsite
# year = date.today().year
# month = date.today().month
from_date = self.request.GET.get('from_date',None)
to_date = self.request.GET.get('to_date',None)
transac_no = self.request.GET.get('transac_no',None)
cust_code = self.request.GET.get('cust_code',None)
cust_name = self.request.GET.get('cust_name',None)
queryset = PosHaud.objects.filter(isvoid=False,
ItemSite_Codeid__pk=site.pk).order_by('-pk')
if not from_date and not to_date and not transac_no and not cust_code and not cust_name:
queryset = queryset
else:
if from_date and to_date:
queryset = queryset.filter(Q(sa_date__date__gte=from_date,sa_date__date__lte=to_date)).order_by('-pk')
if transac_no:
queryset = queryset.filter(sa_transacno_ref__icontains=transac_no).order_by('-pk')
if cust_code:
customer = Customer.objects.filter(pk=cust_code,cust_isactive=True,site_code=site.itemsite_code).last()
if customer:
queryset = queryset.filter(sa_custno__icontains=customer.cust_code).order_by('-pk')
else:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Logined Site Customer Doesn't Exist!!",'error': True}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
if cust_name:
queryset = queryset.filter(sa_custname__icontains=cust_name).order_by('-pk')
return queryset
    def list(self, request):
        """Paginated listing of un-voided transactions via the shared `response`
        helper, with sa_date reformatted to d-Mon-yy.

        NOTE(review): `cust_code` is effectively required — when it is absent,
        int(None) raises TypeError and the outer except returns the generic
        error response. The literal "undefined" short-circuits to empty data.
        """
        try:
            if str(self.request.GET.get('cust_code',None)) != "undefined":
                # NOTE(review): isinstance(int(x), int) is always True when
                # int(x) succeeds; non-numeric values raise into the except.
                if isinstance(int(self.request.GET.get('cust_code',None)), int):
                    serializer_class = VoidSerializer
                    queryset = self.filter_queryset(self.get_queryset())
                    total = len(queryset)
                    state = status.HTTP_200_OK
                    message = "Listed Succesfully"
                    error = False
                    data = None
                    result=response(self,request, queryset,total, state, message, error, serializer_class, data, action=self.action)
                    v = result.get('data')
                    d = v.get('dataList')
                    lst = []
                    # Reformat each row's sa_date (ISO "...T..." -> d-Mon-yy).
                    for dat in d:
                        dict_d = dict(dat)
                        if dict_d['sa_date']:
                            splt = str(dict_d['sa_date']).split('T')
                            dict_d['sa_date'] = datetime.datetime.strptime(str(splt[0]), "%Y-%m-%d").strftime("%d-%b-%y")
                        lst.append(dict_d)
                    v['dataList'] = lst
                    return Response(result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_200_OK,"message":"No Data",'error': False, "data":[]}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    @action(detail=False, methods=['get'], name='Details', permission_classes=[IsAuthenticated & authenticated_only],
    authentication_classes=[ExpiringTokenAuthentication])
    def Details(self, request):
        """Return the PosDaud detail lines of one un-voided PosHaud header
        (GET param `poshdr_id`), each decorated with the customer's pk and the
        header's cart id, with dt_amt formatted to 2 dp."""
        try:
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True).first()
            site = fmspw.loginsite
            poshdr_id = self.request.GET.get('poshdr_id',None)
            # if not isinstance(poshdr_id, int):
            #     result = {'status': status.HTTP_200_OK,"message":"Poshaud ID Should be Integer only!!",'error': True}
            #     return Response(data=result, status=status.HTTP_200_OK)
            haud_obj = PosHaud.objects.filter(pk=poshdr_id,isvoid=False,
                        ItemSite_Codeid__pk=site.pk).first()
            if haud_obj is None:
                result = {'status': status.HTTP_200_OK,"message":"PosHaud ID Does not exist!!",'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            cust_obj = Customer.objects.filter(cust_code=haud_obj.sa_custno,cust_isactive=True,
                        site_code=site.itemsite_code).first()
            daud_ids = PosDaud.objects.filter(sa_transacno=haud_obj.sa_transacno,
                        ItemSite_Codeid__pk=site.pk)
            if daud_ids:
                serializer = PosDaudDetailsSerializer(daud_ids, many=True)
                # Mutates the cached serializer.data dicts in place; the same
                # list is then returned in the payload.
                for data in serializer.data:
                    data['dt_amt'] = "{:.2f}".format(float(data['dt_amt']))
                    data['cust_noid'] = cust_obj.pk
                    data['cart_id'] = haud_obj.cart_id
                result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False, 'data': serializer.data}
            else:
                serializer = self.get_serializer()
                result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False, 'data': []}
            return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
def create(self, request):
try:
fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
site = fmspw[0].loginsite
poshdr_id = self.request.GET.get('poshdr_id',None)
# poshdrid = poshdr_id.split(',')
# for i in poshdrid:
haud_obj = PosHaud.objects.filter(pk=poshdr_id,isvoid=False,
ItemSite_Codeid__pk=site.pk).first()
if haud_obj is None:
result = {'status': status.HTTP_200_OK,"message":"PosHaud ID Does not exist!!",'error': True}
return Response(data=result, status=status.HTTP_200_OK)
gst = GstSetting.objects.filter(item_desc='GST',isactive=True).first()
# for p in poshdrid:
haudobj = PosHaud.objects.filter(pk=poshdr_id,isvoid=False,
ItemSite_Codeid__pk=site.pk).first()
if haudobj.cart_id:
ids_cart = ItemCart.objects.filter(isactive=True,cart_id=haudobj.cart_id,
sitecode=site.itemsite_code,cart_date=date.today(),
cust_noid__pk=haud_obj.sa_custnoid.pk).exclude(type__in=type_tx)
if ids_cart:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Already Cart is Created!!",'error': True}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
else:
haudobj.cart_id = None
haudobj.save()
ids_cartold = ItemCart.objects.filter(cart_id=haudobj.cart_id,cart_status="Inprogress",
sitecode=site.itemsite_code,cust_noid__pk=haud_obj.sa_custnoid.pk).exclude(type__in=type_tx).delete()
daud_ids = PosDaud.objects.filter(sa_transacno=haudobj.sa_transacno,
ItemSite_Codeid__pk=site.pk)
lineno = 0
control_obj = ControlNo.objects.filter(control_description__iexact="ITEM CART",Site_Codeid__pk=fmspw[0].loginsite.pk).first()
if not control_obj:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Item Cart Control No does not exist!!",'error': True}
return Response(result, status=status.HTTP_400_BAD_REQUEST)
cartre = ItemCart.objects.filter(sitecodeid=site).order_by('cart_id')
final = list(set([r.cart_id for r in cartre]))
# print(final,len(final),"final")
code_site = site.itemsite_code
prefix = control_obj.control_prefix
lst = []
if final != []:
for f in final:
newstr = f.replace(prefix,"")
new_str = newstr.replace(code_site, "")
lst.append(new_str)
lst.sort(reverse=True)
# print(lst,"lst")
c_no = int(lst[0]) + 1
cart_id = str(control_obj.control_prefix)+str(control_obj.Site_Codeid.itemsite_code)+str(c_no)
else:
cart_id = str(control_obj.control_prefix)+str(control_obj.Site_Codeid.itemsite_code)+str(control_obj.control_no)
haudobj.cart_id = cart_id
haudobj.save()
cart_lst = []
for d in daud_ids:
if d.itemcart:
lineno += 1
if lineno == 1:
check = "New"
else:
check = "Old"
cust_obj = Customer.objects.filter(pk=d.itemcart.cust_noid.pk,cust_isactive=True).first()
stock_obj = Stock.objects.filter(pk=d.itemcart.itemcodeid.pk,item_isactive=True).first()
tax_value = 0.0
if stock_obj.is_have_tax == True:
tax_value = gst.ITEM_VALUE
if d.itemcart.type == "Deposit":
type = "VT-Deposit"
elif d.itemcart.type == "Top Up":
type = "VT-Top Up"
elif d.itemcart.type == "Sales":
type = "VT-Sales"
else:
type = d.itemcart.type
cart = ItemCart(cart_date=date.today(),phonenumber=cust_obj.cust_phone2,
customercode=cust_obj.cust_code,cust_noid=cust_obj,lineno=lineno,
itemcodeid=stock_obj,itemcode=stock_obj.item_code,itemdesc=stock_obj.item_desc,
quantity=d.itemcart.quantity,price="{:.2f}".format(float(d.itemcart.price)),
sitecodeid=d.itemcart.sitecodeid,sitecode=d.itemcart.sitecodeid.itemsite_code,
cart_status="Inprogress",cart_id=cart_id,item_uom=d.itemcart.item_uom,
tax="{:.2f}".format(tax_value),check=check,ratio=d.itemcart.ratio,
discount=d.itemcart.discount,discount_amt=d.itemcart.discount_amt,
additional_discount=d.itemcart.additional_discount,
additional_discountamt=d.itemcart.additional_discountamt,
discount_price=d.itemcart.discount_price,total_price=d.itemcart.total_price,
trans_amt=d.itemcart.trans_amt,deposit=d.itemcart.deposit,type=type,
itemstatus=d.itemcart.itemstatus,remark=d.itemcart.remark,
discreason_txt=d.itemcart.discreason_txt,focreason=d.itemcart.focreason,
holditemqty=d.itemcart.holditemqty,holdreason=d.itemcart.holdreason,
done_sessions=d.itemcart.done_sessions,treatment_account=d.itemcart.treatment_account,
treatment=d.itemcart.treatment,deposit_account=d.itemcart.deposit_account,
prepaid_account=d.itemcart.prepaid_account)
cart.save()
for s in d.itemcart.sales_staff.all():
cart.sales_staff.add(s)
for se in d.itemcart.service_staff.all():
cart.service_staff.add(se)
for h in d.itemcart.helper_ids.all():
cart.helper_ids.add(h)
for dis in d.itemcart.disc_reason.all():
cart.disc_reason.add(dis)
for po in d.itemcart.pos_disc.all():
cart.pos_disc.add(po)
if cart.pk:
if cart.pk not in cart_lst:
cart_lst.append(cart.pk)
if cart_lst != [] and len(cart_lst) == len(daud_ids):
result = {'status': status.HTTP_200_OK, "message": "Created Successfully", 'error': False,'data':cart_id}
return Response(data=result, status=status.HTTP_200_OK)
except Exception as e:
invalid_message = str(e)
return general_error_response(invalid_message)
@action(methods=['post'], detail=False, permission_classes=[IsAuthenticated & authenticated_only],
authentication_classes=[ExpiringTokenAuthentication])
def VoidReturn(self, request):
try:
global type_tx
fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
site = fmspw[0].loginsite
cart_date = timezone.now().date()
cart_id = self.request.GET.get('cart_id',None)
if not cart_id:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Cart ID parms not given!!",'error': True}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
#This Transaction already VOID, no permission allow
cartobj_ids = ItemCart.objects.filter(isactive=True,cart_id=cart_id,
sitecode=site.itemsite_code,cart_date=date.today(),cart_status="Inprogress").exclude(type__in=type_tx)
if not cartobj_ids or cartobj_ids is None:
result = {'status': status.HTTP_200_OK,"message":"Cart ID Does not exist!!",'error': True}
return Response(data=result, status=status.HTTP_200_OK)
gst = GstSetting.objects.filter(item_desc='GST',isactive=True).first()
haudobj = PosHaud.objects.filter(cart_id=cart_id,isvoid=False,
ItemSite_Codeid__pk=site.pk,sa_custnoid=cartobj_ids[0].cust_noid).first()
if not haudobj:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"sa transacno does not exist in Poshaud!!",'error': True}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
daud_ids = PosDaud.objects.filter(sa_transacno=haudobj.sa_transacno,
ItemSite_Codeid__pk=site.pk)
taud_ids = PosTaud.objects.filter(sa_transacno=haudobj.sa_transacno,
ItemSIte_Codeid__pk=site.pk)
multi_ids = Multistaff.objects.filter(sa_transacno=haudobj.sa_transacno)
control_obj = ControlNo.objects.filter(control_description__iexact="Transaction number",Site_Codeid__pk=fmspw[0].loginsite.pk).first()
if not control_obj:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Transaction Control No does not exist!!",'error': True}
return Response(result, status=status.HTTP_400_BAD_REQUEST)
haudre = PosHaud.objects.filter(ItemSite_Codeid__pk=site.pk).order_by('sa_transacno')
final = list(set([r.sa_transacno for r in haudre]))
# print(final,len(final),"final")
saprefix = control_obj.control_prefix
code_site = site.itemsite_code
lst = []
if final != []:
for f in final:
newstr = f.replace(saprefix,"")
new_str = newstr.replace(code_site, "")
lst.append(new_str)
lst.sort(reverse=True)
# print(lst,"lst")
sa_no = int(lst[0]) + 1
sa_transacno = str(control_obj.control_prefix)+str(control_obj.Site_Codeid.itemsite_code)+str(sa_no)
else:
sa_transacno = str(control_obj.control_prefix)+str(control_obj.Site_Codeid.itemsite_code)+str(control_obj.control_no)
refcontrol_obj = ControlNo.objects.filter(control_description__iexact="Reference VOID No",Site_Codeid__pk=fmspw[0].loginsite.pk).first()
if not refcontrol_obj:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Reference VOID Control No does not exist!!",'error': True}
return Response(result, status=status.HTTP_400_BAD_REQUEST)
sa_transacno_ref = str(refcontrol_obj.control_prefix)+str(refcontrol_obj.Site_Codeid.itemsite_code)+str(refcontrol_obj.control_no)
poshaud_ids = PosHaud.objects.filter(sa_transacno=sa_transacno,sa_custno=haudobj.sa_custno,
ItemSite_Codeid__pk=site.pk,sa_transacno_ref=sa_transacno_ref)
if poshaud_ids:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"PosHaud Void sa transacno Already Created!!",'error': True}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
pos_haud_ids = PosHaud.objects.filter(sa_transacno=sa_transacno,sa_custno=haudobj.sa_custno,
ItemSite_Codeid__pk=site.pk)
if pos_haud_ids:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"PosHaud Void sa transacno Already Created!!",'error': True}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
cartnew_ids = ItemCart.objects.filter(isactive=True,cart_date=cart_date,
cart_id=cart_id,cart_status="Completed",is_payment=True,sitecodeid__pk=site.pk,
customercode=cartobj_ids[0].customercode).exclude(type__in=type_tx)
if cartnew_ids:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Invalid Cart ID,Send correct Cart Id,Given Cart ID Payment done for this Customer!!",'error': True}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
for ctl in cartnew_ids:
#,itemcart__pk=ctl.pk
pos_daud_ids = PosDaud.objects.filter(sa_transacno=sa_transacno,dt_itemnoid__pk=ctl.itemcodeid.pk,
ItemSite_Codeid__pk=site.pk,dt_lineno=ctl.lineno)
if pos_daud_ids:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"PosDaud Void Already Created!!",'error': True}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
voidreason_id = self.request.GET.get('voidreason_id',None)
void_obj = VoidReason.objects.filter(pk=voidreason_id,isactive=True)
if void_obj is None:
result = {'status': status.HTTP_200_OK,"message":"VoidReason ID Does not exist!!",'error': True}
return Response(data=result, status=status.HTTP_200_OK)
finalsatrasc = False
if haudobj.sa_transacno_type in ['Receipt','Non Sales']:
for t in taud_ids:
ids_taud = PosTaud.objects.filter(sa_transacno=sa_transacno,dt_lineno=t.dt_lineno,
ItemSIte_Codeid__pk=site.pk)
# print(ids_taud,"ids_taud")
if ids_taud:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"PosTaud Void Already Created!!",'error': True}
return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
taud = PosTaud(sa_transacno=sa_transacno,pay_groupid=t.pay_groupid,pay_group=t.pay_group,
pay_typeid=t.pay_typeid,pay_type=t.pay_type,pay_desc=t.pay_desc,pay_tendamt=t.pay_tendamt,
pay_tendrate=t.pay_tendrate,pay_tendcurr=t.pay_tendcurr,pay_amt=-t.pay_amt,pay_amtrate=t.pay_amtrate,
pay_amtcurr=t.pay_amtcurr,pay_rem1=t.pay_rem1,pay_rem2=t.pay_rem2,pay_rem3=t.pay_rem3,pay_rem4=t.pay_rem4,
pay_status=t.pay_status,pay_actamt=-t.pay_actamt,ItemSIte_Codeid=t.ItemSIte_Codeid,
itemsite_code=t.itemsite_code,paychange=t.paychange,dt_lineno=t.dt_lineno,
pay_gst_amt_collect=-t.pay_gst_amt_collect,pay_gst=-t.pay_gst,posdaudlineno=t.posdaudlineno,
posdaudlineamountassign=t.posdaudlineamountassign,posdaudlineamountused=t.posdaudlineamountused,
voucher_name=t.voucher_name,billed_by=t.billed_by,subtotal=-t.subtotal,tax=-t.tax,
discount_amt=-t.discount_amt,billable_amount=-t.billable_amount,credit_debit=t.credit_debit,
points=t.points,prepaid=t.prepaid,pay_premise=t.pay_premise,is_voucher=t.is_voucher,
voucher_no=t.voucher_no,voucher_amt=t.voucher_amt)
taud.save()
for m in multi_ids:
multi = Multistaff(sa_transacno=sa_transacno,item_code=m.item_code,emp_code=m.emp_code,
ratio=m.ratio,salesamt=-float("{:.2f}".format(float(m.salesamt))) if m.salesamt else 0,type=m.type,isdelete=m.isdelete,role=m.role,dt_lineno=m.dt_lineno,
level_group_code=m.level_group_code)
multi.save()
for d in daud_ids:
cart_obj = ItemCart.objects.filter(isactive=True,cart_id=cart_id,lineno=d.dt_lineno,
sitecode=site.itemsite_code,cart_date=date.today(),cart_status="Inprogress",
cust_noid=haudobj.sa_custnoid).exclude(type__in=type_tx).first()
topup_outstanding = d.topup_outstanding
if d.itemcart.type == 'Top Up':
topup_outstanding = d.topup_outstanding + d.dt_price
sales = "";service = ""
if cart_obj.sales_staff.all():
for i in cart_obj.sales_staff.all():
if sales == "":
sales = sales + i.display_name
elif not sales == "":
sales = sales +","+ i.display_name
if cart_obj.service_staff.all():
for s in cart_obj.service_staff.all():
if service == "":
service = service + s.display_name
elif not service == "":
service = service +","+ s.display_name
daud = PosDaud(sa_transacno=sa_transacno,dt_status="VT",dt_itemnoid=d.dt_itemnoid,
dt_itemno=d.dt_itemno,dt_itemdesc=d.dt_itemdesc,dt_price=d.dt_price,dt_promoprice="{:.2f}".format(float(d.dt_promoprice)),
dt_amt=-float("{:.2f}".format(float(d.dt_amt))),dt_qty=-d.dt_qty,dt_discamt=-d.dt_discamt if float(d.dt_discamt) > 0.0 else d.dt_discamt,
dt_discpercent=-d.dt_discpercent if float(d.dt_discpercent) > 0.0 else d.dt_discpercent,
dt_discdesc=d.dt_discdesc,dt_discno=d.dt_discno,dt_remark=d.dt_remark,dt_Staffnoid=d.dt_Staffnoid,
dt_staffno=d.dt_staffno,dt_staffname=d.dt_staffname,dt_reason=d.dt_reason,dt_discuser="",
dt_combocode=d.dt_combocode,ItemSite_Codeid=d.ItemSite_Codeid,itemsite_code=d.itemsite_code,
dt_lineno=d.dt_lineno,dt_stockupdate=d.dt_stockupdate,dt_stockremark=d.dt_stockremark,
dt_uom=d.dt_uom,isfoc=d.isfoc,item_remarks=None,next_payment=None,next_appt=None,
dt_transacamt="{:.2f}".format(float(d.dt_transacamt)),dt_deposit=-float("{:.2f}".format(float(d.dt_deposit))) if d.dt_deposit else 0,appt_time=None,hold_item_out=d.hold_item_out,
issue_date=d.issue_date,hold_item=d.hold_item,holditemqty=d.holditemqty,st_ref_treatmentcode='',
item_status_code=d.item_status_code,first_trmt_done=d.first_trmt_done,first_trmt_done_staff_code=d.first_trmt_done_staff_code,
first_trmt_done_staff_name=d.first_trmt_done_staff_name,record_detail_type=d.record_detail_type,
trmt_done_staff_code=d.trmt_done_staff_code,trmt_done_staff_name=d.trmt_done_staff_name,
trmt_done_id=d.trmt_done_id,trmt_done_type=d.trmt_done_type,topup_service_trmt_code=d.topup_service_trmt_code,
topup_product_treat_code=d.topup_product_treat_code,topup_prepaid_trans_code=d.topup_prepaid_trans_code,
topup_prepaid_type_code=d.topup_prepaid_type_code,voucher_link_cust=d.voucher_link_cust,
voucher_no=d.voucher_no,update_prepaid_bonus=d.update_prepaid_bonus,deduct_commission=d.deduct_commission,
deduct_comm_refline=d.deduct_comm_refline,gst_amt_collect=-float("{:.2f}".format(float(d.gst_amt_collect))) if d.gst_amt_collect else 0,
topup_prepaid_pos_trans_lineno=d.topup_prepaid_pos_trans_lineno,open_pp_uid_ref=None,compound_code=d.compound_code,
topup_outstanding=topup_outstanding,t1_tax_code=d.t1_tax_code,t1_tax_amt=d.t1_tax_amt,
t2_tax_code=d.t2_tax_code,t2_tax_amt=d.t2_tax_amt,dt_grossamt=d.dt_grossamt,dt_topup_old_outs_amt=d.dt_topup_old_outs_amt,
dt_topup_new_outs_amt=d.dt_topup_new_outs_amt,dt_td_tax_amt=d.dt_td_tax_amt,earnedpoints=d.earnedpoints,
earnedtype=d.earnedtype,redeempoints=d.redeempoints,redeemtype=d.redeemtype,itemcart=cart_obj,
staffs=sales +" "+"/"+" "+ service)
daud.save()
if int(d.itemcart.itemcodeid.item_div) == 3:
if d.itemcart.type == 'Deposit':
acc_ids = TreatmentAccount.objects.filter(sa_transacno=haudobj.sa_transacno,type='Deposit',
cust_code=haudobj.sa_custno,site_code=site.itemsite_code)
for acc in acc_ids:
TreatmentAccount.objects.filter(pk=acc.pk).update(sa_status="VOID",updated_at=timezone.now())
treat_ids = Treatment.objects.filter(sa_transacno=haudobj.sa_transacno,
cust_code=haudobj.sa_custno,site_code=site.itemsite_code)
for trt in treat_ids:
Treatment.objects.filter(pk=trt.pk).update(status="Cancel",sa_status="VOID")
sal_acc_ids = TreatmentAccount.objects.filter(sa_transacno=haudobj.sa_transacno,type='Sales',
cust_code=haudobj.sa_custno,site_code=site.itemsite_code)
for sal in sal_acc_ids:
TreatmentAccount.objects.filter(pk=acc.pk).update(description=d.itemcart.itemcodeid.item_name,sa_status="VOID",updated_at=timezone.now())
appt_ids = Appointment.objects.filter(sa_transacno=sal.ref_transacno,
treatmentcode=sal.ref_no,itemsite_code=site.itemsite_code).update(appt_status="Cancelled")
master_ids = Treatment_Master.objects.filter(sa_transacno=sal.ref_transacno,
treatment_code=sal.ref_no,site_code=site.itemsite_code).update(status="Cancel")
elif d.itemcart.type == 'Top Up':
tacc_ids = TreatmentAccount.objects.filter(sa_transacno=haudobj.sa_transacno,type='Top Up',
cust_code=haudobj.sa_custno,site_code=site.itemsite_code)
for ac in tacc_ids:
balance = ac.balance - ac.amount
outstanding = ac.outstanding + ac.amount
TreatmentAccount(Cust_Codeid=ac.Cust_Codeid,cust_code=ac.cust_code,
description=ac.description,ref_no=sa_transacno,type=ac.type,amount=-float("{:.2f}".format(float(ac.amount))) if ac.amount else 0,
balance="{:.2f}".format(float(balance)),user_name=ac.user_name,User_Nameid=ac.User_Nameid,
ref_transacno=ac.ref_transacno,sa_transacno=sa_transacno,qty=-ac.qty,
outstanding="{:.2f}".format(float(outstanding)),deposit=-float("{:.2f}".format(float(ac.deposit))) if ac.deposit else 0,treatment_parentcode=ac.treatment_parentcode,
treatment_code=ac.treatment_code,sa_status="VT",cas_name=ac.cas_name,sa_staffno=ac.sa_staffno,
sa_staffname=ac.sa_staffname,next_paydate=ac.next_paydate,hasduedate=ac.hasduedate,
dt_lineno=ac.dt_lineno,lpackage=ac.lpackage,package_code=ac.package_code,Site_Codeid=ac.Site_Codeid,
site_code=ac.site_code,treat_code=ac.treat_code,focreason=ac.focreason,itemcart=cart_obj).save()
elif d.itemcart.type == 'Sales':
sacc_ids = TreatmentAccount.objects.filter(sa_transacno=haudobj.sa_transacno,type='Sales',
cust_code=haudobj.sa_custno,site_code=site.itemsite_code)
description = d.itemcart.itemcodeid.item_name+" "+"(Void Transaction by {0})".format(fmspw[0].pw_userlogin)
Treatment.objects.filter(pk=d.itemcart.treatment.pk).update(course=description,status="Open",
trmt_room_code=None,treatment_count_done=0)
for sa in sacc_ids:
master_ids = Treatment_Master.objects.filter(sa_transacno=sa.ref_transacno,
treatment_code=sa.ref_no,site_code=site.itemsite_code).update(status="Cancel")
appt_ids = Appointment.objects.filter(sa_transacno=sa.ref_transacno,
treatmentcode=sa.ref_no,itemsite_code=site.itemsite_code).update(appt_status="Cancelled")
TreatmentAccount.objects.filter(pk=sa.pk).update(sa_status='VOID')
# type__in=('Deposit', 'Top Up')
olacc_ids = TreatmentAccount.objects.filter(ref_transacno=sa.ref_transacno,
treatment_parentcode=sa.treatment_parentcode,cust_code=haudobj.sa_custno,site_code=site.itemsite_code).order_by('id').exclude(type='Sales').last()
TreatmentAccount(Cust_Codeid=sa.Cust_Codeid,cust_code=sa.cust_code,
description=description,ref_no=sa.ref_no,type=sa.type,amount="{:.2f}".format(float(d.itemcart.treatment.unit_amount)),
balance="{:.2f}".format(float(olacc_ids.balance)),user_name=sa.user_name,User_Nameid=sa.User_Nameid,
ref_transacno=sa.ref_transacno,sa_transacno=sa_transacno,qty=sa.qty,
outstanding="{:.2f}".format(float(olacc_ids.outstanding)) if olacc_ids.outstanding else None,deposit=-float("{:.2f}".format(float(sa.deposit))) if sa.deposit else 0,treatment_parentcode=sa.treatment_parentcode,
treatment_code=sa.treatment_code,sa_status="SA",cas_name=fmspw[0].pw_userlogin,sa_staffno=sa.sa_staffno,
sa_staffname=sa.sa_staffname,next_paydate=sa.next_paydate,hasduedate=sa.hasduedate,
dt_lineno=sa.dt_lineno,lpackage=sa.lpackage,package_code=sa.package_code,Site_Codeid=sa.Site_Codeid,
site_code=sa.site_code,treat_code=sa.treat_code,focreason=sa.focreason,itemcart=cart_obj).save()
ihelper_ids = ItemHelper.objects.filter(helper_transacno=haudobj.sa_transacno,site_code=site.itemsite_code)
for hlp in ihelper_ids:
ItemHelper(item_code=hlp.item_code,item_name=hlp.item_name,line_no=hlp.line_no,
sa_transacno=hlp.sa_transacno,amount=-hlp.amount,helper_name=hlp.helper_name,
helper_code=hlp.helper_code,site_code=hlp.site_code,share_amt=-hlp.share_amt,
helper_transacno=sa_transacno,system_remark=hlp.system_remark,
wp1=hlp.wp1,wp2=hlp.wp2,wp3=hlp.wp3,td_type_code=hlp.td_type_code,
td_type_short_desc=hlp.td_type_short_desc).save()
elif int(d.itemcart.itemcodeid.item_div) == 1:
if d.itemcart.type == 'Deposit':
dacc_ids = DepositAccount.objects.filter(sa_transacno=haudobj.sa_transacno,sa_status='SA',type='Deposit',
cust_code=haudobj.sa_custno,site_code=site.itemsite_code)
for depo in dacc_ids:
tpcontrolobj = ControlNo.objects.filter(control_description__iexact="TopUp",Site_Codeid__pk=fmspw[0].loginsite.pk).first()
if not tpcontrolobj:
result = {'status': status.HTTP_400_BAD_REQUEST,"message":"TopUp Control No does not exist!!",'error': True}
return Response(result, status=status.HTTP_400_BAD_REQUEST)
tp_code = str(tpcontrolobj.control_prefix)+str(tpcontrolobj.Site_Codeid.itemsite_code)+str(tpcontrolobj.control_no)
balance = depo.balance - depo.amount
desc = "Cancel"+" "+"Product Amount : "+str("{:.2f}".format(float(depo.amount)))
DepositAccount(cust_code=depo.cust_code,type="CANCEL",amount=-float("{:.2f}".format(float(depo.amount))) if depo.amount else 0,balance="{:.2f}".format(float(balance)),
user_name=depo.user_name,qty=depo.qty,outstanding=0.0,deposit="{:.2f}".format(float(depo.deposit)),
cas_name=fmspw[0].pw_userlogin,sa_staffno=depo.sa_staffno,sa_staffname=depo.sa_staffname,
deposit_type=depo.deposit_type,sa_transacno=depo.sa_transacno,description=desc,
sa_status="VT",item_barcode=depo.item_barcode,item_description=depo.item_description,
treat_code=depo.treat_code,void_link=depo.void_link,lpackage=depo.lpackage,
package_code=depo.package_code,dt_lineno=depo.dt_lineno,Cust_Codeid=depo.Cust_Codeid,
Site_Codeid=depo.Site_Codeid,site_code=depo.site_code,Item_Codeid=depo.Item_Codeid,
item_code=depo.item_code,ref_transacno=depo.ref_transacno,ref_productcode=depo.ref_productcode,
ref_code=tp_code).save()
tpcontrolobj.control_no = int(tpcontrolobj.control_no) + 1
tpcontrolobj.save()
# DepositAccount.objects.filter(pk=depo.pk).update(sa_status="VT",item_description="Cancel"+depo.item_description,updated_at=timezone.now())
#ItemBatch
batch_ids = ItemBatch.objects.filter(site_code=site.itemsite_code,
item_code=d.dt_itemnoid.item_code,uom=d.dt_uom).order_by('pk').last()
if batch_ids:
addamt = batch_ids.qty + d.dt_qty
batch_ids.qty = addamt
batch_ids.save()
#Stktrn
stktrn_ids = Stktrn.objects.filter(store_no=site.itemsite_code,
itemcode=d.dt_itemno,item_uom=d.dt_uom,trn_docno=haudobj.sa_transacno,
line_no=d.dt_lineno).last()
currenttime = timezone.now()
post_time = str(currenttime.hour)+str(currenttime.minute)+str(currenttime.second)
amt_add = stktrn_ids.trn_balqty - stktrn_ids.trn_qty
if stktrn_ids:
stktrn_id = Stktrn(trn_no=stktrn_ids.trn_no,post_time=post_time,aperiod=stktrn_ids.aperiod,
itemcode=stktrn_ids.itemcode,store_no=site.itemsite_code,
tstore_no=stktrn_ids.tstore_no,fstore_no=stktrn_ids.fstore_no,trn_docno=sa_transacno,
trn_type="VT",trn_db_qty=stktrn_ids.trn_db_qty,trn_cr_qty=stktrn_ids.trn_cr_qty,
trn_qty=-stktrn_ids.trn_qty,trn_balqty=amt_add,trn_balcst=stktrn_ids.trn_balcst,
trn_amt=stktrn_ids.trn_amt,trn_cost=stktrn_ids.trn_cost,trn_ref=stktrn_ids.trn_ref,
hq_update=stktrn_ids.hq_update,line_no=stktrn_ids.line_no,item_uom=stktrn_ids.item_uom,
item_batch=stktrn_ids.item_batch,mov_type=stktrn_ids.mov_type,item_batch_cost=stktrn_ids.item_batch_cost,
stock_in=stktrn_ids.stock_in,trans_package_line_no=stktrn_ids.trans_package_line_no).save()
elif d.itemcart.type == 'Top Up':
dtacc_ids = DepositAccount.objects.filter(ref_code=haudobj.sa_transacno,sa_status='SA',type='Top Up',
cust_code=haudobj.sa_custno,site_code=site.itemsite_code)
for dt in dtacc_ids:
balance = dt.balance - dt.amount
outstanding = dt.outstanding + dt.amount
DepositAccount(cust_code=dt.cust_code,type=dt.type,amount=-float("{:.2f}".format(float(dt.amount))) if dt.amount else 0,
balance="{:.2f}".format(float(balance)),user_name=dt.user_name,qty=-dt.qty,outstanding="{:.2f}".format(float(outstanding)),
deposit="{:.2f}".format(float(dt.deposit)),cas_name=dt.cas_name,sa_staffno=dt.sa_staffno,
sa_staffname=dt.sa_staffname,deposit_type=dt.deposit_type,sa_transacno=dt.sa_transacno,
description=dt.description,ref_code=sa_transacno,sa_status="VT",item_barcode=dt.item_barcode,
item_description=dt.item_description,treat_code=dt.treat_code,void_link=dt.void_link,
lpackage=dt.lpackage,package_code=dt.package_code,dt_lineno=dt.dt_lineno,Cust_Codeid=dt.Cust_Codeid,
Site_Codeid=dt.Site_Codeid,site_code=dt.site_code,Item_Codeid=dt.Item_Codeid,item_code=dt.item_code,
ref_transacno=dt.ref_transacno,ref_productcode=dt.ref_productcode).save()
elif int(d.itemcart.itemcodeid.item_div) == 5:
if d.itemcart.type == 'Deposit':
pacc_ids = PrepaidAccount.objects.filter(pp_no=haudobj.sa_transacno,sa_status='DEPOSIT',
cust_code=haudobj.sa_custno,site_code=site.itemsite_code)
for pa in pacc_ids:
PrepaidAccount.objects.filter(pk=pa.pk).update(remain=0.0,status=False,sa_status="VT",updated_at=timezone.now(),
cust_code=haudobj.sa_custno,site_code=site.itemsite_code)
elif d.itemcart.type == 'Top Up':
ptacc_ids = PrepaidAccount.objects.filter(topup_no=haudobj.sa_transacno,sa_status='TOPUP',
cust_code=haudobj.sa_custno,site_code=site.itemsite_code)
for pt in ptacc_ids:
PrepaidAccount.objects.filter(pk=pt.pk).update(status=False,updated_at=timezone.now())
remain = pt.remain - pt.topup_amt
outstanding = pt.outstanding + pt.topup_amt
PrepaidAccount(pp_no=pt.pp_no,pp_type=pt.pp_type,pp_desc=pt.pp_desc,exp_date=pt.exp_date,
cust_code=pt.cust_code,cust_name=pt.cust_name,pp_amt=pt.pp_amt,pp_bonus=pt.pp_bonus,
pp_total=pt.pp_total,transac_no=pt.transac_no,item_no=pt.item_no,use_amt=pt.use_amt,
remain=remain,ref1=pt.ref1,ref2=pt.ref2,status=True,site_code=pt.site_code,
sa_status='TOPUP',exp_status=pt.exp_status,voucher_no=pt.voucher_no,isvoucher=pt.isvoucher,
has_deposit=pt.has_deposit,topup_amt=-pt.topup_amt,outstanding=outstanding,
active_deposit_bonus=pt.active_deposit_bonus,topup_no=sa_transacno,topup_date=pt.topup_date,
line_no=pt.line_no,staff_name=pt.staff_name,staff_no=pt.staff_no,pp_type2=pt.pp_type2,
condition_type1=pt.condition_type1,pos_daud_lineno=pt.pos_daud_lineno,mac_uid_ref=pt.mac_uid_ref,
lpackage=pt.lpackage,package_code=pt.package_code,package_code_lineno=pt.package_code_lineno,
prepaid_disc_type=pt.prepaid_disc_type,prepaid_disc_percent=pt.prepaid_disc_percent,
Cust_Codeid=pt.Cust_Codeid,Site_Codeid=pt.Site_Codeid,Item_Codeid=pt.Item_Codeid,
item_code=pt.item_code).save()
elif int(d.itemcart.itemcodeid.item_div) == 4:
if d.itemcart.type == 'Deposit':
voucher_ids = VoucherRecord.objects.filter(sa_transacno=haudobj.sa_transacno,
cust_code=haudobj.sa_custno,site_code=site.itemsite_code).order_by('pk')
for vcc in voucher_ids:
VoucherRecord.objects.filter(pk=vcc.pk).update(value=-vcc.value,updated_at=timezone.now())
h = haudobj
# void_obj[0].reason_desc if void_obj else None
PosHaud.objects.filter(pk=h.pk).update(isvoid=True,void_refno=sa_transacno)
total_outstanding = h.total_outstanding + h.sa_transacamt
haud = PosHaud(cas_name=fmspw[0].pw_userlogin,sa_transacno=sa_transacno,sa_status="VT",
sa_remark=h.sa_remark,sa_totamt=-float("{:.2f}".format(float(h.sa_totamt))),sa_totqty=-h.sa_totqty,sa_totdisc=-float("{:.2f}".format(float(h.sa_totdisc))) if h.sa_totdisc else 0,
sa_totgst=-float("{:.2f}".format(float(h.sa_totgst))) if h.sa_totgst else None,sa_totservice=h.sa_totservice if h.sa_totservice else None,sa_amtret=h.sa_amtret if h.sa_amtret else None,sa_staffnoid=h.sa_staffnoid,
sa_staffno=h.sa_staffno,sa_staffname=h.sa_staffname,sa_custnoid=h.sa_custnoid,sa_custno=h.sa_custno,
sa_custname=h.sa_custname,sa_reason=None,sa_discuser=h.sa_discuser,sa_discno=h.sa_discno,
sa_discdesc=h.sa_discdesc,sa_discvalue=h.sa_discvalue,sa_discamt=-float("{:.2f}".format(float(h.sa_discamt))) if h.sa_discamt else 0,sa_disctotal=-float("{:.2f}".format(float(h.sa_disctotal))) if h.sa_disctotal else 0,
ItemSite_Codeid=h.ItemSite_Codeid,itemsite_code=h.itemsite_code,sa_cardno=h.sa_cardno,seat_no=h.seat_no,
sa_depositamt=-h.sa_depositamt,sa_chargeamt=None,isvoid=True,void_refno=h.sa_transacno,
payment_remarks=h.payment_remarks,next_payment=h.next_payment,next_appt=h.next_appt,
sa_transacamt=h.sa_transacamt,appt_time=h.appt_time,hold_item=h.hold_item,sa_discecard=h.sa_discecard,
holditemqty=h.holditemqty,walkin=h.walkin,cust_sig=h.cust_sig,sa_round="{:.2f}".format(float(h.sa_round)) if h.sa_round else None,
total_outstanding="{:.2f}".format(float(total_outstanding)) if total_outstanding else None,trans_user_login=h.trans_user_login,
trans_user_loginid=h.trans_user_loginid,sa_transacno_ref=sa_transacno_ref,
sa_transacno_type='Void Transaction',cust_sig_path=h.cust_sig_path,sa_transacno_title="VOID",
issuestrans_user_login=h.trans_user_login)
haud.save()
if haud.pk:
control_obj.control_no = int(control_obj.control_no) + 1
control_obj.save()
refcontrol_obj.control_no = int(refcontrol_obj.control_no) + 1
refcontrol_obj.save()
finalsatrasc = haud.sa_transacno
cart_ids = ItemCart.objects.filter(isactive=True,cart_id=cart_id,cart_status="Inprogress",
sitecode=site.itemsite_code,cart_date=date.today(),cust_noid=haudobj.sa_custnoid).exclude(type__in=type_tx)
for cart in cart_ids:
ItemCart.objects.filter(pk=cart.pk).update(cart_status='Completed',quantity=-cart.quantity)
result = {'status': status.HTTP_200_OK, "message": "Created Successfully", 'error': False, 'data':{'sa_transacno':finalsatrasc if finalsatrasc else None}}
return Response(data=result, status=status.HTTP_200_OK)
elif haudobj.sa_transacno_type == 'Redeem Service':
for ta in taud_ids:
taud = PosTaud(sa_transacno=sa_transacno,pay_groupid=ta.pay_groupid,pay_group=ta.pay_group,
pay_typeid=ta.pay_typeid,pay_type=ta.pay_type,pay_desc=ta.pay_desc,pay_tendamt=ta.pay_tendamt,
pay_tendrate=ta.pay_tendrate,pay_tendcurr=ta.pay_tendcurr,pay_amt=ta.pay_amt,pay_amtrate=ta.pay_amtrate,
pay_amtcurr=ta.pay_amtcurr,pay_rem1=ta.pay_rem1,pay_rem2=ta.pay_rem2,pay_rem3=ta.pay_rem3,pay_rem4=ta.pay_rem4,
pay_status=ta.pay_status,pay_actamt=ta.pay_actamt,ItemSIte_Codeid=ta.ItemSIte_Codeid,
itemsite_code=ta.itemsite_code,paychange=ta.paychange,dt_lineno=ta.dt_lineno,pay_gst_amt_collect=ta.pay_gst_amt_collect,
pay_gst=ta.pay_gst,posdaudlineno=ta.posdaudlineno,posdaudlineamountassign=ta.posdaudlineamountassign,
posdaudlineamountused=ta.posdaudlineamountused,voucher_name=ta.voucher_name,pp_bal=ta.pp_bal,
billed_by=ta.billed_by,subtotal=ta.subtotal,tax=ta.tax,discount_amt=ta.discount_amt,
billable_amount=ta.billable_amount,credit_debit=ta.credit_debit,points=ta.points,prepaid=ta.prepaid,
pay_premise=ta.pay_premise,is_voucher=ta.is_voucher,voucher_no=ta.voucher_no,voucher_amt=ta.voucher_amt)
taud.save()
for m in multi_ids:
multi = Multistaff(sa_transacno=sa_transacno,item_code=m.item_code,emp_code=m.emp_code,
ratio=m.ratio,salesamt=-float("{:.2f}".format(float(m.salesamt))) if m.salesamt else 0,type=m.type,isdelete=m.isdelete,role=m.role,dt_lineno=m.dt_lineno,
level_group_code=m.level_group_code)
multi.save()
for da in daud_ids:
if float(da.dt_discpercent) > 0.0:
dt_discpercent = -da.dt_discpercent
else:
dt_discpercent = da.dt_discpercent
cart_obj = ItemCart.objects.filter(isactive=True,cart_id=cart_id,lineno=da.dt_lineno,
sitecode=site.itemsite_code,cart_date=date.today(),cart_status="Inprogress",
cust_noid=haudobj.sa_custnoid).exclude(type__in=type_tx).first()
sales = "";service = ""
if cart_obj.sales_staff.all():
for i in cart_obj.sales_staff.all():
if sales == "":
sales = sales + i.display_name
elif not sales == "":
sales = sales +","+ i.display_name
if cart_obj.service_staff.all():
for s in cart_obj.service_staff.all():
if service == "":
service = service + s.display_name
elif not service == "":
service = service +","+ s.display_name
daud = PosDaud(sa_transacno=sa_transacno,dt_status="VT",dt_itemnoid=da.dt_itemnoid,dt_itemno=da.dt_itemno,
dt_itemdesc=da.dt_itemdesc,dt_price=da.dt_price,dt_promoprice="{:.2f}".format(float(da.dt_promoprice)),dt_amt=-float("{:.2f}".format(float(da.dt_amt))) if da.dt_amt else 0,
dt_qty=-da.dt_qty,dt_discamt=-da.dt_discamt if float(da.dt_discamt) > 0.0 else da.dt_discamt,
dt_discpercent=-da.dt_discpercent if float(da.dt_discpercent) > 0.0 else da.dt_discpercent,dt_discdesc=da.dt_discdesc,
dt_discno=da.dt_discno,dt_remark=da.dt_remark,dt_Staffnoid=da.dt_Staffnoid,dt_staffno=da.dt_staffno,
dt_staffname=da.dt_staffname,dt_reason=da.dt_reason,dt_discuser=da.dt_discuser,dt_combocode=da.dt_combocode,
ItemSite_Codeid=da.ItemSite_Codeid,itemsite_code=da.itemsite_code,dt_lineno=da.dt_lineno,
dt_stockupdate=da.dt_stockupdate,dt_stockremark=da.dt_stockremark,dt_uom=da.dt_uom,isfoc=da.isfoc,
item_remarks=da.item_remarks,next_payment=da.next_payment,next_appt=da.next_appt,dt_transacamt="{:.2f}".format(float(da.dt_transacamt)),
dt_deposit=-float("{:.2f}".format(float(da.dt_deposit))) if da.dt_deposit else 0,appt_time=da.appt_time,hold_item_out=da.hold_item_out,issue_date=da.issue_date,
hold_item=da.hold_item,holditemqty=da.holditemqty,st_ref_treatmentcode=da.st_ref_treatmentcode,
item_status_code=da.item_status_code,first_trmt_done=da.first_trmt_done,
first_trmt_done_staff_code=da.first_trmt_done_staff_code,first_trmt_done_staff_name=da.first_trmt_done_staff_name,
record_detail_type=da.record_detail_type,trmt_done_staff_code=da.trmt_done_staff_code,trmt_done_staff_name=da.trmt_done_staff_name,
trmt_done_id=da.trmt_done_id,trmt_done_type=da.trmt_done_type,topup_service_trmt_code=da.topup_service_trmt_code,
topup_product_treat_code=da.topup_product_treat_code,topup_prepaid_trans_code=da.topup_prepaid_trans_code,
topup_prepaid_type_code=da.topup_prepaid_type_code,voucher_link_cust=da.voucher_link_cust,
voucher_no=da.voucher_no,update_prepaid_bonus=da.update_prepaid_bonus,deduct_commission=da.deduct_commission,
deduct_comm_refline=da.deduct_comm_refline,gst_amt_collect=-float("{:.2f}".format(float(da.gst_amt_collect))) if da.gst_amt_collect else 0,
topup_prepaid_pos_trans_lineno=da.topup_prepaid_pos_trans_lineno,open_pp_uid_ref=None,compound_code=da.compound_code,
topup_outstanding=da.topup_outstanding,t1_tax_code=da.t1_tax_code,t1_tax_amt=da.t1_tax_amt,
t2_tax_code=da.t2_tax_code,t2_tax_amt=da.t2_tax_amt,dt_grossamt=da.dt_grossamt,dt_topup_old_outs_amt=da.dt_topup_old_outs_amt,
dt_topup_new_outs_amt=da.dt_topup_new_outs_amt,dt_td_tax_amt=da.dt_td_tax_amt,earnedpoints=da.earnedpoints,
earnedtype=da.earnedtype,redeempoints=da.redeempoints,redeemtype=da.redeemtype,itemcart=cart_obj,
staffs=sales +" "+"/"+" "+ service)
daud.save()
if int(da.itemcart.itemcodeid.item_div) == 3:
if da.itemcart.type == 'Sales':
sacc_ids = TreatmentAccount.objects.filter(sa_transacno=haudobj.sa_transacno,type='Sales',
cust_code=haudobj.sa_custno,site_code=site.itemsite_code)
description = da.itemcart.itemcodeid.item_name+" "+"(Void Transaction by {0})".format(fmspw[0].pw_userlogin)
Treatment.objects.filter(pk=da.itemcart.treatment.pk).update(course=description,status="Open",
trmt_room_code=None,treatment_count_done=0)
for sa in sacc_ids:
master_ids = Treatment_Master.objects.filter(sa_transacno=sa.ref_transacno,
treatment_code=sa.ref_no,site_code=site.itemsite_code).update(status="Cancel")
appt_ids = Appointment.objects.filter(sa_transacno=sa.ref_transacno,
treatmentcode=sa.ref_no,itemsite_code=site.itemsite_code).update(appt_status="Cancelled")
TreatmentAccount.objects.filter(pk=sa.pk).update(sa_status='VOID')
# type__in=('Deposit', 'Top Up')
olacc_ids = TreatmentAccount.objects.filter(ref_transacno=sa.ref_transacno,
treatment_parentcode=sa.treatment_parentcode,cust_code=haudobj.sa_custno,site_code=site.itemsite_code).order_by('id').exclude(type='Sales').last()
TreatmentAccount(Cust_Codeid=sa.Cust_Codeid,cust_code=sa.cust_code,
description=description,ref_no=sa.ref_no,type=sa.type,amount="{:.2f}".format(float(da.itemcart.treatment.unit_amount)),
balance="{:.2f}".format(float(olacc_ids.balance)),user_name=sa.user_name,User_Nameid=sa.User_Nameid,
ref_transacno=sa.ref_transacno,sa_transacno=sa_transacno,qty=sa.qty,
outstanding="{:.2f}".format(float(olacc_ids.outstanding)) if olacc_ids.outstanding else None,deposit=-float("{:.2f}".format(float(sa.deposit))) if sa.deposit else 0,treatment_parentcode=sa.treatment_parentcode,
treatment_code=sa.treatment_code,sa_status="SA",cas_name=fmspw[0].pw_userlogin,sa_staffno=sa.sa_staffno,
sa_staffname=sa.sa_staffname,next_paydate=sa.next_paydate,hasduedate=sa.hasduedate,
dt_lineno=sa.dt_lineno,lpackage=sa.lpackage,package_code=sa.package_code,Site_Codeid=sa.Site_Codeid,
site_code=sa.site_code,treat_code=sa.treat_code,focreason=sa.focreason,itemcart=cart_obj).save()
ihelper_ids = ItemHelper.objects.filter(helper_transacno=haudobj.sa_transacno,
site_code=site.itemsite_code)
for hlp in ihelper_ids:
ItemHelper(item_code=hlp.item_code,item_name=hlp.item_name,line_no=hlp.line_no,
sa_transacno=hlp.sa_transacno,amount=-float("{:.2f}".format(float(hlp.amount))) if hlp.amount else 0,helper_name=hlp.helper_name,
helper_code=hlp.helper_code,site_code=hlp.site_code,share_amt=-hlp.share_amt,
helper_transacno=sa_transacno,system_remark=hlp.system_remark,
wp1=hlp.wp1,wp2=hlp.wp2,wp3=hlp.wp3,td_type_code=hlp.td_type_code,
td_type_short_desc=hlp.td_type_short_desc).save()
h = haudobj
# void_obj[0].reason_desc if void_obj else None
PosHaud.objects.filter(pk=h.pk).update(isvoid=True,void_refno=sa_transacno)
haud = PosHaud(cas_name=fmspw[0].pw_userlogin,sa_transacno=sa_transacno,sa_status="VT",
sa_remark=h.sa_remark,sa_totamt="{:.2f}".format(float(h.sa_totamt)),sa_totqty=h.sa_totqty,sa_totdisc="{:.2f}".format(float(h.sa_totdisc)) if h.sa_totdisc else 0,
sa_totgst="{:.2f}".format(float(h.sa_totgst)) if h.sa_totgst else 0,sa_totservice="{:.2f}".format(float(h.sa_totservice)) if h.sa_totservice else 0,sa_amtret="{:.2f}".format(float(h.sa_amtret)) if h.sa_amtret else 0 ,sa_staffnoid=h.sa_staffnoid,
sa_staffno=h.sa_staffno,sa_staffname=h.sa_staffname,sa_custnoid=h.sa_custnoid,sa_custno=h.sa_custno,
sa_custname=h.sa_custname,sa_reason=None,sa_discuser=h.sa_discuser,sa_discno=h.sa_discno,
sa_discdesc=h.sa_discdesc,sa_discvalue=h.sa_discvalue,sa_discamt="{:.2f}".format(float(h.sa_discamt)) if h.sa_discamt else 0,sa_disctotal="{:.2f}".format(float(h.sa_disctotal)) if h.sa_disctotal else 0,
ItemSite_Codeid=h.ItemSite_Codeid,itemsite_code=h.itemsite_code,sa_cardno=h.sa_cardno,seat_no=h.seat_no,
sa_depositamt="{:.2f}".format(float(h.sa_depositamt)) if h.sa_depositamt else 0,sa_chargeamt=None,isvoid=True,void_refno=h.sa_transacno,
payment_remarks=h.payment_remarks,next_payment=h.next_payment,next_appt=h.next_appt,
sa_transacamt="{:.2f}".format(float(h.sa_transacamt)) if h.sa_transacamt else 0,appt_time=h.appt_time,hold_item=h.hold_item,sa_discecard=h.sa_discecard,
holditemqty=h.holditemqty,walkin=h.walkin,cust_sig=h.cust_sig,sa_round="{:.2f}".format(float(h.sa_round)) if h.sa_round else 0,
total_outstanding="{:.2f}".format(float(h.total_outstanding)) if h.total_outstanding else 0,trans_user_login=h.trans_user_login,
trans_user_loginid=h.trans_user_loginid,sa_transacno_ref=sa_transacno_ref,
sa_transacno_type='Void Transaction',cust_sig_path=h.cust_sig_path,sa_transacno_title="VOID",
issuestrans_user_login=fmspw[0].pw_userlogin)
haud.save()
if haud.pk:
control_obj.control_no = int(control_obj.control_no) + 1
control_obj.save()
refcontrol_obj.control_no = int(refcontrol_obj.control_no) + 1
refcontrol_obj.save()
finalsatrasc = haud.sa_transacno
cart_ids = ItemCart.objects.filter(isactive=True,cart_id=cart_id,cart_status="Inprogress",
sitecode=site.itemsite_code,cart_date=date.today(),cust_noid=haudobj.sa_custnoid).exclude(type__in=type_tx)
for cart in cart_ids:
ItemCart.objects.filter(pk=cart.pk).update(cart_status='Completed',quantity=-cart.quantity)
result = {'status': status.HTTP_200_OK, "message": "Created Successfully", 'error': False,
'data':{'sa_transacno':finalsatrasc if finalsatrasc else None}}
return Response(data=result, status=status.HTTP_200_OK)
except Exception as e:
invalid_message = str(e)
return general_error_response(invalid_message)
def sa_transacno_update_void(self, site, fmspw):
    """Resynchronise the "Transaction number" control counter for a site.

    Scans every PosHaud transaction number for *site*, strips the control
    prefix and the site code, and sets the control counter to the highest
    remaining numeric part + 1.

    Parameters:
        self: passed through from the calling view (unused here).
        site: the ItemSite whose transactions are scanned.
        fmspw: the active Fmspw login row; its loginsite selects the control row.

    Returns:
        True on success, or a 400 Response when the control row is missing.
    """
    sacontrol_obj = ControlNo.objects.filter(control_description__iexact="Transaction number",Site_Codeid__pk=fmspw.loginsite.pk).first()
    if not sacontrol_obj:
        result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Transaction Control No does not exist!!",'error': True}
        return Response(result, status=status.HTTP_400_BAD_REQUEST)
    haudre = PosHaud.objects.filter(ItemSite_Codeid__pk=site.pk).order_by('sa_transacno')
    # De-duplicate transaction numbers before stripping prefix/site code.
    haudfinal = list(set([r.sa_transacno for r in haudre]))
    code_site = site.itemsite_code
    prefix_s = sacontrol_obj.control_prefix
    slst = []
    if haudfinal != []:
        for fh in haudfinal:
            fhstr = fh.replace(prefix_s,"")
            fhnew_str = fhstr.replace(code_site, "")
            slst.append(fhnew_str)
        # BUG FIX: the previous implementation did slst.sort(reverse=True) and took
        # slst[0], which compares digit strings lexicographically ("99" > "100"),
        # yielding a too-small counter and duplicate transaction numbers once the
        # sequence crosses a digit-length boundary. Compare numerically instead.
        sa_id = max(int(s) for s in slst) + 1
        sacontrol_obj.control_no = str(sa_id)
        sacontrol_obj.save()
    return True
class VoidCheck(generics.ListAPIView):
    """Return the latest voidable receipt (PosHaud) for a customer.

    Side effects: receipts from previous days are detached from their
    "Inprogress" carts and those stale carts deleted, then the ITEM CART and
    Transaction number control counters are resynchronised before the most
    recent cart-linked receipt is serialized and returned.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = VoidListSerializer
    def list(self, request):
        try:
            # The front end sends the literal string "undefined" when no customer is picked.
            if str(self.request.GET.get('cust_id',None)) != "undefined":
                if isinstance(int(self.request.GET.get('cust_id',None)), int):
                    fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True).first()
                    site = fmspw.loginsite
                    cust_id = self.request.GET.get('cust_id',None)
            cust_obj = Customer.objects.filter(pk=request.GET.get('cust_id',None),cust_isactive=True).first()
            if cust_obj is None:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Please give customer id!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            # NOTE(review): when cust_id is "undefined", the pk filter above raises
            # before this check is reached, so this branch looks unreachable -- confirm.
            if str(self.request.GET.get('cust_id',None)) == "undefined":
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Please select customer!!",'error': True}
                return Response(result, status=status.HTTP_400_BAD_REQUEST)
            control_obj = ControlNo.objects.filter(control_description__iexact="ITEM CART",Site_Codeid__pk=fmspw.loginsite.pk).first()
            if not control_obj:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Item Cart Control No does not exist!!",'error': True}
                return Response(result, status=status.HTTP_400_BAD_REQUEST)
            # poshdr_id = self.request.GET.get('poshdr_id',None)
            # queryset = ItemCart.objects.filter(isactive=True,cart_date=date.today(),customercode=cust_obj.cust_code,
            # sitecode=site.itemsite_code,cart_status="Inprogress",is_payment=False).exclude(type__in=type_tx)
            #sa_date__date=date.today()
            # Every non-voided receipt for this customer that still references a cart.
            queryset = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,
                isvoid=False,ItemSite_Codeid__pk=site.pk).only('sa_custno','isvoid','cart_id',
                'itemsite_code').exclude(cart_id=None).order_by('pk')
            # print(queryset,"queryset")
            oldidcart = list(set([q.cart_id for q in queryset if q.cart_id]))
            # print(oldidcart,"oldidcart")
            # Carts from earlier days still left "Inprogress" (stale void carts).
            old_cart_ids = ItemCart.objects.filter(customercode=cust_obj.cust_code,
                cart_id__in=oldidcart,sitecode=site.itemsite_code,isactive=True,
                cart_status="Inprogress").filter(~Q(cart_date=date.today())).exclude(type__in=type_tx).order_by('pk')
            todidcart = list(set([t.cart_id for t in old_cart_ids if t.cart_id]))
            # print(todidcart,"todidcart")
            if queryset:
                if len(queryset) >= 1:
                    #previous record
                    # Detach receipts from the stale (not-today) carts, then delete those carts.
                    query_set = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,
                        isvoid=False,ItemSite_Codeid__pk=site.pk,cart_id__in=todidcart).only('sa_custno','isvoid',
                        'cart_id','itemsite_code').exclude(cart_id=None).order_by('pk')
                    # print(query_set,"query_set")
                    for q in query_set:
                        # active / Inactive , not today rec
                        q.cart_id = None
                        q.save()
                    idscart = ItemCart.objects.filter(customercode=cust_obj.cust_code,
                        cart_id__in=todidcart,sitecode=site.itemsite_code,
                        cart_status="Inprogress").filter(~Q(cart_date=date.today())).exclude(type__in=type_tx).delete()
                    # print(idscart,"idscart")
                    #today record
                    # Re-read the remaining cart-linked receipts after the cleanup above.
                    querysetafter = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,
                        isvoid=False,ItemSite_Codeid__pk=site.pk).only('sa_custno','isvoid','cart_id',
                        'itemsite_code').exclude(cart_id=None).order_by('pk')
                    idcart = list(set([e.cart_id for e in querysetafter if e.cart_id]))
                    idscart_ids = ItemCart.objects.filter(customercode=cust_obj.cust_code,
                        cart_id__in=idcart,sitecode=site.itemsite_code,cart_date=date.today(),
                        isactive=True,cart_status="Inprogress").exclude(type__in=type_tx).order_by('pk')
                    # print(idscart_ids,"idscart_ids")
                    if len(querysetafter) > 1:
                        if idscart_ids:
                            # Keep only the newest cart; detach every receipt that points elsewhere.
                            lastrec = idscart_ids.last()
                            # print(lastrec,"lastrec")
                            del_query_set = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,
                                isvoid=False,ItemSite_Codeid__pk=site.pk).only('sa_custno','isvoid',
                                'cart_id','itemsite_code').filter(~Q(cart_id=lastrec.cart_id)).exclude(cart_id=None).order_by('pk')
                            # print(del_query_set,"del_query_set")
                            for dq in del_query_set:
                                dq.cart_id = None
                                dq.save()
                                # NOTE(review): dq.cart_id was set to None two lines above, so
                                # this filter queries cart_id=None -- possibly intended to use
                                # the pre-save cart id; confirm against the original design.
                                idscart = ItemCart.objects.filter(customercode=cust_obj.cust_code,
                                    cart_id=dq.cart_id,sitecode=site.itemsite_code,cart_date=date.today(),
                                    cart_status="Inprogress").filter(~Q(cart_id=lastrec.cart_id)).exclude(type__in=type_tx).delete()
                            # Re-derive the next ITEM CART control number from surviving cart ids.
                            cartre = ItemCart.objects.filter(sitecodeid=site).order_by('cart_id')
                            final = list(set([r.cart_id for r in cartre]))
                            code_site = site.itemsite_code
                            prefix = control_obj.control_prefix
                            clst = []
                            if final != []:
                                for f in final:
                                    newstr = f.replace(prefix,"")
                                    new_str = newstr.replace(code_site, "")
                                    clst.append(new_str)
                                # NOTE(review): reverse lexicographic sort of digit strings --
                                # "99" outranks "100"; same flaw fixed in sa_transacno_update_void.
                                clst.sort(reverse=True)
                                # print(clst,"clst")
                                cart_id = int(clst[0]) + 1
                                control_obj.control_no = str(cart_id)
                                control_obj.save()
                            savalue = sa_transacno_update_void(self, site, fmspw)
                    last_rec = idscart_ids.last()
                    if last_rec:
                        ids_cart_ids = ItemCart.objects.filter(customercode=cust_obj.cust_code,
                            cart_id=last_rec.cart_id,sitecode=site.itemsite_code,cart_date=date.today(),
                            isactive=True,cart_status="Inprogress").exclude(type__in=type_tx).order_by('pk')
                        # print(ids_cart_ids,"ids_cart_ids")
                        if ids_cart_ids:
                            finalquery = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,
                                isvoid=False,ItemSite_Codeid__pk=site.pk,cart_id=last_rec.cart_id).only('sa_custno','isvoid','cart_id',
                                'itemsite_code').exclude(cart_id=None).order_by('pk')
                            if finalquery:
                                serializer = VoidListSerializer(finalquery, many=True)
                                result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False, 'data': serializer.data}
                            else:
                                result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False, 'data': []}
                        else:
                            result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False, 'data': []}
                    else:
                        result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False, 'data': []}
                else:
                    result = {'status': status.HTTP_204_NO_CONTENT,"message":"No Content",'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_200_OK,"message":"No Data",'error': False, "data":[]}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class VoidCancel(generics.CreateAPIView):
    """Cancel a pending void: detach the cart from its receipt and delete the cart rows."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = VoidCancelSerializer

    def create(self, request):
        """Validate the posted cart_id, unlink it from its PosHaud receipt and
        remove the matching in-progress ItemCart rows for the same customer."""
        try:
            fmspw = Fmspw.objects.filter(user=self.request.user, pw_isactive=True).first()
            site = fmspw.loginsite
            serializer = self.get_serializer(data=request.data)
            # Guard clauses: serializer errors, then a falsy cart id.
            if not serializer.is_valid():
                errors = serializer.errors
                payload = {'status': status.HTTP_400_BAD_REQUEST,
                           "message": errors['cart_id'][0], 'error': True}
                return Response(payload, status=status.HTTP_400_BAD_REQUEST)
            cart_id = request.data['cart_id']
            if not cart_id:
                payload = {'status': status.HTTP_400_BAD_REQUEST,
                           "message": "Invalid Cart ID !!", 'error': True}
                return Response(data=payload, status=status.HTTP_400_BAD_REQUEST)
            # The cart must still be "Inprogress" today to be cancellable.
            carts = ItemCart.objects.filter(cart_id=cart_id, sitecode=site.itemsite_code,
                                            cart_status="Inprogress", cart_date=date.today())
            if not carts:
                payload = {'status': status.HTTP_400_BAD_REQUEST,
                           "message": "ItemCart is not in Inprogress so cant delete!!", 'error': True}
                return Response(data=payload, status=status.HTTP_400_BAD_REQUEST)
            # Detach the first non-voided receipt that still points at this cart.
            receipts = PosHaud.objects.filter(cart_id=cart_id, sa_custno=carts[0].customercode,
                                              isvoid=False, ItemSite_Codeid__pk=site.pk).only(
                                              'isvoid', 'cart_id', 'sa_custno', 'itemsite_code'
                                              ).exclude(cart_id=None).order_by('pk')
            if receipts:
                head = receipts[0]
                head.cart_id = None
                head.save()
            #cart_date=date.today()
            # Drop the customer's in-progress cart rows (transaction-type rows excluded).
            ItemCart.objects.filter(cart_id=cart_id, cust_noid=carts[0].cust_noid,
                                    sitecode=site.itemsite_code,
                                    cart_status="Inprogress").exclude(type__in=type_tx).delete()
            payload = {'status': status.HTTP_200_OK,
                       "message": "Void Cancelled Successfully", 'error': False}
            return Response(data=payload, status=status.HTTP_200_OK)
        except Exception as e:
            return general_error_response(str(e))
class VoidReasonViewset(viewsets.ModelViewSet):
    """CRUD API over active VoidReason rows, ordered by primary key."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    queryset = VoidReason.objects.filter(isactive=True).order_by('pk')
    serializer_class = VoidReasonSerializer

    def list(self, request):
        """Return every active void reason wrapped in the standard envelope;
        an empty-data envelope (204 in the body, 200 on the wire) when none exist."""
        try:
            records = self.filter_queryset(self.get_queryset())
            if not records:
                self.get_serializer()  # kept for parity with the original flow
                payload = {'status': status.HTTP_204_NO_CONTENT, "message": "No Content",
                           'error': False, 'data': []}
            else:
                payload = {'status': status.HTTP_200_OK, "message": "Listed Succesfully",
                           'error': False, 'data': self.get_serializer(records, many=True).data}
            return Response(data=payload, status=status.HTTP_200_OK)
        except Exception as e:
            return general_error_response(str(e))
class TreatmentAccListViewset(viewsets.ModelViewSet):
    """Treatment deposit-account listing for a customer.

    list():     deposit rows for a customer, optionally filtered by year,
                enriched with receipt reference, latest balance/outstanding
                and a totals header.
    retrieve(): the full ledger (all rows sharing the deposit's ref/parent
                code) for one TreatmentAccount row.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = TreatmentAccSerializer
    def list(self, request):
        try:
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)[0]
            site = fmspw.loginsite
            cust_id = self.request.GET.get('cust_id',None)
            cust_obj = Customer.objects.filter(pk=request.GET.get('cust_id',None),cust_isactive=True).only('pk','cust_isactive').first()
            if not cust_obj:
                result = {'status': status.HTTP_200_OK,"message":"Please give customer id!!",'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            # 'year' is required; the literal "All" disables the year filter.
            if self.request.GET.get('year',None):
                year = self.request.GET.get('year',None)
                if year != "All":
                    queryset = TreatmentAccount.objects.filter(site_code=fmspw.loginsite.itemsite_code,cust_code=cust_obj.cust_code,sa_date__year=year,type='Deposit').only('site_code','cust_code','sa_date','type').order_by('pk')
                else:
                    queryset = TreatmentAccount.objects.filter(site_code=fmspw.loginsite.itemsite_code,cust_code=cust_obj.cust_code,type='Deposit').only('site_code','cust_code','type').order_by('pk')
            else:
                result = {'status': status.HTTP_200_OK,"message":"Please give year!!",'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            if queryset:
                serializer = self.get_serializer(queryset, many=True)
                # Running totals for the header block.
                lst = []; id_lst = []; balance = 0; outstanding = 0
                for data in serializer.data:
                    trobj = TreatmentAccount.objects.filter(pk=data["id"]).first()
                    # trmids = Treatment.objects.filter(treatment_account__pk=trobj.pk,site_code=site.itemsite_code).only('treatment_account').first()
                    trmids = Treatment.objects.filter(treatment_parentcode=trobj.treatment_parentcode,
                        site_code=site.itemsite_code).only('treatment_parentcode').first()
                    # print(data,"data")
                    # Process each deposit row once, keyed by serializer id.
                    if data["id"] not in id_lst:
                        id_lst.append(data["id"])
                        # pos_haud = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,
                        # sa_transacno=trobj.sa_transacno,sa_transacno_type='Receipt',
                        # itemsite_code=fmspw.loginsite.itemsite_code).only('sa_custno','sa_transacno','sa_transacno_type').order_by('pk').first()
                        # sa_transacno_type__in=['Receipt','NON SALES']
                        pos_haud = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,
                            sa_transacno=trobj.sa_transacno,itemsite_code=fmspw.loginsite.itemsite_code
                            ).only('sa_custno','sa_transacno').order_by('pk').first()
                        if pos_haud:
                            data['transaction'] = pos_haud.sa_transacno_ref if pos_haud.sa_transacno_ref else ""
                            if pos_haud.sa_date:
                                # Reformat "YYYY-MM-DD ..." into DD-MM-YYYY for display.
                                splt = str(pos_haud.sa_date).split(" ")
                                data['sa_date'] = datetime.datetime.strptime(str(splt[0]), "%Y-%m-%d").strftime("%d-%m-%Y")
                        data['description'] = ""
                        if trmids:
                            if trmids.course:
                                data['description'] = trmids.course
                        # Total of Deposit/Top Up balances linked to this receipt.
                        sumacc_ids = TreatmentAccount.objects.filter(ref_transacno=trobj.sa_transacno,
                            treatment_parentcode=data["treatment_parentcode"],site_code=trobj.site_code,
                            type__in=('Deposit', 'Top Up')).only('ref_transacno','treatment_parentcode','site_code','type').order_by('pk').aggregate(Sum('balance'))
                        # NOTE(review): aggregate() always returns a dict, so this branch is
                        # always taken; when no rows match, balance__sum is None and the
                        # float() below raises (caught by the outer except) -- confirm intent.
                        if sumacc_ids:
                            data["payment"] = "{:.2f}".format(float(sumacc_ids['balance__sum']))
                        else:
                            data["payment"] = "0.00"
                        # Latest ledger row carries the current balance/outstanding.
                        acc_ids = TreatmentAccount.objects.filter(ref_transacno=trobj.sa_transacno,
                            treatment_parentcode=data["treatment_parentcode"],site_code=trobj.site_code
                            ).only('ref_transacno','treatment_parentcode','site_code').last()
                        if acc_ids.balance:
                            data["balance"] = "{:.2f}".format(float(acc_ids.balance))
                            balance += acc_ids.balance
                        else:
                            data["balance"] = "0.00"
                        if acc_ids.outstanding:
                            data["outstanding"] = "{:.2f}".format(float(acc_ids.outstanding))
                            outstanding += acc_ids.outstanding
                        else:
                            data["outstanding"] = "0.00"
                        lst.append(data)
                if lst != []:
                    header_data = {"balance" : "{:.2f}".format(float(balance)),
                        "outstanding" : "{:.2f}".format(float(outstanding)), "treatment_count" : len(id_lst)}
                    result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False,
                        'header_data':header_data, 'data': lst}
                    return Response(data=result, status=status.HTTP_200_OK)
                else:
                    result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
                    return Response(data=result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def get_object(self, pk):
        # 404 instead of a 500 when the account row is missing.
        try:
            return TreatmentAccount.objects.get(pk=pk)
        except TreatmentAccount.DoesNotExist:
            raise Http404
    def retrieve(self, request, pk=None):
        try:
            account = self.get_object(pk)
            # Full ledger: every row that references this account's receipt/parent code.
            queryset = TreatmentAccount.objects.filter(ref_transacno=account.sa_transacno,
                treatment_parentcode=account.treatment_parentcode,site_code=account.site_code
                ).only('ref_transacno','treatment_parentcode','site_code').order_by('pk')
            if queryset:
                last = queryset.last()
                serializer = self.get_serializer(queryset, many=True)
                for v in serializer.data:
                    v.pop('payment')
                    if v['sa_date']:
                        # ISO timestamp -> "DD-Mon-YY" for display.
                        splt = str(v['sa_date']).split('T')
                        v['sa_date'] = datetime.datetime.strptime(str(splt[0]), "%Y-%m-%d").strftime("%d-%b-%y")
                    trobj = TreatmentAccount.objects.filter(pk=v["id"]).only('pk').first()
                    v['type'] = trobj.type
                    if trobj.amount:
                        v["amount"] = "{:.2f}".format(float(trobj.amount))
                    else:
                        v["amount"] = "0.00"
                    if v["balance"]:
                        v["balance"] = "{:.2f}".format(float(v['balance']))
                    else:
                        v["balance"] = "0.00"
                    if v["outstanding"]:
                        v["outstanding"] = "{:.2f}".format(float(v['outstanding']))
                    else:
                        v["outstanding"] = "0.00"
                result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False,
                    'header_data':{'credit_balance':last.balance if last.balance else "0.00",
                    'outstanding_balance':last.outstanding if last.outstanding else "0.00"},
                    'data': serializer.data}
                return Response(result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class CreditNoteListViewset(viewsets.ModelViewSet):
    """Credit-note listing and refund adjustment for a customer.

    list():           a customer's credit notes (open only, unless is_all).
    retrieve():       one note's adjustment view (balance / refund fields).
    partial_update(): record a CnRefund row and write the new balance back,
                      closing the note when it reaches zero.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = CreditNoteSerializer
    def list(self, request):
        try:
            cust_id = self.request.GET.get('cust_id', None)
            cust_obj = Customer.objects.filter(pk=request.GET.get('cust_id', None),cust_isactive=True).only('pk','cust_isactive').first()
            if cust_obj is None:
                result = {'status': status.HTTP_200_OK, "message": "Customer ID does not exist!!", 'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            fmspw = Fmspw.objects.filter(user=self.request.user, pw_isactive=True)
            site = fmspw[0].loginsite
            # is_all truthy => include closed notes as well; default is OPEN only.
            is_all = self.request.GET.get('is_all', None)
            if is_all:
                queryset = CreditNote.objects.filter(cust_code=cust_obj.cust_code,site_code=site.itemsite_code).only('cust_code').order_by('pk')
            else:
                queryset = CreditNote.objects.filter(cust_code=cust_obj.cust_code, status='OPEN',site_code=site.itemsite_code).only('cust_code','status').order_by('pk')
            if queryset:
                serializer = self.get_serializer(queryset, many=True)
                lst = []
                for data in serializer.data:
                    if data['sa_date']:
                        # ISO timestamp -> "DD/MM/YYYY" for display.
                        splt = str(data['sa_date']).split('T')
                        data['sa_date'] = datetime.datetime.strptime(str(splt[0]), "%Y-%m-%d").strftime("%d/%m/%Y")
                    crdobj = CreditNote.objects.filter(pk=data["id"]).first()
                    # sa_transacno_type='Receipt',
                    # Attach the receipt reference for the note's originating transaction.
                    pos_haud = PosHaud.objects.filter(sa_custno=cust_obj.cust_code, sa_transacno=crdobj.sa_transacno,
                        itemsite_code=site.itemsite_code).order_by('pk').first()
                    if pos_haud:
                        data['transaction'] = pos_haud.sa_transacno_ref
                    else:
                        data['transaction'] = ""
                    if data["amount"]:
                        data["amount"] = "{:.2f}".format(float(data['amount']))
                    else:
                        data["amount"] = "0.00"
                    if data["balance"]:
                        data["balance"] = "{:.2f}".format(float(data['balance']))
                    else:
                        data["balance"] = "0.00"
                    lst.append(data)
                result = {'status': status.HTTP_200_OK, "message": "Listed Succesfully", 'error': False, 'data': lst}
            else:
                result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
            return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def get_object(self, pk):
        # 404 instead of a 500 when the credit note is missing.
        try:
            return CreditNote.objects.get(pk=pk)
        except CreditNote.DoesNotExist:
            raise Http404
    def retrieve(self, request, pk=None):
        try:
            creditnote = self.get_object(pk)
            serializer = CreditNoteAdjustSerializer(creditnote,context={'request': self.request})
            # Adjustment starts at zero; the client computes the actual figure.
            adjustamt = 0.00
            val = serializer.data
            data = {'id': val['id'],'credit_code': val['credit_code'],'balance': val['balance'],
                'new_balance': val['new_balance'],'refund_amt': val['refund_amt'],
                'adjust_amount':"{:.2f}".format(float(adjustamt))}
            result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False,
                'data': data}
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def partial_update(self, request, pk=None):
        try:
            fmspw = Fmspw.objects.filter(user=self.request.user, pw_isactive=True)
            site = fmspw[0].loginsite
            new_balance = self.request.data.get('new_balance', None)
            refund_amt = self.request.data.get('refund_amt', None)
            if new_balance is None and refund_amt is None:
                result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Please give New Balance or refund amount!!", 'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            creditnt = self.get_object(pk)
            #front calculation
            #adjust_amount = new_balance - creditnt.balance
            balance = creditnt.balance
            serializer = CreditNoteAdjustSerializer(creditnt, data=request.data, partial=True, context={'request': self.request})
            if serializer.is_valid():
                # Only the new_balance == refund_amt case is processed; anything else
                # falls through to the serializer-errors 400 response below.
                # NOTE(review): if only one of the two values is supplied, float(None)
                # raises here and is reported via the generic error handler -- confirm.
                if float(new_balance) == float(refund_amt):
                    if float(new_balance) == balance:
                        result = {'status': status.HTTP_400_BAD_REQUEST, "message": "New Balance and Refund Amt, Existing credit note Balance should not be same!!", 'error': True}
                        return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                    # NOTE(review): the comparison is against the existing balance while the
                    # message says "new balance" -- confirm which was intended.
                    if float(refund_amt) > float(balance):
                        result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Refund Amt Should not be greater than new balance!!", 'error': True}
                        return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                    control_obj = ControlNo.objects.filter(control_description__iexact="Refund CN",Site_Codeid__pk=site.pk).first()
                    if not control_obj:
                        result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Refund CN Control No does not exist!!",'error': True}
                        return Response(result, status=status.HTTP_400_BAD_REQUEST)
                    # Refund document number: prefix + site code + running counter.
                    refund_code = str(control_obj.control_prefix)+str(control_obj.Site_Codeid.itemsite_code)+str(control_obj.control_no)
                    adjustamt = float(balance) - float(new_balance)
                    if not refund_amt is None and float(refund_amt) > 0.00:
                        amount = refund_amt
                    elif not refund_amt is None and float(refund_amt) == 0.00:
                        amount = 0.00
                    # print(amount,balance,adjustamt,new_balance,"daa")
                    # Audit row recording the before/adjust/after amounts of this refund.
                    cn_refund = CnRefund.objects.create(rfn_trans_no=refund_code,cn_no=creditnt.credit_code,
                        site_code=site.itemsite_code,amount=amount,staff_code=fmspw[0].emp_code,transac_no=creditnt.sa_transacno,
                        rfn_before_amt=balance,rfn_adjust_amt=adjustamt,rfn_new_amt=float(new_balance),
                        rfn_date=timezone.now())
                    if cn_refund.pk:
                        # Advance the Refund CN counter only after a successful insert.
                        control_obj.control_no = int(control_obj.control_no) + 1
                        control_obj.save()
                    # Zero balance closes the credit note; positive keeps it open.
                    if not new_balance is None and float(new_balance) > 0.00:
                        serializer.save(balance=new_balance)
                    elif not new_balance is None and float(new_balance) == 0.00:
                        serializer.save(balance=new_balance,status="CLOSE")
                    result = {'status': status.HTTP_200_OK,"message":"Updated Succesfully",'error': False}
                    return Response(result, status=status.HTTP_200_OK)
            result = {'status': status.HTTP_400_BAD_REQUEST,"message":serializer.errors,'error': True}
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class ProductAccListViewset(viewsets.ModelViewSet):
    """Product deposit-account listing for a customer.

    list():     deposit rows enriched with the receipt reference, latest
                balance/outstanding, hold-item details and a totals header.
    retrieve(): the full ledger for one DepositAccount row (same receipt
                and product reference).
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = ProductAccSerializer
    def list(self, request):
        try:
            cust_id = self.request.GET.get('cust_id', None)
            cust_obj = Customer.objects.filter(pk=request.GET.get('cust_id', None),cust_isactive=True).only('pk','cust_isactive').first()
            if cust_obj is None:
                result = {'status': status.HTTP_200_OK, "message": "Customer ID does not exist!!", 'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            fmspw = Fmspw.objects.filter(user=self.request.user, pw_isactive=True)[0]
            site = fmspw.loginsite
            queryset = DepositAccount.objects.filter(site_code=site.itemsite_code,cust_code=cust_obj.cust_code,
                type='Deposit').only('site_code','cust_code','type').order_by('pk')
            if queryset:
                serializer = self.get_serializer(queryset, many=True)
                # Running totals for the header block.
                lst = []; id_lst = []; balance = 0; outstanding = 0; hold_qty = 0
                for data in serializer.data:
                    depobj = DepositAccount.objects.filter(pk=data["id"]).only('pk').first()
                    if data["id"]:
                        id_lst.append(data["id"])
                        # sa_transacno_type='Receipt',ItemSite_Codeid__pk=site.pk,
                        # Receipt reference for the deposit's originating transaction.
                        pos_haud = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,
                            sa_transacno=depobj.sa_transacno,itemsite_code=site.itemsite_code,
                            ).only('sa_custno','sa_transacno','itemsite_code').order_by('pk').first()
                        if pos_haud:
                            data['transaction'] = pos_haud.sa_transacno_ref if pos_haud.sa_transacno_ref else ""
                            if pos_haud.sa_date:
                                # Reformat "YYYY-MM-DD ..." into "DD-Mon-YY" for display.
                                splt = str(pos_haud.sa_date).split(" ")
                                data['sa_date'] = datetime.datetime.strptime(str(splt[0]), "%Y-%m-%d").strftime("%d-%b-%y")
                        if not data['package_code']:
                            data['package_code'] = ""
                        # Latest ledger row carries the current balance/outstanding.
                        acc_ids = DepositAccount.objects.filter(sa_transacno=depobj.sa_transacno,
                            site_code=depobj.site_code,ref_productcode=depobj.ref_productcode
                            ).only('sa_transacno','site_code','ref_productcode').last()
                        if acc_ids.balance:
                            data["balance"] = "{:.2f}".format(float(acc_ids.balance))
                            balance += acc_ids.balance
                        else:
                            data["balance"] = "0.00"
                        if acc_ids.outstanding:
                            data["outstanding"] = "{:.2f}".format(float(acc_ids.outstanding))
                            outstanding += acc_ids.outstanding
                        else:
                            data["outstanding"] = "0.00"
                        # Hold-item status/quantity for this deposit's barcode, if any.
                        holdids = Holditemdetail.objects.filter(sa_transacno=depobj.sa_transacno,
                            itemno=depobj.item_barcode,itemsite_code=site.itemsite_code,
                            sa_custno=cust_obj.cust_code).only('sa_transacno','itemno').last()
                        if holdids:
                            data['item_status'] = holdids.status if holdids.status else ""
                            hold_qty += holdids.holditemqty
                            data["hold_qty"] = holdids.holditemqty
                            data['hold_id'] = holdids.pk
                        else:
                            data['item_status'] = ""
                            data["hold_qty"] = ""
                            data['hold_id'] = ""
                        lst.append(data)
                if lst != []:
                    header_data = {"balance" : "{:.2f}".format(float(balance)), "totalholdqty" : hold_qty,
                        "outstanding" : "{:.2f}".format(float(outstanding)), "totalproduct_count" : len(id_lst)}
                    result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False,
                        'header_data':header_data, 'data': lst}
                    return Response(data=result, status=status.HTTP_200_OK)
                else:
                    result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
                    return Response(data=result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def get_object(self, pk):
        # 404 instead of a 500 when the deposit account is missing.
        try:
            return DepositAccount.objects.get(pk=pk)
        except DepositAccount.DoesNotExist:
            raise Http404
    def retrieve(self, request, pk=None):
        try:
            account = self.get_object(pk)
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True).first()
            site = fmspw.loginsite
            # Full ledger: every row sharing this account's receipt and product reference.
            queryset = DepositAccount.objects.filter(sa_transacno=account.sa_transacno,
                site_code=account.site_code,ref_productcode=account.ref_productcode).only('sa_transacno',
                'site_code','ref_productcode').order_by('pk')
            if queryset:
                hold_qty = 0
                last = queryset.last()
                holdids = Holditemdetail.objects.filter(sa_transacno=account.sa_transacno,
                    itemno=account.item_barcode,itemsite_code=site.itemsite_code,
                    sa_custno=account.cust_code).only('sa_transacno','itemno','itemsite_code','sa_custno').first()
                if holdids:
                    hold_qty += holdids.holditemqty
                serializer = self.get_serializer(queryset, many=True)
                for v in serializer.data:
                    v.pop('package_code');v.pop('item_description')
                    if v['sa_date']:
                        # ISO timestamp -> "DD-Mon-YY" for display.
                        splt = str(v['sa_date']).split('T')
                        v['sa_date'] = datetime.datetime.strptime(str(splt[0]), "%Y-%m-%d").strftime("%d-%b-%y")
                    depobj = DepositAccount.objects.filter(pk=v["id"]).first()
                    v['description'] = depobj.description # transaction
                    v['type'] = depobj.type #treatment
                    if depobj.amount:
                        v["payment"] = "{:.2f}".format(float(depobj.amount))
                    else:
                        v["payment"] = "0.00"
                    if v["balance"]:
                        v["balance"] = "{:.2f}".format(float(v['balance']))
                    else:
                        v["balance"] = "0.00"
                    if v["outstanding"]:
                        v["outstanding"] = "{:.2f}".format(float(v['outstanding']))
                    else:
                        v["outstanding"] = "0.00"
                result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False,
                    'header_data':{'credit_balance':"{:.2f}".format(float(last.balance)) if last.balance else "0.00",
                    'outstanding_balance':"{:.2f}".format(float(last.outstanding)) if last.outstanding else "0.00",
                    "totalholdqty" : hold_qty},
                    'data': serializer.data}
                return Response(result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class PrepaidAccListViewset(viewsets.ModelViewSet):
    """Prepaid-account endpoints scoped to the authenticated user's login site.

    ``list``     -> one summary row per prepaid package of a customer, with
                    balances broken down by usage-condition type
                    (Product Only / Service Only / All).
    ``retrieve`` -> the full movement history (DEPOSIT / TOPUP / SA) of a
                    single prepaid-account row.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = PrepaidAccSerializer
    def list(self, request):
        """List prepaid accounts for ``cust_id``.

        Query params:
            cust_id -- primary key of an active Customer (required).
            is_all  -- when truthy, include rows with no remaining balance;
                       otherwise only rows with ``remain > 0`` are returned.
        """
        try:
            cust_id = self.request.GET.get('cust_id', None)
            cust_obj = Customer.objects.filter(pk=request.GET.get('cust_id', None),cust_isactive=True).only('pk','cust_isactive').first()
            if cust_obj is None:
                # NOTE(review): unknown customer is reported with HTTP 200 and
                # error=True here, while sibling endpoints use 400 — confirm
                # which contract the clients expect.
                result = {'status': status.HTTP_200_OK, "message": "Customer ID does not exist!!", 'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            fmspw = Fmspw.objects.filter(user=self.request.user, pw_isactive=True)[0]
            site = fmspw.loginsite
            is_all = self.request.GET.get('is_all',None)
            if is_all:
                # All DEPOSIT/SA rows regardless of remaining balance.
                queryset = PrepaidAccount.objects.filter(site_code=site.itemsite_code,cust_code=cust_obj.cust_code,
                    sa_status__in=['DEPOSIT','SA']).only('site_code','cust_code','sa_status').order_by('pk')
            else:
                # Only packages that still have balance left to spend.
                queryset = PrepaidAccount.objects.filter(site_code=site.itemsite_code,cust_code=cust_obj.cust_code,
                    sa_status__in=['DEPOSIT','SA'],remain__gt=0).only('site_code','cust_code','sa_status').order_by('pk')
            if queryset:
                serializer = self.get_serializer(queryset, many=True)
                # Running totals of balances per condition type for the header.
                lst = []; id_lst = []; product_type = 0; service_type = 0; all_type = 0
                for data in serializer.data:
                    data.pop('voucher_no'); data.pop('condition_type1')
                    preobj = PrepaidAccount.objects.filter(pk=data["id"]).only('pk').first()
                    if data["id"]:
                        id_lst.append(data["id"])
                    # sa_transacno_type='Receipt',ItemSite_Codeid__pk=site.pk
                    # Receipt header that created this prepaid package (for its ref no).
                    pos_haud = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,
                        sa_transacno=preobj.pp_no,itemsite_code=site.itemsite_code,
                        ).only('sa_custno','sa_transacno','sa_transacno_type','itemsite_code').order_by('pk').first()
                    if pos_haud:
                        data['prepaid'] = pos_haud.sa_transacno_ref if pos_haud.sa_transacno_ref else ""
                    # Latest movement of this package line — source of the
                    # current figures (amounts, expiry, remaining balance).
                    last_acc_ids = PrepaidAccount.objects.filter(pp_no=preobj.pp_no,
                        site_code=preobj.site_code,status=True,line_no=preobj.line_no).only('pp_no','site_code','status','line_no').last()
                    l_splt = str(data['last_update']).split("T")
                    data['last_update'] = datetime.datetime.strptime(str(l_splt[0]), "%Y-%m-%d").strftime("%d-%m-%Y")
                    if last_acc_ids:
                        if last_acc_ids.sa_date:
                            splt = str(last_acc_ids.sa_date).split(" ")
                            data['last_update'] = datetime.datetime.strptime(str(splt[0]), "%Y-%m-%d").strftime("%d-%m-%Y")
                        # Earliest movement of the line — its purchase date.
                        oriacc_ids = PrepaidAccount.objects.filter(pp_no=preobj.pp_no,
                            site_code=preobj.site_code,sa_status__in=['DEPOSIT','SA'],line_no=preobj.line_no).only('pp_no','site_code','sa_status','line_no').first()
                        if oriacc_ids.sa_date: #purchase date
                            #purchase date
                            splt_st = str(oriacc_ids.sa_date).split(" ")
                            data['sa_date'] = datetime.datetime.strptime(str(splt_st[0]), "%Y-%m-%d").strftime("%d-%m-%Y")
                        if last_acc_ids.pp_type:
                            # Human-readable package type from the item range table.
                            rangeobj = ItemRange.objects.filter(itm_code=last_acc_ids.pp_type).first()
                            if rangeobj:
                                data['type'] = rangeobj.itm_desc
                            else:
                                data['type'] = " "
                        if last_acc_ids.exp_date:
                            splt_ex = str(last_acc_ids.exp_date).split(" ")
                            # NOTE(review): expiry uses "%d-%b-%y" while the other
                            # dates above use "%d-%m-%Y" — confirm intentional.
                            data['exp_date'] = datetime.datetime.strptime(str(splt_ex[0]), "%Y-%m-%d").strftime("%d-%b-%y")
                        if last_acc_ids.exp_status:
                            # exp_status looks boolean: True -> "Open", False -> "Expired".
                            # The elif branch is unreachable inside `if exp_status:`;
                            # left as-is to preserve behavior.
                            if last_acc_ids.exp_status == True:
                                data['exp_status'] = "Open"
                            elif last_acc_ids.exp_status == False:
                                data['exp_status'] = "Expired"
                        else:
                            data['exp_status'] = ""
                        # Monetary figures are formatted as 2-decimal strings.
                        if last_acc_ids.pp_amt:
                            data['pp_amt'] = "{:.2f}".format(float(last_acc_ids.pp_amt))
                        if last_acc_ids.pp_bonus:
                            data['pp_bonus'] = "{:.2f}".format(float(last_acc_ids.pp_bonus))
                        if last_acc_ids.pp_total:
                            data['pp_total'] = "{:.2f}".format(float(last_acc_ids.pp_total))
                        if last_acc_ids.use_amt:
                            data['use_amt'] = "{:.2f}".format(float(last_acc_ids.use_amt ))
                        if last_acc_ids.remain:
                            data['remain'] = "{:.2f}".format(float(last_acc_ids.remain))
                        data['voucher'] = "P.P"
                        if last_acc_ids.topup_amt: # Deposit
                            data['topup_amt'] = "{:.2f}".format(float(last_acc_ids.topup_amt ))
                        if last_acc_ids.outstanding:
                            data['outstanding'] = "{:.2f}".format(float(last_acc_ids.outstanding))
                        # Usage condition attached to this package line decides
                        # which header bucket the balance is counted into.
                        open_ids = PrepaidAccountCondition.objects.filter(pp_no=preobj.pp_no,
                            pos_daud_lineno=preobj.line_no).only('pp_no','pos_daud_lineno').first()
                        data["product"] = 0.00;data["service"] = 0.00;data["all"] = 0.00
                        # NOTE(review): open_ids is dereferenced without a None
                        # check — a line with no condition row would raise here
                        # (caught by the outer except). Confirm conditions are
                        # guaranteed to exist.
                        if open_ids.conditiontype1 == "Product Only":
                            data["product"] = "{:.2f}".format(float(last_acc_ids.pp_amt))
                            product_type += last_acc_ids.pp_amt
                        elif open_ids.conditiontype1 == "Service Only":
                            data["service"] = "{:.2f}".format(float(last_acc_ids.pp_amt))
                            service_type += last_acc_ids.pp_amt
                        elif open_ids.conditiontype1 == "All":
                            data["all"] = "{:.2f}".format(float(last_acc_ids.pp_amt))
                            all_type += last_acc_ids.pp_amt
                        lst.append(data)
                if lst != []:
                    header_data = {"balance_producttype" : "{:.2f}".format(float(product_type)),
                        "balance_servicetype" : "{:.2f}".format(float(service_type)),
                        "balance_alltype" : "{:.2f}".format(float(all_type)),"totalprepaid_count" : len(id_lst)}
                    result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False,
                        'header_data':header_data, 'data': lst}
                    return Response(data=result, status=status.HTTP_200_OK)
                else:
                    result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
                    return Response(data=result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def get_object(self, pk):
        """Fetch one PrepaidAccount row by primary key or raise 404."""
        try:
            return PrepaidAccount.objects.get(pk=pk)
        except PrepaidAccount.DoesNotExist:
            raise Http404
    def retrieve(self, request, pk=None):
        """Return all movements of the prepaid line identified by ``pk``,
        plus a header with current/aggregate balances."""
        try:
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True).first()
            site = fmspw.loginsite
            account = self.get_object(pk)
            # All rows of the same package line at the same site.
            queryset = PrepaidAccount.objects.filter(pp_no=account.pp_no,line_no=account.line_no,
                site_code=account.site_code).only('pp_no','line_no').order_by('pk')
            if queryset:
                # Latest movement supplies the header figures.
                last = queryset.last()
                serializer = PrepaidacSerializer(queryset, many=True)
                # sa_transacno_type='Receipt',ItemSite_Codeid__pk=site.pk
                pos_haud = PosHaud.objects.filter(sa_custno=account.cust_code,
                    sa_transacno=account.pp_no,itemsite_code=site.itemsite_code,
                    ).only('sa_custno','sa_transacno','sa_transacno_type','itemsite_code').order_by('pk').first()
                for v in serializer.data:
                    if pos_haud:
                        v['prepaid_ref'] = pos_haud.sa_transacno_ref
                    else:
                        v['prepaid_ref'] = ""
                    ppobj = PrepaidAccount.objects.filter(pk=v["id"]).first()
                    # DEPOSIT/TOPUP rows have no sales transaction details;
                    # SA rows are resolved against the POS header.
                    if ppobj.sa_status in ['DEPOSIT','TOPUP']:
                        v['old_transaction'] = "-"
                        v['transaction_ref'] = "-"
                        v['voucher#'] = "-"
                        v['item_no'] = "-"
                        v['item_name'] = "-"
                    elif ppobj.sa_status == 'SA':
                        if ppobj.transac_no:
                            poshaud = PosHaud.objects.filter(sa_custno=account.cust_code,
                                sa_transacno=ppobj.transac_no,ItemSite_Codeid__pk=site.pk).only('sa_custno','sa_transacno').order_by('pk').first()
                            if poshaud:
                                v['old_transaction'] = poshaud.sa_transacno
                                v['transaction_ref'] = poshaud.sa_transacno_ref
                        else:
                            v['old_transaction'] = "-"
                            v['transaction_ref'] = "-"
                        v['voucher#'] = "-"
                        v['item_no'] = ppobj.item_no if ppobj.item_no else "-"
                        stockobj = Stock.objects.filter(item_code=ppobj.item_no).only('item_code').first()
                        if stockobj:
                            v['item_name'] = stockobj.item_name if stockobj.item_name else "-"
                        else:
                            v['item_name'] = "-"
                    else:
                        v['old_transaction'] = "-";v['transaction_ref'] = "-";v['voucher#'] = "-";v['item_no'] = "-"
                        v['item_name'] = "-";
                    # NOTE(review): value is a formatted *string* when truthy but
                    # a *float* 0.00 otherwise — confirm clients tolerate both.
                    v['use_amt'] = "{:.2f}".format(float(v['use_amt'])) if v['use_amt'] else 0.00
                    if ppobj.sa_status == 'DEPOSIT':
                        v['topup_amt'] = "-"
                        v['topup_no'] = "-"
                        v['topup_date'] = "-"
                        v['status'] = "-"
                    elif ppobj.sa_status == 'TOPUP':
                        v['topup_amt'] = "{:.2f}".format(float(v['topup_amt'])) if v['topup_amt'] else ""
                        v['topup_no'] = ppobj.topup_no
                        if ppobj.topup_date:
                            splt = str(ppobj.topup_date).split(" ")
                            v['topup_date'] = datetime.datetime.strptime(str(splt[0]), "%Y-%m-%d").strftime("%d-%b-%y")
                        v['status'] = ppobj.sa_status
                    elif ppobj.sa_status == 'SA':
                        v['topup_amt'] = "-"
                        v['topup_no'] = "-"
                        v['topup_date'] = "-"
                        v['status'] = "-"
                    else:
                        v['topup_amt'] = "-";v['topup_no'] = "-";v['topup_date'] = "-";v['status'] = "-"
                    v['balance'] = "{:.2f}".format(float(ppobj.remain)) if ppobj.remain else 0.00
                    v['supplementary'] = ""
                # Sum of deposits/top-ups over the whole line for the header.
                depoamt_acc_ids = PrepaidAccount.objects.filter(pp_no=account.pp_no,
                    site_code=account.site_code,line_no=account.line_no,sa_status__in=('DEPOSIT', 'TOPUP','SA')).only('pp_no','site_code','line_no','sa_status').aggregate(Sum('topup_amt'))
                # NOTE(review): aggregate() always returns a dict, so the
                # `if depoamt_acc_ids` guard is always true; if the sum is None
                # (no topup_amt values) float(None) raises — confirm intended.
                result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False,
                    'header_data':{'prepaid_amount':"{:.2f}".format(float(last.pp_amt)) if last.pp_amt else "0.00",
                    'used_amount':"{:.2f}".format(float(last.use_amt)) if last.use_amt else "0.00",
                    'bonus':"{:.2f}".format(float(last.pp_bonus)) if last.pp_bonus else "0.00",
                    'balance':"{:.2f}".format(float(last.remain)) if last.remain else "0.00",
                    'outstanding':"{:.2f}".format(float(last.outstanding)) if last.outstanding else "0.00",
                    'deposit_amount': "{:.2f}".format(float(depoamt_acc_ids['topup_amt__sum'])) if depoamt_acc_ids else "0.00"},
                    'data': serializer.data}
                return Response(result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
class ComboViewset(viewsets.ModelViewSet):
    """Read endpoints for the active combo services of the login site."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = ComboServicesSerializer

    def get_queryset(self):
        """Active combo services belonging to the authenticated user's site."""
        pw_row = Fmspw.objects.filter(user=self.request.user, pw_isactive=True).first()
        login_site = pw_row.loginsite
        return Combo_Services.objects.filter(
            Isactive=True, Site_Code__pk=login_site.pk).order_by('-pk')

    def list(self, request):
        """Paginated combo-service listing via the shared ``response`` helper."""
        try:
            records = self.filter_queryset(self.get_queryset())
            payload = response(
                self, request, records, len(records), status.HTTP_200_OK,
                "Listed Succesfully", False, ComboServicesSerializer, None,
                action=self.action)
            return Response(payload, status=status.HTTP_200_OK)
        except Exception as exc:
            return general_error_response(str(exc))

    def get_object(self, pk):
        """Fetch one active combo service by pk or raise 404."""
        try:
            return Combo_Services.objects.get(pk=pk, Isactive=True)
        except Combo_Services.DoesNotExist:
            raise Http404

    def retrieve(self, request, pk=None):
        """Serialize and return a single combo service."""
        record = self.get_object(pk)
        body = ComboServicesSerializer(record, context={'request': self.request})
        return Response(
            {'status': status.HTTP_200_OK, "message": "Listed Succesfully",
             'error': False, 'data': body.data},
            status=status.HTTP_200_OK)
class DashboardAPIView(APIView):
    """Return dashboard figures for the authenticated user's login site."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = DashboardSerializer

    def get(self, request):
        """Serialize the user's login site through DashboardSerializer."""
        try:
            pw_row = Fmspw.objects.filter(user=self.request.user, pw_isactive=True)[0]
            sites = ItemSitelist.objects.filter(pk=pw_row.loginsite.pk)
            payload = DashboardSerializer(sites, many=True).data[0]
            return Response(
                {'status': status.HTTP_200_OK, "message": "Listed Successful",
                 'error': False, 'data': payload},
                status=status.HTTP_200_OK)
        except Exception as exc:
            return general_error_response(str(exc))
class BillingViewset(viewsets.ModelViewSet):
    """Billing (POS header) listing for the login site with optional filters.

    Supported query params: ``from_date``/``to_date`` (inclusive range on
    ``sa_date``), ``transac_no`` (substring of ``sa_transacno_ref``),
    ``cust_code`` and ``cust_name`` (case-insensitive substring matches).
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = BillingSerializer

    def get_queryset(self):
        """Build the filtered PosHaud queryset for the user's login site.

        Fix vs. original: removed the redundant outer "no filters" branch
        (it only did ``queryset = queryset``), the unused ``year`` local,
        and the repeated ``order_by('-pk')`` on every filter — ``filter()``
        preserves the base ordering, so behavior is unchanged.
        """
        fmspw = Fmspw.objects.filter(user=self.request.user, pw_isactive=True)
        site = fmspw[0].loginsite
        from_date = self.request.GET.get('from_date', None)
        to_date = self.request.GET.get('to_date', None)
        transac_no = self.request.GET.get('transac_no', None)
        cust_code = self.request.GET.get('cust_code', None)
        cust_name = self.request.GET.get('cust_name', None)
        queryset = PosHaud.objects.filter(ItemSite_Codeid__pk=site.pk).order_by('-pk')
        # Each supplied parameter narrows the same base queryset; the date
        # range only applies when BOTH bounds are given (as before).
        if from_date and to_date:
            queryset = queryset.filter(sa_date__date__gte=from_date,
                                       sa_date__date__lte=to_date)
        if transac_no:
            queryset = queryset.filter(sa_transacno_ref__icontains=transac_no)
        if cust_code:
            queryset = queryset.filter(sa_custno__icontains=cust_code)
        if cust_name:
            queryset = queryset.filter(sa_custname__icontains=cust_name)
        return queryset

    def list(self, request):
        """Paginated billing listing via the shared ``response`` helper."""
        try:
            queryset = self.filter_queryset(self.get_queryset()).order_by('-pk')
            result = response(self, request, queryset, len(queryset),
                              status.HTTP_200_OK, "Listed Succesfully", False,
                              BillingSerializer, None, action=self.action)
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            return general_error_response(str(e))
class CreditNotePayAPIView(APIView):
    """List a customer's OPEN credit notes at the login site."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = CreditNotePaySerializer

    def get(self, request):
        """Return OPEN credit notes for ``cust_id``; 400 if the customer is unknown."""
        try:
            pw_row = Fmspw.objects.filter(user=self.request.user, pw_isactive=True)[0]
            login_site = pw_row.loginsite
            cust_id = self.request.GET.get('cust_id', None)
            customer = Customer.objects.filter(
                pk=request.GET.get('cust_id', None),
                cust_isactive=True).only('pk', 'cust_isactive').first()
            if customer is None:
                return Response(
                    data={'status': status.HTTP_400_BAD_REQUEST,
                          "message": "Customer ID does not exist!!", 'error': True},
                    status=status.HTTP_400_BAD_REQUEST)
            notes = CreditNote.objects.filter(
                cust_code=customer.cust_code, status='OPEN',
                site_code=login_site.itemsite_code).only('cust_code', 'status').order_by('pk')
            if notes:
                rows = CreditNotePaySerializer(notes, many=True)
                body = {'status': status.HTTP_200_OK, "message": "Listed Succesfully",
                        'error': False, 'data': rows.data}
            else:
                body = {'status': status.HTTP_204_NO_CONTENT, "message": "No Content",
                        'error': False, 'data': []}
            # Both outcomes are delivered with HTTP 200; the payload carries
            # the logical status, matching the sibling endpoints.
            return Response(data=body, status=status.HTTP_200_OK)
        except Exception as exc:
            return general_error_response(str(exc))
class PrepaidPayViewset(viewsets.ModelViewSet):
    """Prepaid-payment flow: list a customer's prepaid accounts together with
    the in-progress cart lines, and validate (``partial_update``) that a chosen
    prepaid account's usage condition matches the cart contents."""
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    serializer_class = PrepaidPaySerializer
    queryset = PrepaidAccount.objects.filter().order_by('-id')
    def get_queryset(self,request):
        """Return the payable in-progress cart lines for cust_id/cart_id.

        NOTE(review): on validation failure this returns a DRF ``Response``
        object instead of a queryset; ``list`` then passes it straight to
        ``filter_queryset``/the serializer — confirm the error paths actually
        reach the client as intended.
        """
        global type_ex
        # NOTE(review): mutating the module-global list on every call appends
        # a duplicate 'Sales' each request — the list grows unboundedly.
        # Consider a local `type_ex + ['Sales']` instead; left as-is here.
        type_ex.append('Sales')
        fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)[0]
        # print(fmspw,"fmspw")
        site = fmspw.loginsite
        cart_date = timezone.now().date()
        cust_obj = Customer.objects.filter(pk=self.request.GET.get('cust_id', None),cust_isactive=True).only('pk','cust_isactive').first()
        cart_id = self.request.GET.get('cart_id',None)
        # A cart that is already Completed/paid cannot be paid again.
        cartc_ids = ItemCart.objects.filter(isactive=True,cart_date=cart_date,
            cart_id=cart_id,cart_status="Completed",is_payment=True,sitecode=site.itemsite_code).exclude(type__in=type_ex).order_by('lineno')
        # print(cartc_ids,"cartc_ids")
        if cartc_ids:
            result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Invalid Cart ID,Send correct Cart Id,Given Cart ID Payment done!!",'error': True}
            return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
        # Only unpaid in-progress lines for product (1) / service (3) items.
        cartids = ItemCart.objects.filter(cust_noid=cust_obj,cart_id=cart_id,cart_date=cart_date,
            cart_status="Inprogress",isactive=True,is_payment=False,sitecode=site.itemsite_code,
            itemcodeid__item_div__in=[1,3]).exclude(type__in=type_ex).order_by('lineno')
        if not cartids:
            result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Given Cart ID does not exist!!", 'error': True}
            return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
        return cartids
    def list(self, request):
        """Return the customer's active prepaid accounts plus the current
        cart lines (``cust_id`` and ``cart_id`` query params required)."""
        try:
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)[0]
            site = fmspw.loginsite
            cust_id = self.request.GET.get('cust_id', None)
            cust_obj = Customer.objects.filter(pk=request.GET.get('cust_id', None),cust_isactive=True).only('pk','cust_isactive').first()
            if cust_obj is None:
                result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Customer ID does not exist!!", 'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            cart_id = self.request.GET.get('cart_id',None)
            if not cart_id:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"cart_id is not given",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            cartids = self.filter_queryset(self.get_queryset(request))
            cartquery = []
            if cartids:
                cartquery = CartPrepaidSerializer(cartids, many=True)
            queryset = PrepaidAccount.objects.filter(site_code=site.itemsite_code,cust_code=cust_obj.cust_code,status=True).only('site_code','cust_code','status').order_by('pk')
            if queryset:
                serializer = PrepaidPaySerializer(queryset, many=True)
                data = {'pp_data':serializer.data,'cart_data': cartquery.data if cartquery else []}
                result = {'status': status.HTTP_200_OK, "message": "Listed Succesfully", 'error': False, 'data': data}
                return Response(data=result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT,"message": "No Content",'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def get_object(self, pk):
        """Fetch one PrepaidAccount row by primary key or raise 404."""
        try:
            return PrepaidAccount.objects.get(pk=pk)
        except PrepaidAccount.DoesNotExist:
            raise Http404
    def partial_update(self, request, pk=None):
        """Validate that prepaid account ``pk`` may pay cart ``cart_id``:
        the account's condition type must match the item divisions present
        in the cart. Returns a check result; does not persist changes."""
        try:
            global type_ex
            # NOTE(review): same unbounded global append as in get_queryset.
            type_ex.append('Sales')
            pp = self.get_object(pk)
            serializer = PrepaidPaySerializer(pp, data=request.data, partial=True)
            fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)[0]
            site = fmspw.loginsite
            cart_date = timezone.now().date()
            cust_obj = Customer.objects.filter(cust_code=pp.cust_code,cust_isactive=True).only('cust_code','cust_isactive').first()
            cart_id = self.request.GET.get('cart_id',None)
            if not cart_id:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"cart_id is not given",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            cartc_ids = ItemCart.objects.filter(isactive=True,cart_date=cart_date,
                cart_id=cart_id,cart_status="Completed",is_payment=True,sitecode=site.itemsite_code).exclude(type__in=type_ex).order_by('lineno')
            if cartc_ids:
                result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Invalid Cart ID,Send correct Cart Id,Given Cart ID Payment done!!",'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            cartids = ItemCart.objects.filter(cust_noid=cust_obj,cart_id=cart_id,cart_date=cart_date,
                cart_status="Inprogress",isactive=True,is_payment=False,sitecode=site.itemsite_code,
                itemcodeid__item_div__in=[1,3]).exclude(type__in=type_ex).order_by('lineno')
            if not cartids:
                result = {'status': status.HTTP_400_BAD_REQUEST, "message": "Given Cart ID does not exist!!", 'error': True}
                return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
            if serializer.is_valid():
                # Distinct item divisions present in the cart.
                div = list(set([c.itemcodeid.item_div for c in cartids if c.itemcodeid.item_div]))
                open_ids = PrepaidAccountCondition.objects.filter(pp_no=pp.pp_no,
                    pos_daud_lineno=pp.line_no).only('pp_no','pos_daud_lineno').first()
                if open_ids:
                    # NOTE(review): `div` members come from item_div which is
                    # filtered with the ints [1, 3] above, yet the checks below
                    # compare against the strings '1'/'3' — confirm item_div's
                    # type; if it is an int field these checks always fail.
                    if open_ids.conditiontype1 == "Product Only":
                        if '1' not in div:
                            result = {'status': status.HTTP_400_BAD_REQUEST,"message":"No Condition found for Retail Product in order list",'error': True}
                            return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                    elif open_ids.conditiontype1 == "Service Only":
                        if '3' not in div:
                            result = {'status': status.HTTP_400_BAD_REQUEST,"message":"No Condition found for Service in order list",'error': True}
                            return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                    elif open_ids.conditiontype1 == "All":
                        if '1' not in div and '3' not in div:
                            result = {'status': status.HTTP_400_BAD_REQUEST,"message":"No Condition found for Service/Retail Product in order list",'error': True}
                            return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                result = {'status': status.HTTP_200_OK,"message":"Checked Succesfully",'error': False}
                return Response(result, status=status.HTTP_200_OK)
            result = {'status': status.HTTP_204_NO_CONTENT,"message":serializer.errors,'error': True}
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
# class DeleteAPIView(generics.CreateAPIView):
# authentication_classes = [TokenAuthentication]
# permission_classes = [IsAuthenticated & authenticated_only]
# def post(self, request):
# cart_ids = ItemCart.objects.filter(customercode='HQ100022',price=0)
# treat_ids = Treatment.objects.filter(cust_code='HQ100022',unit_amount=0)
# return Response(data="deleted sucessfully", status=status.HTTP_200_OK)
# class ControlAPIView(generics.CreateAPIView):
# authentication_classes = [TokenAuthentication]
# permission_classes = [IsAuthenticated & authenticated_only]
# def post(self, request):
# site_ids = ItemSitelist.objects.filter().exclude(itemsite_code='HQ')
# control_ids = ControlNo.objects.filter(site_code='HQ')
# for s in site_ids:
# for c in control_ids:
# ControlNo(control_no=c.control_no,control_prefix=c.control_prefix,
# control_description=c.control_description,controldate=c.controldate,
# Site_Codeid=s,site_code=s.itemsite_code,mac_code=c.mac_code).save()
# return Response(data="Created Sucessfully", status=status.HTTP_200_OK)
# @register.filter
# def get_item(dictionary, key):
# return dictionary.get(key)
class HolditemdetailViewset(viewsets.ModelViewSet):
    """Held-item (product hold) endpoints.

    ``list``     -> OPEN hold lines of a customer that are backed by a
                    Receipt header and a deposit-account SA row.
    ``retrieve`` -> one hold line.
    ``issued``   -> issue quantities from held items: validates the whole
                    request, appends a new history row per item, closes the
                    line when the balance reaches zero, and renders a PDF
                    receipt for the issue.
    """
    authentication_classes = [ExpiringTokenAuthentication]
    permission_classes = [IsAuthenticated & authenticated_only]
    queryset = Holditemdetail.objects.filter().order_by('-id')
    serializer_class = HolditemdetailSerializer
    def list(self, request):
        """List issuable OPEN hold lines for ``cust_id`` at the login site."""
        try:
            cust_id = self.request.GET.get('cust_id', None)
            cust_obj = Customer.objects.filter(pk=request.GET.get('cust_id', None),cust_isactive=True).only('pk','cust_isactive').first()
            if cust_obj is None:
                # NOTE(review): unknown customer reported as HTTP 200 +
                # error=True (sibling endpoints use 400) — confirm contract.
                result = {'status': status.HTTP_200_OK, "message": "Customer ID does not exist!!", 'error': True}
                return Response(data=result, status=status.HTTP_200_OK)
            fmspw = Fmspw.objects.filter(user=self.request.user, pw_isactive=True)[0]
            site = fmspw.loginsite
            queryset = Holditemdetail.objects.filter(itemsite_code=site.itemsite_code,sa_custno=cust_obj.cust_code,
                status='OPEN').order_by('-pk')
            # Distinct transaction numbers of the customer's OPEN holds.
            satrasc_ids = list(set([e.sa_transacno for e in queryset if e.sa_transacno]))
            # print(satrasc_ids,"satrasc_ids")
            lst = [] ; final = []
            if satrasc_ids:
                for q in satrasc_ids:
                    # print(q,"sa_transacno")
                    # Only holds that originate from an actual Receipt header.
                    pos_haud = PosHaud.objects.filter(sa_custno=cust_obj.cust_code,
                        sa_transacno=q,sa_transacno_type='Receipt',
                        ItemSite_Codeid__pk=site.pk).only('sa_custno','sa_transacno','sa_transacno_type').order_by('pk').first()
                    # print(pos_haud,"pos_haud")
                    if pos_haud:
                        line_ids = Holditemdetail.objects.filter(itemsite_code=site.itemsite_code,sa_custno=cust_obj.cust_code,
                            status='OPEN',sa_transacno=q).order_by('-pk')
                        lineno_ids = list(set([e.hi_lineno for e in line_ids if e.hi_lineno]))
                        # print(lineno_ids,"lineno_ids")
                        if lineno_ids:
                            for l in lineno_ids:
                                # print(l,"line noo")
                                # Latest history row of this transaction line.
                                queryids = Holditemdetail.objects.filter(itemsite_code=site.itemsite_code,sa_custno=cust_obj.cust_code,
                                    status='OPEN',sa_transacno=q,hi_lineno=l
                                    ).only('itemsite_code','sa_custno','status','sa_transacno','itemno','hi_lineno').order_by('pk').last()
                                # print(queryids,"queryids")
                                if queryids:
                                    # Matching SA deposit row supplies the
                                    # outstanding amount (full vs partial pay).
                                    depoids = DepositAccount.objects.filter(site_code=site.itemsite_code,cust_code=cust_obj.cust_code,
                                        sa_status="SA",sa_transacno=q,item_barcode=queryids.itemno,dt_lineno=l).only('site_code','cust_code','sa_status').order_by('pk').last()
                                    # print(depoids,"depoids")
                                    if depoids:
                                        laqueryids = Holditemdetail.objects.filter(itemsite_code=site.itemsite_code,sa_custno=cust_obj.cust_code,
                                            status='OPEN',sa_transacno=q,hi_lineno=l,itemno=queryids.itemno
                                            ).only('itemsite_code','sa_custno','status','sa_transacno','itemno','hi_lineno').order_by('pk').last()
                                        # print(laqueryids,"laqueryids")
                                        if laqueryids:
                                            # De-duplicate by primary key.
                                            if laqueryids.pk not in lst:
                                                lst.append(laqueryids.pk)
                                                if laqueryids.sa_date:
                                                    # print(laqueryids.sa_date,"data['sa_date']")
                                                    splt = str(laqueryids.sa_date).split(" ")
                                                    sa_date = datetime.datetime.strptime(str(splt[0]), "%Y-%m-%d").strftime("%d/%m/%Y")
                                                check = ""
                                                if depoids.outstanding == 0:
                                                    check = "fullpay"
                                                elif depoids.outstanding > 0:
                                                    check = "partialpay"
                                                # NOTE(review): if sa_date is
                                                # empty, `sa_date` below is
                                                # stale/undefined from a prior
                                                # iteration — confirm sa_date
                                                # is always populated.
                                                val ={'id':laqueryids.pk,'sa_date':sa_date,'sa_transacno_ref':pos_haud.sa_transacno_ref,
                                                    'hi_itemdesc':laqueryids.hi_itemdesc,'itemno':laqueryids.itemno,
                                                    'holditemqty':laqueryids.holditemqty,'qty_issued':"",'staff_issued':"",'check':check}
                                                final.append(val)
            # print(lst,"lst")
            if final != []:
                result = {'status': status.HTTP_200_OK,"message":"Listed Succesfully",'error': False,
                    'data': final}
                return Response(data=result, status=status.HTTP_200_OK)
            else:
                result = {'status': status.HTTP_204_NO_CONTENT, 'message': "No Content", 'error': False, 'data': []}
                return Response(data=result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    def get_object(self, pk):
        """Fetch one Holditemdetail row by primary key or raise 404."""
        try:
            return Holditemdetail.objects.get(pk=pk)
        except Holditemdetail.DoesNotExist:
            raise Http404
    def retrieve(self, request, pk=None):
        """Serialize and return a single hold line."""
        try:
            holditem = self.get_object(pk)
            serializer = HolditemSerializer(holditem)
            result = {'status': status.HTTP_200_OK , "message": "Listed Succesfully", 'error': False, 'data': serializer.data}
            return Response(result, status=status.HTTP_200_OK)
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
    @action(methods=['post'], detail=False, permission_classes=[IsAuthenticated & authenticated_only],
        authentication_classes=[ExpiringTokenAuthentication])
    def issued(self, request):
        """Issue quantities from held items.

        Request body: a list of ``{id, issued_qty, emp_id}`` dicts, where
        ``id`` is matched against ``hi_no``. The whole payload is validated
        first (so nothing is persisted on a bad row), then each item gets a
        new history row; when the remaining quantity reaches zero the whole
        line is set to CLOSE. Finally a PDF receipt is rendered and its URL
        returned.
        """
        try:
            if request.data:
                fmspw = Fmspw.objects.filter(user=self.request.user,pw_isactive=True)
                site = fmspw[0].loginsite
                # Pass 1: validate every row before mutating anything.
                for idx, reqt in enumerate(request.data, start=1):
                    hold_obj = Holditemdetail.objects.filter(hi_no=reqt['id']).first()
                    if not hold_obj:
                        raise Exception('Holditemdetail id Does not exist')
                    cust_obj = Customer.objects.filter(cust_code=hold_obj.sa_custno,cust_isactive=True,site_code=site.itemsite_code).first()
                    if not cust_obj:
                        result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Customer ID does not exist!!",'error': True}
                        return Response(data=result, status=status.HTTP_400_BAD_REQUEST)
                    if not reqt['issued_qty']:
                        msg = "{0} This Product issued qty should not empty".format(str(hold_obj.hi_itemdesc))
                        result = {'status': status.HTTP_400_BAD_REQUEST,"message": msg,'error': True}
                        return Response(result, status=status.HTTP_400_BAD_REQUEST)
                    if not reqt['emp_id']:
                        msg = "{0} This Product staff issued should not empty".format(str(hold_obj.hi_itemdesc))
                        result = {'status': status.HTTP_400_BAD_REQUEST,"message": msg,'error': True}
                        return Response(result, status=status.HTTP_400_BAD_REQUEST)
                    if int(reqt['issued_qty']) <= 0:
                        msg = "{0} This Product issued qty should not be less than 0".format(str(hold_obj.hi_itemdesc))
                        result = {'status': status.HTTP_400_BAD_REQUEST,"message": msg,'error': True}
                        return Response(result, status=status.HTTP_400_BAD_REQUEST)
                    if int(reqt['issued_qty']) > int(hold_obj.holditemqty) :
                        msg = "{0} This Product should not greater than Qty Hold".format(str(hold_obj.hi_itemdesc))
                        result = {'status': status.HTTP_400_BAD_REQUEST,"message": msg,'error': True}
                        return Response(result, status=status.HTTP_400_BAD_REQUEST)
                    empobj = Employee.objects.filter(pk=int(reqt['emp_id']),emp_isactive=True).first()
                    if not empobj:
                        result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Employee ID does not exist!!",'error': True}
                        return Response(result, status=status.HTTP_400_BAD_REQUEST)
                # print(request.data,"request.data")
                # Pass 2: record the issues.
                lst = []
                for idx, req in enumerate(request.data, start=1):
                    # print(req,"req")
                    serializer = HolditemupdateSerializer(data=req)
                    if serializer.is_valid():
                        holdobj = Holditemdetail.objects.filter(hi_no=req['id']).first()
                        new_balance = int(holdobj.holditemqty) - int(req['issued_qty'])
                        # Receipt line used by the PDF template below.
                        val = {'sa_transacno':holdobj.sa_transacno,'hi_itemdesc':holdobj.hi_itemdesc,
                            'balance':holdobj.holditemqty,'issued_qty':int(req['issued_qty']),
                            'new_balance':new_balance,'id': holdobj.pk}
                        lst.append(val)
                        emp_obj = Employee.objects.filter(pk=int(req['emp_id']),emp_isactive=True).first()
                        remainqty = int(holdobj.holditemqty) - int(req['issued_qty'])
                        # print(remainqty,"remainqty")
                        laqueryids = Holditemdetail.objects.filter(itemsite_code=site.itemsite_code,sa_custno=holdobj.sa_custno,
                            status='OPEN',sa_transacno=holdobj.sa_transacno,hi_lineno=holdobj.hi_lineno,itemno=holdobj.itemno
                            ).only('itemsite_code','sa_custno','status','sa_transacno','itemno','hi_lineno').order_by('pk')
                        # print(laqueryids,"laqueryids")
                        # Next sequence number in the line's history.
                        length = len(laqueryids) + 1
                        con_obj = ControlNo.objects.filter(control_description__iexact="Product Issues",Site_Codeid__pk=fmspw[0].loginsite.pk).first()
                        if not con_obj:
                            result = {'status': status.HTTP_400_BAD_REQUEST,"message":"Product Issues Control No does not exist!!",'error': True}
                            return Response(result, status=status.HTTP_400_BAD_REQUEST)
                        # NOTE(review): the control number is read but never
                        # incremented here — confirm whether issue numbers are
                        # expected to be unique per call.
                        product_issues_no = str(con_obj.control_prefix)+str(con_obj.Site_Codeid.itemsite_code)+str(con_obj.control_no)
                        # New history row carrying the reduced balance.
                        hold = Holditemdetail(itemsite_code=site.itemsite_code,sa_transacno=holdobj.sa_transacno,
                            transacamt=holdobj.transacamt,itemno=holdobj.itemno,
                            hi_staffno=emp_obj.emp_code,
                            hi_itemdesc=holdobj.hi_itemdesc,hi_price=holdobj.hi_price,hi_amt=holdobj.hi_amt,hi_qty=holdobj.hi_qty,
                            hi_staffname=emp_obj.emp_name,
                            hi_lineno=holdobj.hi_lineno,hi_uom=holdobj.hi_uom,hold_item=True,hi_deposit=holdobj.hi_deposit,
                            holditemqty=remainqty,sa_custno=holdobj.sa_custno,
                            sa_custname=holdobj.sa_custname,history_line=length,hold_type=holdobj.hold_type,
                            product_issues_no=product_issues_no)
                        if remainqty == 0:
                            # Fully issued: close every prior OPEN row of the
                            # line, and save the new row as CLOSE too.
                            oldqueryids = Holditemdetail.objects.filter(itemsite_code=site.itemsite_code,sa_custno=holdobj.sa_custno,
                                status='OPEN',sa_transacno=holdobj.sa_transacno,hi_lineno=holdobj.hi_lineno,itemno=holdobj.itemno
                                ).only('itemsite_code','sa_custno','status','sa_transacno','itemno','hi_lineno').order_by('pk').update(status="CLOSE")
                            print(oldqueryids,"oldqueryids")
                            hold.status = "CLOSE"
                            hold.save()
                        elif remainqty > 0:
                            hold.status = "OPEN"
                            hold.save()
                    else:
                        result = {'status': status.HTTP_400_BAD_REQUEST,"message":serializer.errors,'error': True}
                        return Response(result, status=status.HTTP_400_BAD_REQUEST)
                if lst != []:
                    # print(lst[0])
                    # Render the issue receipt PDF from the first issued row.
                    value = lst[0]['id']
                    # print(value,"value")
                    title = Title.objects.filter(product_license=site.itemsite_code).first()
                    # holdids = Holditemdetail.objects.filter(pk__in=lst)
                    hold_ids = Holditemdetail.objects.filter(pk=value).order_by('-pk').first()
                    path = None
                    if title and title.logo_pic:
                        path = BASE_DIR + title.logo_pic.url
                    split = str(hold_ids.sa_date).split(" ")
                    date = datetime.datetime.strptime(str(split[0]), '%Y-%m-%d').strftime("%d/%m/%Y")
                    # NOTE(review): cust_obj below is the one left over from the
                    # last iteration of the validation loop — confirm all rows
                    # belong to the same customer.
                    data = {'name': title.trans_h1 if title and title.trans_h1 else '',
                        'address': title.trans_h2 if title and title.trans_h2 else '',
                        'footer1':title.trans_footer1 if title and title.trans_footer1 else '',
                        'footer2':title.trans_footer2 if title and title.trans_footer2 else '',
                        'footer3':title.trans_footer3 if title and title.trans_footer3 else '',
                        'footer4':title.trans_footer4 if title and title.trans_footer4 else '',
                        'hold_ids': hold_ids, 'date':date,
                        'hold': lst,'cust':cust_obj,'staff':hold_ids.hi_staffname,'fmspw':fmspw,
                        'path':path if path else '','title':title if title else None,
                        }
                    template = get_template('hold_item.html')
                    # Headless X display for wkhtmltopdf rendering.
                    display = Display(visible=0, size=(800, 600))
                    display.start()
                    html = template.render(data)
                    options = {
                        'margin-top': '.25in',
                        'margin-right': '.25in',
                        'margin-bottom': '.25in',
                        'margin-left': '.25in',
                        'encoding': "UTF-8",
                        'no-outline': None,
                    }
                    dst ="holditem_" + str(str(hold_ids.sa_transacno)) + ".pdf"
                    # pdfkit returns the PDF bytes (output path False).
                    p=pdfkit.from_string(html,False,options=options)
                    PREVIEW_PATH = dst
                    # NOTE(review): this FPDF block writes a placeholder file
                    # that is immediately overwritten by the pdfkit bytes below
                    # — presumably it only guarantees the file exists; confirm
                    # whether it can be removed.
                    pdf = FPDF()
                    pdf.add_page()
                    pdf.set_font("Arial", size = 15)
                    file_path = os.path.join(settings.PDF_ROOT, PREVIEW_PATH)
                    pdf.output(file_path)
                    if p:
                        file_path = os.path.join(settings.PDF_ROOT, PREVIEW_PATH)
                        report = os.path.isfile(file_path)
                        if report:
                            file_path = os.path.join(settings.PDF_ROOT, PREVIEW_PATH)
                            with open(file_path, 'wb') as fh:
                                fh.write(p)
                    display.stop()
                    ip_link = "http://"+request.META['HTTP_HOST']+"/media/pdf/holditem_"+str(hold_ids.sa_transacno)+".pdf"
                    result = {'status': status.HTTP_200_OK,"message":"Updated Succesfully",
                        'error': False,'data': ip_link}
                    return Response(result, status=status.HTTP_200_OK)
            else:
                raise Exception('Request body data does not exist')
        except Exception as e:
            invalid_message = str(e)
            return general_error_response(invalid_message)
|
# Given a 2D binary grid, count the number of "corner rectangles". This one
# took some effort: the key insight is that if two rows can form a rectangle,
# AND-ing the two rows leaves 1s exactly at the shared corner columns, and the
# rest are 0. With n shared 1-columns, the row pair contributes C(n, 2).
class Solution:
    """LeetCode 750: count corner rectangles in a binary grid.

    For every pair of rows, AND their column bitmasks: each set bit marks a
    column where both rows hold a 1. Any two such shared columns form one
    corner rectangle, so a pair with b shared 1-columns contributes C(b, 2).
    """

    def countCornerRectangles(self, grid):
        """
        :type grid: List[List[int]]
        :rtype: int
        """
        # Robustness: an empty grid (or empty rows) can form no rectangle.
        if not grid or not grid[0]:
            return 0
        # Precompute each row's bitmask once, instead of lazily caching
        # inside the pair loop as before.
        masks = [int("".join(str(cell) for cell in row), 2) for row in grid]
        count = 0
        for i in range(len(masks)):
            for j in range(i + 1, len(masks)):
                shared = bin(masks[i] & masks[j]).count("1")
                # C(shared, 2) rectangles for this row pair.
                count += shared * (shared - 1) // 2
        return count
print(Solution().countCornerRectangles([[1, 1, 1]]))
|
from requirementmanager.mongodb import (
requirement_collection, requirement_tree_collection,
archive_requirement_collection, archive_requirement_tree_collection
)
if __name__ == '__main__':
    # Wipe both the live and the archived requirement collections.
    for collection in (
        requirement_collection,
        requirement_tree_collection,
        archive_requirement_collection,
        archive_requirement_tree_collection,
    ):
        collection.drop()
|
import numpy as np
def genGaussian(x, mu, sigma):
    """Normal pdf with mean `mu` and std `sigma`, evaluated at x (scalar or ndarray)."""
    z = (x - mu) / sigma
    return np.exp(-0.5 * z ** 2) / (np.sqrt(2 * np.pi) * sigma)
def genMix(x, k, w, mu, sigma):
    """Discrete mixture pdf over x: weighted sum of k Gaussians,
    normalized so the returned values sum to 1."""
    mixture = np.zeros(len(x), dtype=float)
    for component in range(k):
        mixture += w[component] * genGaussian(x, mu[component], sigma[component])
    # Normalize the sampled curve to a discrete probability distribution.
    return mixture / mixture.sum()
def init_params(k, variant, experiment='dot_motion'):
    """Initial mixture parameters (weights, means, stds — angles in radians).

    Variants 'fixed_control'/'fixed_s' use per-experiment fitted stds;
    every other variant starts from a generic guess.
    """
    weights = np.full(k, 1.0 / k)
    means = np.deg2rad([-40.0, 0.0, 40.0])
    if variant in ('fixed_control', 'fixed_s'):
        if experiment == 'dot_motion_fb':
            stds = np.deg2rad([23.23, 4.59, 23.23])
        elif experiment == 'dot_motion':
            stds = np.deg2rad([25.8, 4.27, 25.8])
        else:
            stds = np.deg2rad([21.0, 4.7, 21.0])
    else:
        stds = np.deg2rad([20.0, 5.0, 20.0])
    return weights, means, stds
def EM(X, k, w, mu, sigma, variant, same=False):
    """Fit a k-component 1-D Gaussian mixture to samples X via EM.

    X: 1-D array of samples (radians). w/mu/sigma: initial weights, means
    and standard deviations (length k). `variant` selects which parameters
    are re-estimated vs. held fixed; `same=True` ties the two outer
    components together each iteration. Returns the updated (w, mu, sigma).
    """
    max_iter = 50
    epsilon = 1e-7  # guards against division by zero in both steps
    for iteration in range(max_iter):
        # E-step: responsibility gamma[c, n] of component c for sample n.
        cluster_pdf = np.zeros((k, X.shape[0]))
        for cluster in range(k):
            cluster_pdf[cluster] = genGaussian(X, mu[cluster], sigma[cluster])
        weighted_pdf = np.multiply(cluster_pdf, np.tile(w, (X.shape[0], 1)).T)
        gamma = weighted_pdf / (np.sum(weighted_pdf, axis=0) + epsilon)
        # M-step: always update weights; update sigma/mu per variant.
        w = np.mean(gamma, axis=1)
        gamma_sum = np.sum(gamma, axis=1) + epsilon
        if variant in ['fixed_mcs', 'fixed_m', 'fixed_initial']:
            sigma = np.sqrt(np.diagonal(np.dot(gamma, np.square(np.tile(X, (k, 1)).T - mu))) / gamma_sum)
        if variant in ['fixed_initial', 'fixed_s', 'fixed_ncm']:
            mu = np.dot(gamma, X) / gamma_sum
        if variant == 'fixed_ncm':
            # Pin the outer component means at +/-40 degrees.
            mu[0] = np.deg2rad(-40.0)
            mu[2] = np.deg2rad(40.0)
        if same:
            # Tie the two outer components' parameters together.
            w[0] = w[2]
            if variant in ['fixed_mcs', 'fixed_m', 'fixed_initial']:
                sigma[0] = sigma[2]
            if variant == 'fixed_initial' or variant == 'fixed_s':
                mu[0] = mu[2]
        if variant == 'fixed_mcs':
            # Keep the center component's spread fixed at 5 degrees.
            sigma[1] = np.deg2rad(5.0)
    # Round weights to 3 decimals, then redistribute the rounding residue
    # so they sum to exactly 1 (small residue goes to the center weight).
    w = np.around(w, 3)
    diff = 1 - sum(w)
    while (diff != 0):
        if diff < 0:
            w += diff / k
        elif diff < 1e-3:
            w[1] += diff
        else:
            w += diff / k
        diff = 1 - sum(w)
    return w, mu, sigma
def gmm_model_fit(bins, probabilities): # your histogram values are in data
    """Fit a 3-component Gaussian mixture to a response histogram.

    bins: bin centers in degrees; probabilities: matching normalized
    histogram heights (must sum to 1 for np.random.choice). Draws
    `num_points` samples from the histogram and runs EM on them.
    Returns (weights, means_deg, stds_deg). Note: uses np.random.choice,
    so results vary between runs unless the global numpy seed is fixed.
    """
    #variants = ['fixed_initial', 'fixed_m', 'fixed_mcs', 'fixed_ms', 'fixed_control']
    # fixed_initial = all parameters can vary
    # fixed_m = mean stays constant
    # fixed_mcs = mean and variance of center are constant
    # fixed_ms = mean and variance are fixed
    # fixed_control = mean and variance are fixed based on control condition
    variant = 'fixed_initial'
    k = 3 # number of clusters
    num_points = 5000
    samples = np.random.choice(np.radians(bins), num_points, p=probabilities)
    w, m, s = init_params(k, variant=variant) # initialize
    # # Decide if center mean should be zero or shifted
    # if abs(data[num_bins // 2] - data[num_bins // 2 - 1]) < 1e-3:
    #     m[1] = 0.0
    # else:
    #     m[1] = angles[np.argmax(data)]
    m[1] = 0.0
    fit_w, fit_m, fit_s = EM(samples, k, w, m, s, variant) # fit model
    return fit_w, np.degrees(fit_m), np.degrees(fit_s)
import imapclient
import imaplib
import re
import datetime
import time  # BUG FIX: time.sleep() was used below without importing time
from selenium import webdriver

# Raise the maximum IMAP line size so large messages can be downloaded.
imaplib._MAXLINE = 10000000
# Connect to the provider's IMAP server and log in.
imapObj = imapclient.IMAPClient('poczta.interia.pl', ssl=True)
imapObj.login('nazwauzytkownika', 'haslo')
#for i in imapObj.list_folders():
#    print(i[2])
imapObj.select_folder('INBOX', readonly=True)
#UIDs = imapObj.search([u'ALL'])
UIDs = imapObj.search([u'SINCE', datetime.date(2017, 1, 1)])
rawMessages = imapObj.fetch(UIDs, ['BODY[]', 'FLAGS'])
# Extract the List-Unsubscribe URL from every fetched message body.
pattern = r"List-Unsubscribe: <(http://[^>]+)>"
regex = re.compile(pattern)
f = open("unsuby.txt", "w")
f.write("Ilosc wiadomosci: {}".format(len(UIDs)))
f.write("\n \n")
for i in UIDs:
    x = regex.findall(str(rawMessages[i][b'BODY[]']))
    try:
        f.write(str(x[0]) + "\n")
    except IndexError:  # message carries no unsubscribe link
        pass
f.close()
# Visit every unsubscribe link and try to click a confirmation button.
browser = webdriver.Firefox()
with open("unsuby.txt", 'r') as f:
    lines = f.readlines()
for i in range(len(lines)):
    # BUG FIX: strip the trailing newline before handing the URL to selenium.
    link = lines.pop().strip()
    # BUG FIX: skip the "Ilosc wiadomosci" header and the blank line written
    # at the top of the file — they are not URLs.
    if not link.startswith("http"):
        continue
    browser.get(link)
    time.sleep(2)
    try:
        browser.find_element_by_tag_name('button').click()
    except Exception:  # no <button> on the page — try the next selector
        pass
    try:
        browser.find_element_by_tag_name('btn').click()
    except Exception:
        pass
    time.sleep(1)
#import pyzmail #do szczegółow danej wiadomości
#message=pyzmail.PyzMessage.factory(rawMessages[16771][b'BODY[]'])
#print(message.get_subject())
#print(message.get_addresses('from'))
#print(message.get_addresses('to'))
#print(message.get_addresses('cc'))
#print(message.get_addresses('bcc'))
#print(message.text_part != None)
#print(message.text_part.get_payload().decode(message.text_part.charset))
#print(message.html_part != None)
#print(message.html_part.get_payload().decode(message.html_part.charset))
imapObj.logout()
|
winlist = []
fname = "Anibal"
sname = "Lecter"
# Names already recorded in winlist.
# NOTE(review): entries are [fname, sname, uptime] but the lookup uses
# item[1] (the surname) while comparing against fname — confirm which
# field is intended as the key.
known_names = [entry[1] for entry in winlist]
if fname not in known_names:
    # First sighting: record the player with an initial uptime of 5.
    winlist.append([fname, sname, 5])
else:
    # Already present: bump the stored uptime by 5.
    winlist[known_names.index(fname)][2] += 5
|
from openerp.osv import osv, fields
from openerp.tools.translate import _
from openerp import netsvc
import openerp.addons.decimal_precision as dp
class account_move_line(osv.Model):
    # Extends account.move.line with a computed "Total $" column that
    # converts zero-amount-currency lines into CLP using the latest
    # exchange rate on or before the line's date.
    _inherit = "account.move.line"

    def validate_amount(self, cr, uid, ids, field_name, arg, context):
        # Functional-field compute: returns {line_id: amount}.
        result={}
        invoice_obj = self.pool.get('account.move.line')
        for move_lines in invoice_obj.browse(cr, uid, ids):
            print move_lines["amount_currency"]
            if move_lines["amount_currency"] == 0:
                fecha = move_lines["date"]
                if move_lines['currency_id']:
                    # By default the id is 46, which is CLP.
                    # Uncomment if you do not want all values in pesos.
                    #move_rate_id = move_lines['currency_id'][0]
                    move_rate_id = 46
                else :
                    # If the line has no currency, default to CLP as well.
                    move_rate_id = 46
                # Latest rate for the currency dated on or before the line.
                obj_rate = self.pool.get('res.currency.rate')
                accounts_rate_id = obj_rate.search(cr, uid,[('currency_id', '=', move_rate_id),('name','<=',fecha)],limit=10)
                divisa = obj_rate.browse(cr, uid, int(accounts_rate_id[0]))
                divisa_rate = divisa['rate']
                print move_lines["debit"]
                if move_lines["debit"] > 0 :
                    result[move_lines["id"]] = divisa_rate * move_lines["debit"]
                else :
                    # Credit lines are reported as negative amounts.
                    result[move_lines["id"]] = divisa_rate * move_lines["credit"]
                    result[move_lines["id"]]*=-1
            else:
                # A foreign-currency amount is already set; use it as-is.
                result[move_lines["id"]] = move_lines["amount_currency"]
        return result

    _columns = {
        'function' : fields.function(validate_amount, type='float', method=True, store=False, digits_compute=dp.get_precision('purchase'), string='Total $'),
    }
|
from random import shuffle
from word_rank import word_rank
"""
Scrabble Game
Classes:
Tile - keeps track of the tile letter and value
Rack - keeps track of the tiles in a player's letter rack
Bag - keeps track of the remaining tiles in the bag
Word - checks the validity of a word and its placement
Board - keeps track of the tiles' location on the board
"""
# Score value of each letter tile; "#" is the blank tile, worth 0 points.
LETTER_VALUES = {
    **dict.fromkeys("AEILNORSTU", 1),
    **dict.fromkeys("DG", 2),
    **dict.fromkeys("BCMP", 3),
    **dict.fromkeys("FHVWY", 4),
    "K": 5,
    **dict.fromkeys("JX", 8),
    **dict.fromkeys("QZ", 10),
    "#": 0,
}
class Tile:
    """
    A single letter tile: an uppercase letter plus the score that letter is
    worth according to the letter-value table supplied at construction.
    """

    def __init__(self, letter, letter_values):
        # Normalize to uppercase; letters absent from the table score 0.
        self.letter = letter.upper()
        self.score = letter_values.get(self.letter, 0)

    def get_letter(self):
        """Return the tile's letter (uppercase string)."""
        return self.letter

    def get_score(self):
        """Return the tile's point value."""
        return self.score
class Bag:
    """
    The bag of tiles available during the game: 98 letter tiles using the
    standard Scrabble distribution (the two blank tiles are currently
    disabled). Takes no arguments to initialize.
    """

    def __init__(self):
        # Fill the bag with the full tile distribution, shuffled.
        self.bag = []
        self.initialize_bag()

    def add_to_bag(self, tile, quantity):
        """Append `quantity` copies of `tile` to the bag."""
        self.bag.extend([tile] * quantity)

    def initialize_bag(self):
        """Add the initial 98 tiles to the bag and shuffle it."""
        global LETTER_VALUES
        distribution = (
            ("A", 9), ("B", 2), ("C", 2), ("D", 4), ("E", 12), ("F", 2),
            ("G", 3), ("H", 2), ("I", 9), ("J", 1), ("K", 1), ("L", 4),
            ("M", 2), ("N", 6), ("O", 8), ("P", 2), ("Q", 1), ("R", 6),
            ("S", 4), ("T", 6), ("U", 4), ("V", 2), ("W", 2), ("X", 1),
            ("Y", 2), ("Z", 1),
            # ("#", 2),  # blank tiles disabled
        )
        for letter, quantity in distribution:
            self.add_to_bag(Tile(letter, LETTER_VALUES), quantity)
        shuffle(self.bag)

    def take_from_bag(self):
        """Remove one tile from the bag and return it (used to refill racks)."""
        return self.bag.pop()

    def get_remaining_tiles(self):
        """Return the number of tiles left in the bag."""
        return len(self.bag)
class Rack:
    """
    A player's hand of tiles. Supports drawing, removing and topping the
    rack back up to 7 tiles from the shared bag.
    """

    def __init__(self, bag):
        # Start empty, then draw the opening hand from `bag`.
        self.rack = []
        self.initialize(bag)

    def add_to_rack(self, bag):
        """Draw one tile from the bag onto the rack."""
        self.rack.append(bag.take_from_bag())

    def initialize(self, bag):
        """Draw the opening hand of 7 tiles."""
        for _ in range(7):
            self.add_to_rack(bag)

    def get_rack_str(self):
        """Return the rack's letters as a comma-separated string."""
        return ", ".join(tile.get_letter() for tile in self.rack)

    def get_rack_arr(self):
        """Return the rack as a list of tile instances."""
        return self.rack

    def remove_from_rack(self, tile):
        """Remove a specific tile (e.g. when it has just been played)."""
        self.rack.remove(tile)

    def get_rack_length(self):
        """Return the number of tiles currently on the rack."""
        return len(self.rack)

    def replenish_rack(self, bag):
        """Refill the rack up to 7 tiles while the bag still has any."""
        while self.get_rack_length() < 7 and bag.get_remaining_tiles() > 0:
            self.add_to_rack(bag)
class Player:
    """
    A game participant: holds a display name, a tile rack drawn from the
    shared bag, and a running score.
    """

    def __init__(self, bag):
        # The rack pulls its opening 7 tiles from `bag` at construction.
        self.name = ""
        self.rack = Rack(bag)
        self.score = 0

    def set_name(self, name):
        """Set the player's display name."""
        self.name = name

    def get_name(self):
        """Return the player's display name."""
        return self.name

    def get_rack_str(self):
        """Return the rack as a comma-separated string of letters."""
        return self.rack.get_rack_str()

    def get_rack_arr(self):
        """Return the rack as a list of tile instances."""
        return self.rack.get_rack_arr()

    def increase_score(self, increase):
        """Add `increase` points to the player's score."""
        self.score += increase

    def get_score(self):
        """Return the player's current score."""
        return self.score
class Board:
    """
    Creates the scrabble board.
    """
    def __init__(self):
        #Creates a 2-dimensional array that will serve as the board, as well as adds in the premium squares.
        # Each cell is a 3-character string so a played letter renders " X "
        # and premium markers ("TWS", "DWS", ...) fit the same width.
        self.board = [["   " for i in range(15)] for j in range(15)]
        self.add_premium_squares()
        self.board[7][7] = " * "

    def get_board(self):
        #Returns the board in string form.
        # Column header row, then each board row framed with "|" separators.
        board_str = "   |  " + "  |  ".join(str(item) for item in range(10)) + "  | " + " | ".join(str(item) for item in range(10, 15)) + " |"
        board_str += "\n   _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\n"
        board = list(self.board)
        for i in range(len(board)):
            # NOTE(review): both branches build the identical string — the
            # i < 10 / i >= 10 split currently changes nothing; presumably
            # it was meant to pad single-digit row numbers.
            if i < 10:
                board[i] = str(i) + "  | " + " | ".join(str(item) for item in board[i]) + " |"
            if i >= 10:
                board[i] = str(i) + " | " + " | ".join(str(item) for item in board[i]) + " |"
        board_str += "\n   |_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _|\n".join(board)
        board_str += "\n   _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _"
        return board_str

    def add_premium_squares(self):
        #Adds all of the premium squares that influence the word's score.
        TRIPLE_WORD_SCORE = ((0,0), (7, 0), (14,0), (0, 7), (14, 7), (0, 14), (7, 14), (14,14))
        DOUBLE_WORD_SCORE = ((1,1), (2,2), (3,3), (4,4), (1, 13), (2, 12), (3, 11), (4, 10), (13, 1), (12, 2), (11, 3), (10, 4), (13,13), (12, 12), (11,11), (10,10))
        TRIPLE_LETTER_SCORE = ((1,5), (1, 9), (5,1), (5,5), (5,9), (5,13), (9,1), (9,5), (9,9), (9,13), (13, 5), (13,9))
        DOUBLE_LETTER_SCORE = ((0, 3), (0,11), (2,6), (2,8), (3,0), (3,7), (3,14), (6,2), (6,6), (6,8), (6,12), (7,3), (7,11), (8,2), (8,6), (8,8), (8, 12), (11,0), (11,7), (11,14), (12,6), (12,8), (14, 3), (14, 11))
        for coordinate in TRIPLE_WORD_SCORE:
            self.board[coordinate[0]][coordinate[1]] = "TWS"
        for coordinate in TRIPLE_LETTER_SCORE:
            self.board[coordinate[0]][coordinate[1]] = "TLS"
        for coordinate in DOUBLE_WORD_SCORE:
            self.board[coordinate[0]][coordinate[1]] = "DWS"
        for coordinate in DOUBLE_LETTER_SCORE:
            self.board[coordinate[0]][coordinate[1]] = "DLS"

    def place_word(self, word, location, direction, player, bag):
        #Allows you to play words, assuming that they have already been confirmed as valid.
        # Records any non-empty square the word covers in the module-level
        # `premium_spots` list so scoring can apply the multipliers later.
        global premium_spots
        premium_spots = []
        direction.lower()  # NOTE(review): no-op — the result is discarded
        word = word.upper()
        #Places the word going rightwards
        if direction.lower() == "r":
            for i in range(len(word)):
                if self.board[location[0]][location[1]+i] != "   ":
                    premium_spots.append((word[i], self.board[location[0]][location[1]+i]))
                self.board[location[0]][location[1]+i] = " " + word[i] + " "
        #Places the word going downwards
        elif direction.lower() == "d":
            for i in range(len(word)):
                if self.board[location[0]+i][location[1]] != "   ":
                    premium_spots.append((word[i], self.board[location[0]+i][location[1]]))
                self.board[location[0]+i][location[1]] = " " + word[i] + " "
        #Removes tiles from player's rack and replaces them with tiles from the bag.
        for letter in word:
            for tile in player.get_rack_arr():
                if tile.get_letter() == letter:
                    player.rack.remove_from_rack(tile)
        player.rack.replenish_rack(bag)

    def board_array(self):
        #Returns the 2-dimensional board array.
        return self.board
class Word:
    """Validates, scores and formats a single word placement.

    `location` is [row, col] of the word's first letter; `direction` is
    'r' (rightwards) or 'd' (downwards). check_word() must run (and return
    True) before calculate_word_score(), because it fills in
    self.attached_words and self.board_squares.
    """
    def __init__(self, word, location, player, direction, board):
        self.word = word.upper()
        self.location = location
        self.player = player
        self.direction = direction.lower()
        self.board = board
        self.attached_words = []
        self.score = 0
        self.board_squares = []

    def check_word(self, round_number, players):
        #Checks the word to make sure that it is in the dictionary, and that the location falls within bounds.
        #Also controls the overlapping of words.
        # Returns True when playable, False or an error string otherwise.
        # NOTE(review): mutates self.word / self.location when the placement
        # extends letters already on the board; the file handle for dic.txt
        # is never closed.
        dictionary = open('dic.txt').read()
        current_board_ltr = ""
        needed_tiles = ""
        blank_tile_val = ""
        #Assuming that the player is not skipping the turn:
        if self.word != "":
            #Raises an error if the location of the word will be out of bounds.
            if self.location[0] > 14 or self.location[1] > 14 or self.location[0] < 0 or self.location[1] < 0 or (self.direction == "d" and (self.location[0]+len(self.word)-1) > 14) or (self.direction == "r" and (self.location[1]+len(self.word)-1) > 14):
                return False
            # TODO Fix blank tile function and program the bot to use them
            #Allows for players to declare the value of a blank tile.
            if "#" in self.word:
                while len(blank_tile_val) != 1:
                    blank_tile_val = input("Please enter the letter value of the blank tile: ")
                self.word = self.word[:self.word.index("#")] + blank_tile_val.upper() + self.word[(self.word.index("#")+1):]
            #Reads in the board's current values under where the word that is being played will go. Raises an error if the direction is not valid.
            if self.direction == "r":
                # Check for adjacent letters and add them to the word
                board_row = self.board[self.location[0]]
                col = self.location[1]
                end_ltrs = self.get_letters(board_row, col + len(self.word))
                start_ltrs = self.get_letters(board_row[::-1], 15 - col)
                if len(end_ltrs) > 0:
                    self.word = self.word + ''.join(end_ltrs)
                if len(start_ltrs) > 0:
                    self.word = ''.join(start_ltrs[::-1]) + self.word
                    # NOTE(review): shifting by the full (already-extended)
                    # word length looks suspect — verify against gameplay.
                    self.location[1] -= len(self.word)
                for i in range(len(self.word)):
                    # Empty / premium / star squares read as a blank.
                    if self.board[self.location[0]][self.location[1]+i][1] == " " or self.board[self.location[0]][self.location[1]+i] == "TLS" or self.board[self.location[0]][self.location[1]+i] == "TWS" or self.board[self.location[0]][self.location[1]+i] == "DLS" or self.board[self.location[0]][self.location[1]+i] == "DWS" or self.board[self.location[0]][self.location[1]+i][1] == "*":
                        current_board_ltr += " "
                    else:
                        current_board_ltr += self.board[self.location[0]][self.location[1]+i][1]
            elif self.direction == "d":
                # Same adjacency handling as above, along a column.
                board_col = [x[self.location[1]] for x in self.board]
                row = self.location[0]
                end_ltrs = self.get_letters(board_col, row + len(self.word))
                start_ltrs = self.get_letters(board_col[::-1], 15 - row)
                if len(end_ltrs) > 0:
                    self.word = self.word + ''.join(end_ltrs)
                if len(start_ltrs) > 0:
                    self.word = ''.join(start_ltrs[::-1]) + self.word
                    self.location[0] -= len(self.word)
                for i in range(len(self.word)):
                    if self.board[self.location[0]+i][self.location[1]] == "   " or self.board[self.location[0]+i][self.location[1]] == "TLS" or self.board[self.location[0]+i][self.location[1]] == "TWS" or self.board[self.location[0]+i][self.location[1]] == "DLS" or self.board[self.location[0]+i][self.location[1]] == "DWS" or self.board[self.location[0]+i][self.location[1]] == " * ":
                        current_board_ltr += " "
                    else:
                        current_board_ltr += self.board[self.location[0]+i][self.location[1]][1]
            else:
                return "Error: please enter a valid direction."
            #Raises an error if the word being played is not in the official scrabble dictionary (dic.txt).
            if '\n' + self.word + '\n' not in dictionary:
                return "Please enter a valid dictionary word.\n"
            #Ensures that the words overlap correctly. If there are conflicting letters between the current board and the word being played, raises an error.
            for i in range(len(self.word)):
                if current_board_ltr[i] == " ":
                    needed_tiles += self.word[i]
                elif current_board_ltr[i] != self.word[i]:
                    # print("Current_board_ltr: " + str(current_board_ltr) + ", Word: " + self.word + ", Needed_Tiles: " + needed_tiles)
                    return "The letters do not overlap correctly, please choose another word."
            #If there is a blank tile, remove it's given value from the tiles needed to play the word.
            # NOTE(review): this rotates needed_tiles rather than removing a
            # letter — likely part of the known blank-tile TODO above.
            if blank_tile_val != "":
                needed_tiles = needed_tiles[needed_tiles.index(blank_tile_val):] + needed_tiles[:needed_tiles.index(blank_tile_val)]
            #Ensures that the word will be connected to other words on the playing board.
            if (round_number != 1 or (round_number == 1 and players[0] != self.player)) and current_board_ltr == " " * len(self.word):
                # print("Current_board_ltr: " + str(current_board_ltr) + ", Word: " + self.word + ", Needed_Tiles: " + needed_tiles)
                return "Please connect the word to a previously played letter."
            #Raises an error if the player does not have the correct tiles to play the word.
            for letter in needed_tiles:
                if letter not in self.player.get_rack_str() or self.player.get_rack_str().count(letter) < needed_tiles.count(letter):
                    return "You do not have the tiles for this word\n"
            #Ensures that first turn of the game will have the word placed at (7,7).
            if round_number == 1 and players[0] == self.player and self.location != [7,7]:
                return "The first turn must begin at location (7, 7).\n"
            # Check that new words formed that are attached to the word played are real
            attached_words, board_squares = self.get_attached_words()
            for word_info in attached_words:
                word = '\n' + ''.join(word_info['word']) + '\n'
                if word not in dictionary:
                    return 'invalid word attached'
            self.attached_words = attached_words
            self.board_squares = board_squares
            return True
        #If the user IS skipping the turn, confirm. If the user replies with "Y", skip the player's turn. Otherwise, allow the user to enter another word.
        else:
            if input("Are you sure you would like to skip your turn? (y/n)").upper() == "Y":
                return True
            else:
                return "Please enter a word."

    # get letters that are apart of the attached word
    def get_letters(self, word_col, row):
        # Walk from `row` collecting contiguous letters until the first
        # empty/premium square or the edge of the board.
        word = []
        space = False
        while not space and row >= 0 and row < 15:
            if len(word_col[row].strip()) == 1:
                word.append(word_col[row].strip())
                row += 1
            else:
                space = True
        return word

    # Get the score for the word that is attached to the played word
    def format_word(self, col, row, board, ltr):
        # Rebuild the full cross word around `ltr` at (row, col).
        word_col = [x[col] for x in board]
        word_end = self.get_letters(word_col, row + 1)
        word_start = self.get_letters(word_col[::-1], 15 - row)
        word = word_start[::-1] + [ltr] + word_end
        return word

    # check the spaces on both sides of the word for letters
    def get_other_words(self, start, row, end, board, played_ltrs):
        # For each freshly played tile, detect a perpendicular neighbour and
        # collect the cross word it forms.
        global LETTER_VALUES
        words = []
        for square in played_ltrs:
            if self.direction == 'r':
                col = square['col']
            else:
                col = square['row']
            ltr_after = self.is_letter(board, row + 1, col)
            ltr_before = self.is_letter(board, row - 1, col)
            if ltr_after or ltr_before:
                word = self.format_word(col, row, board, square['ltr'])
                words.append({'word': word, 'ltr': square['ltr'], 'ltr_indx': [row, col], 'score': 0})
        return words

    #check if there is a letter on the square
    def is_letter(self, board, row, i):
        # Off-board indices simply count as "no letter".
        try:
            space = board[row][i].strip()
            is_ltr = len(space) == 1
        except IndexError:
            is_ltr = False
        return is_ltr

    def get_played_tiles(self, board):
        # Return the tiles the player must lay down (squares not already
        # holding a letter) plus the raw board squares under the word.
        player_ltrs = []
        squares = []
        loc = self.location
        if self.direction == 'r':
            end = loc[1] + len(self.word) - 1
            squares = board[loc[0]][loc[1]:end + 1]
        else:
            # NOTE(review): `board` is pre-transposed by the caller for 'd';
            # confirm the loc[1]/loc[0] indexing here is the intended one.
            end = loc[0] + len(self.word) - 1
            squares = board[loc[1]][loc[0]: end + 1]
        for i in range(len(squares)):
            if len(squares[i].strip()) == 1 and squares[i].strip() != "*":
                pass
            else:
                if self.direction == 'r':
                    player_ltrs.append({'ltr': self.word[i], 'row': loc[0], 'col': loc[1] + i, 'score': 0})
                else:
                    player_ltrs.append({'ltr': self.word[i], 'row': loc[0] + i, 'col': loc[1], 'score': 0})
        return player_ltrs, squares

    def get_attached_words(self):
        # Normalize to row-wise processing: transpose the board for 'd'.
        loc = self.location
        if self.direction == 'r':
            stat = loc[0]
            start = loc[1]
            board = self.board
        else:
            stat = loc[1]
            start = loc[0]
            board = [list(i) for i in zip(*self.board)]
        end = start + len(self.word) -1
        if end > 14 or end < 0 or loc[0] < 0 or loc[0] > 14 or loc[1] < 0 or loc[1] > 14:
            return 0 # change to invalid
        played_ltrs, squares = self.get_played_tiles(board)
        return self.get_other_words(start, stat, end, board, played_ltrs), squares

    # calculate the score of the word being played, as well as words that are attached
    def calculate_word_score(self):
        self.score = 0
        global LETTER_VALUES
        word_score = 0
        total_score = 0
        word_mult = 1
        board_ltrs = []
        # Score each perpendicular word created by the placement; the
        # premium square under the newly played letter applies to it.
        for word in self.attached_words:
            row = word['ltr_indx'][0]
            col = word['ltr_indx'][1]
            ltr_sqr = self.board[row][col]
            for ltr in word['word']:
                word['score'] += LETTER_VALUES[ltr]
            if ltr_sqr == "TLS":
                word['score']+= LETTER_VALUES[word['ltr']] * 2
            elif ltr_sqr == "DLS":
                word['score'] += LETTER_VALUES[word['ltr']]
            if ltr_sqr == "DWS":
                word['score'] *= 2
            elif ltr_sqr == "TWS":
                word['score'] *= 3
            total_score += word['score']
        # calculate the score of the word being played
        if self.location == [7,7] and self.board[7][7][1] == '*':
            word_mult = 2
        for i in range(len(self.word)):
            # Letters already on the board contribute face value only.
            if len(self.board_squares[i].strip()) == 1 and self.board_squares[i][1] != '*':
                board_ltrs.append(self.board_squares[i][1])
            if self.board_squares[i] == "TLS":
                word_score += LETTER_VALUES[self.word[i]] * 3
            elif self.board_squares[i] == "DLS":
                word_score += LETTER_VALUES[self.word[i]] * 2
            else:
                word_score += LETTER_VALUES[self.word[i]]
            if self.board_squares[i] == "DWS":
                word_mult *= 2
            elif self.board_squares[i] == "TWS":
                word_mult *= 3
        word_score *= word_mult
        total_score += word_score
        #fix
        # 50-point bonus for using all 7 rack tiles (a "bingo").
        if len(self.word) - len(board_ltrs) == 7:
            total_score += 50
        self.score = total_score
        return total_score

    def format_output(self, rack):
        # Translate the placement into per-tile moves: which rack slot each
        # newly played letter comes from and where it lands on the board.
        moves = []
        rack = rack.split(", ").copy()
        row = self.location[0]
        col = self.location[1]
        for i in range(len(self.word)):
            if self.direction == 'r':
                col = self.location[1] + i
            else:
                row = self.location[0] + i
            if len(self.board_squares[i].strip()) != 1:
                rack_pos = rack.index(self.word[i])
                rack[rack_pos] = ''  # consume the slot so duplicates map correctly
                moves.append({'ltr': self.word[i], 'rack_pos': rack_pos, 'board_pos': [row, col]})
        return moves

    def add_score(self):
        """Credit the computed score to the owning player."""
        self.player.increase_score(self.score)

    def set_word(self, word):
        self.word = word

    def set_location(self, location):
        self.location = location

    def set_direction(self, direction):
        self.direction = direction

    def get_word(self):
        return self.word
class Game:
    """
    One game session: owns the board, the bag, the two players (a human and
    the bot) and the turn/round bookkeeping.
    """

    def __init__(self, player_name):
        self.board = Board()
        self.bag = Bag()
        self.round_number = 1
        self.skipped_turns = 0
        players = [Player(self.bag), Player(self.bag)]
        players[0].set_name(player_name)
        players[1].set_name("Bot")
        self.players = players
        self.current_player = 0

    def get_word_played(self, new_board):
        """Diff the stored board against `new_board` and report new letters.

        NOTE(review): `new_board[i, j]` requires a 2-D (numpy-style)
        indexable — confirm against the caller.
        """
        # if one letter found, need to figure out which direction word is going
        # if multiple letters, we know the direction
        old_board = self.board.board_array()
        for (i, row) in enumerate(old_board):
            for (j, letter) in enumerate(row):
                letter = letter.strip()
                if letter != new_board[i, j]:
                    # new letter
                    print("new letter \"" + letter + "\" at [" + str(i) + ", " + str(j) + "]")
                    # TODO: report new word

    def print_game(self):
        """Print the round header, the board, every rack and every score."""
        players = self.players
        print("\nRound " + str(self.round_number) + ": " + players[self.current_player].get_name() + "'s turn \n")
        print(self.board.get_board())
        print("\nLetter Racks")
        for player in players:
            print(player.get_name() + ": " + player.get_rack_str())
        print("\nScores")
        for player in players:
            print(player.get_name() + ": " + str(player.get_score()))
        print("")

    def get_board_data(self):
        """Return the raw 2-D board array."""
        return self.board.board_array()

    def bot_turn(self, player):
        """Let the bot (player index `player`) pick and play its best word."""
        print('bot turn')
        word_to_play = word_rank(self.players[player].get_rack_str(), self.get_board_data(), self.round_number, self.players, player)
        output = word_to_play.format_output(self.players[player].get_rack_str())
        self.player_turn(word_to_play.word, word_to_play.location[0], word_to_play.location[1], word_to_play.direction)

    def is_ended(self):
        """Return True when the game is over.

        TODO(review): the second condition looks inverted — an empty rack
        together with an empty bag should END the game, yet it currently
        keeps it running. Preserved as-is pending confirmation.
        """
        player = self.players[self.current_player]
        #If the number of skipped turns is less than 6 and a row, and there are either tiles in the bag, or no players have run out of tiles, play the turn.
        #Otherwise, end the game.
        if (self.skipped_turns < 6) or (player.rack.get_rack_length() == 0 and self.bag.get_remaining_tiles() == 0):
            return False
        #If the number of skipped turns is over 6 or the bag has both run out of tiles and a player is out of tiles, end the game.
        else:
            return True

    # word [type string], col/row [type num], direction [r or d]
    def player_turn(self, word_to_play, row, col, direction):
        """Validate and play `word_to_play` at (row, col) heading `direction`
        ('r' or 'd') for the current player, then advance the turn."""
        player = self.players[self.current_player]
        #Code added to let BESSIE pick a word to play
        # Out-of-range coordinates are mapped to a sentinel that fails
        # Word.check_word's bounds test.
        if (col > 14 or col < 0) or (row > 14 or row < 0):
            location = [-1, -1]
        else:
            location = [row, col]
        word = Word(word_to_play, location, player, direction, self.board.board_array())
        # return error, ask user to play different word
        word_valid = word.check_word(self.round_number, self.players)
        if (word_valid != True):
            print('INVALID WORD')
            return
        #If the user has confirmed that they would like to skip their turn, skip it.
        #Otherwise, plays the correct word and prints the board.
        if word.get_word() == "":
            print("Your turn has been skipped.")
            self.skipped_turns += 1
        else:
            word.calculate_word_score()
            word.add_score()
            self.board.place_word(word_to_play, location, direction, player, self.bag)
            self.skipped_turns = 0
        #Gets the next player; a new round starts when the index wraps to 0.
        self.current_player = (self.current_player + 1) % 2
        if (self.current_player == 0):
            self.round_number += 1

    def end_game(self):
        """Return the end-of-game message naming the winning player.

        BUG FIX: the original compared the bound method `player.get_score`
        (not its return value) against `highest_score` — a TypeError on
        Python 3 — and announced the LAST player iterated instead of the
        recorded winner.
        """
        highest_score = 0
        winning_player = ""
        for player in self.players:
            if player.get_score() > highest_score:
                highest_score = player.get_score()
                winning_player = player.get_name()
        return "The game is over! " + winning_player + ", you have won!"
|
from flask import Flask, render_template, url_for, request
import sys
import os
import git
app = Flask(__name__)
@app.route('/', methods=['POST'])
def webhook():
    """Git webhook endpoint: pull the latest code on every push event.

    NOTE(review): `git.Repo()` expects a local repository path, not a
    remote URL — this likely needs the on-disk clone path instead.
    """
    if request.method == 'POST':  # always true: the route only accepts POST
        repo = git.Repo('https://github.com/fabrizioperuzzo/magicpy.git')
        origin = repo.remotes.origin
        origin.pull()
        return 'Updated PythonAnywhere successfully', 200
    else:
        return 'Wrong event type', 400
@app.route("/")
def index():
return render_template("todaytrade.html")
|
# Generated by Django 2.2.11 on 2020-03-22 10:24
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the order app.

    Creates the Status, ProductInBasketModel and Order tables. Depends on
    the swappable user model and the product app's initial migration.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('product', '0001_initial'),
    ]
    operations = [
        # Order-status dictionary table.
        migrations.CreateModel(
            name='Status',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, default=None, max_length=24, null=True)),
                ('is_active', models.BooleanField(default=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
            ],
            options={
                'verbose_name': 'Статус заказа',
                'verbose_name_plural': 'Статусы заказа',
            },
        ),
        # One basket line per product; identified by a session token_key
        # rather than a user, so anonymous baskets work too.
        migrations.CreateModel(
            name='ProductInBasketModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('token_key', models.CharField(blank=True, default=None, max_length=128, null=True)),
                ('qty', models.IntegerField(default=1, verbose_name='Кол.во')),
                ('size', models.CharField(blank=True, default=None, max_length=10, null=True, verbose_name='Размер')),
                ('price', models.DecimalField(decimal_places=2, default=0, max_digits=10, verbose_name='Цена')),
                ('image', models.CharField(blank=True, default=None, max_length=128, null=True, verbose_name='Фото')),
                ('total_price', models.DecimalField(decimal_places=2, default=0, max_digits=10, verbose_name='Итого')),
                ('is_active', models.BooleanField(default=True)),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Создан')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Обновлён')),
                ('product', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='product.Product', verbose_name='Продукт')),
            ],
            options={
                'verbose_name': 'Товар в корзине',
                'verbose_name_plural': 'Товары в корзине',
            },
        ),
        # Customer order; user is optional (guest checkout via token).
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('total_price', models.DecimalField(decimal_places=2, default=0, max_digits=10)),
                ('customer_email', models.EmailField(blank=True, default=None, max_length=254, null=True)),
                ('customer_surname', models.CharField(blank=True, default=None, max_length=120, null=True)),
                ('customer_name', models.CharField(blank=True, default=None, max_length=120, null=True)),
                ('customer_tel', models.CharField(blank=True, default=None, max_length=50, null=True)),
                ('customer_address', models.CharField(blank=True, default=None, max_length=128, null=True)),
                ('comments', models.TextField(blank=True, default=None, null=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('token', models.CharField(blank=True, default=None, max_length=128, null=True)),
                ('status', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='order.Status')),
                ('user', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Заказ',
                'verbose_name_plural': 'Заказы',
            },
        ),
    ]
|
#!/usr/bin/env
############################################
# exercise_8_ifelse.py
# Author: Paul Yang
# Date: June, 2016
# Brief: handling the exception of ValueError and FileNotFoundError
############################################
############################################
# print_file()
# open file by the filepath user input and will check if the filepath is valid
# inputs: None
# returns: None
import os
def print_file():
    """Prompt for a filename and pretty-print a "role:line" dialogue file.

    Each line of the file is expected to look like "role:spoken text"; lines
    without a ':' separator are skipped. Prints an error message when the
    file does not exist.
    """
    filename = input("輸入要開啟的檔名:")
    if os.path.exists(filename):
        # "with" guarantees the file is closed even if printing raises.
        with open(filename, encoding="utf-8") as data:
            for line in data:
                try:
                    (role, line_spoken) = line.split(":", maxsplit=1)
                except ValueError:
                    # No ':' on this line -- not dialogue, skip it.
                    # (The original bare "except: pass" also hid unrelated
                    # errors; only the split can legitimately fail here.)
                    continue
                print(role, end="")
                print("說: ", end="")
                print(line_spoken, end="")
    else:
        print("檔名不存在")
|
#!/usr/bin/python3
''' This module runs a query on a database to find states '''
if __name__ == "__main__":
import MySQLdb
from sys import argv
db = MySQLdb.connect(host="localhost", port=3306,
user=argv[1], passwd=argv[2], db=argv[3])
cur = db.cursor()
cur.execute("SELECT * FROM states ORDER BY id ASC")
query_db = cur.fetchall()
for row in query_db:
print(row)
cur.close()
db.close()
|
def abbreviate(words):
    """Return the acronym of *words*: the first letter of each word, uppercased.

    Hyphens and underscores are treated as word separators, like whitespace.
    """
    normalized = words.upper().replace('-', ' ').replace('_', ' ')
    return ''.join(word[0] for word in normalized.split())
|
import h5py
import mne
import numpy as np
from mne import create_info
from mne.io import RawArray
from scipy.io import loadmat
from moabb.datasets import download as dl
from moabb.datasets.base import BaseDataset
# Root URL of the public Donders repository hosting the raw dataset files.
Thielen2021_URL = "https://public.data.donders.ru.nl/dcc/DSC_2018.00122_448_v3"
# The default electrode locations in the raw file are wrong. We used the ExG channels on the Biosemi with a custom 8
# channel set, according to an optimization as published in the following article:
# Ahmadi, S., Borhanazad, M., Tump, D., Farquhar, J., & Desain, P. (2019). Low channel count montages using sensor
# tying for VEP-based BCI. Journal of Neural Engineering, 16(6), 066038. DOI: https://doi.org/10.1088/1741-2552/ab4057
# Maps the (incorrect) channel labels stored in the GDF files to the
# electrode positions that were actually recorded.
ELECTRODE_MAPPING = {
    "AF3": "Fpz",
    "F3": "T7",
    "FC5": "O1",
    "P7": "POz",
    "P8": "Oz",
    "FC6": "Iz",
    "F4": "O2",
    "AF4": "T8",
}
# Individual sessions of each of the 30 individual participants in the dataset
# (SESSIONS[i] is the recording date of subject i+1, used to build file names).
SESSIONS = (
    "20181128",
    "20181206",
    "20181217",
    "20181217",
    "20181217",
    "20181218",
    "20181218",
    "20181219",
    "20181219",
    "20181220",
    "20181220",
    "20181220",
    "20190107",
    "20190107",
    "20190110",
    "20190110",
    "20190110",
    "20190117",
    "20190117",
    "20190118",
    "20190118",
    "20190118",
    "20190220",
    "20190222",
    "20190225",
    "20190301",
    "20190307",
    "20190308",
    "20190311",
    "20190311",
)
# Each session consisted of 5 blocks (i.e., runs)
NR_BLOCKS = 5
# Each trial contained 15 cycles of a 2.1 second code
NR_CYCLES_PER_TRIAL = 15
# Codes were presented at a 60 Hz monitor refresh rate
PRESENTATION_RATE = 60
class Thielen2021(BaseDataset):
    """c-VEP dataset from Thielen et al. (2021)
    Dataset [1]_ from the study on zero-training c-VEP [2]_.
    .. admonition:: Dataset summary
        ============= ======= ======= ================== =============== =============== ===========
        Name            #Subj   #Chan   #Trials / class    Trials length   Sampling rate   #Sessions
        ============= ======= ======= ================== =============== =============== ===========
        Thielen2021        30       8  18900 NT / 18900 T            0.3s           512Hz           1
        ============= ======= ======= ================== =============== =============== ===========
    **Dataset description**
    EEG recordings were acquired at a sampling rate of 512 Hz, employing 8 Ag/AgCl electrodes. The Biosemi ActiveTwo EEG
    amplifier was utilized during the experiment. The electrode array consisted of Fz, T7, O1, POz, Oz, Iz, O2, and T8,
    connected as EXG channels. This is a custom electrode montage as optimized in a previous study for c-VEP, see [3]_.
    During the experimental sessions, participants engaged in passive operation (i.e., without feedback) of a 4 x 5
    visual speller brain-computer interface (BCI) comprising 20 distinct classes. Each cell of the symbol grid
    underwent luminance modulation at full contrast, accomplished through pseudo-random noise-codes derived from a
    collection of modulated Gold codes. These codes are binary, have a balanced distribution of ones and zeros, and
    adhere to a limited run-length pattern (maximum run-length of 2 bits). The codes were presented at a presentation
    rate of 60 Hz. As one cycle of these modulated Gold codes contains 126 bits, the duration of one cycle is 2.1
    seconds.
    For each of the five blocks, a trial started with a cueing phase, during which the target symbol was highlighted in
    a green hue for a duration of 1 second. Following this, participants maintained their gaze fixated on the target
    symbol while all symbols flashed in accordance with their respective pseudo-random noise-codes for a duration of
    31.5 seconds (i.e., 15 code cycles). Each block encompassed 20 trials, presented in a randomized sequence, thereby
    ensuring that each symbol was attended to once within the span of a block.
    Note, here, we only load the offline data of this study and ignore the online phase.
    References
    ----------
    .. [1] Thielen, J. (Jordy), Pieter Marsman, Jason Farquhar, Desain, P.W.M. (Peter) (2023): From full calibration to
           zero training for a code-modulated visual evoked potentials brain computer interface. Version 3. Radboud
           University. (dataset).
           DOI: https://doi.org/10.34973/9txv-z787
    .. [2] Thielen, J., Marsman, P., Farquhar, J., & Desain, P. (2021). From full calibration to zero training for a
           code-modulated visual evoked potentials for brain–computer interface. Journal of Neural Engineering, 18(5),
           056007.
           DOI: https://doi.org/10.1088/1741-2552/abecef
    .. [3] Ahmadi, S., Borhanazad, M., Tump, D., Farquhar, J., & Desain, P. (2019). Low channel count montages using
           sensor tying for VEP-based BCI. Journal of Neural Engineering, 16(6), 066038.
           DOI: https://doi.org/10.1088/1741-2552/ab4057
    Notes
    -----
    .. versionadded:: 0.6.0
    """
    def __init__(self):
        super().__init__(
            subjects=list(range(1, 30 + 1)),
            sessions_per_subject=1,
            # Event codes: "1" bits of the code -> 101, "0" bits -> 100
            # (see _add_stim_channel_epoch below).
            events={"1.0": 101, "0.0": 100},
            code="Thielen2021",
            interval=(0, 0.3),
            paradigm="cvep",
            doi="10.34973/9txv-z787",
        )
    def _add_stim_channel_trial(
        self, raw, onsets, labels, offset=200, ch_name="stim_trial"
    ):
        """
        Add a stimulus channel with trial onsets and their labels.
        Parameters
        ----------
        raw: mne.Raw
            The raw object to add the stimulus channel to.
        onsets: List | np.ndarray
            The onsets of the trials in sample numbers.
        labels: List | np.ndarray
            The labels of the trials.
        offset: int (default: 200)
            The integer value to start markers with. For instance, if 200, then label 0 will be marker 200, label 1
            will be be marker 201, etc.
        ch_name: str (default: "stim_trial")
            The name of the added stimulus channel.
        Returns
        -------
        mne.Raw
            The raw object with the added stimulus channel.
        """
        stim_chan = np.zeros((1, len(raw)))
        for onset, label in zip(onsets, labels):
            stim_chan[0, onset] = offset + label
        # NOTE: ch_names is hard-coded to "stim_trial" here, so the ch_name
        # parameter is effectively ignored (unlike _add_stim_channel_epoch).
        info = create_info(
            ch_names=["stim_trial"],
            ch_types=["stim"],
            sfreq=raw.info["sfreq"],
            verbose=False,
        )
        raw = raw.add_channels([RawArray(data=stim_chan, info=info, verbose=False)])
        return raw
    def _add_stim_channel_epoch(
        self,
        raw,
        onsets,
        labels,
        codes,
        presentation_rate=60,
        offset=100,
        ch_name="stim_epoch",
    ):
        """
        Add a stimulus channel with epoch onsets and their labels, which are the values of the presented code for each
        of the trials.
        Parameters
        ----------
        raw: mne.Raw
            The raw object to add the stimulus channel to.
        onsets: List | np.ndarray
            The onsets of the trials in sample numbers.
        labels: List | np.ndarray
            The labels of the trials.
        codes: np.ndarray
            The codebook containing each presented code of shape (nr_bits, nr_codes), sampled at the presentation rate.
        presentation_rate: int (default: 60):
            The presentation rate (e.g., frame rate) at which the codes were presented in Hz.
        offset: int (default: 100)
            The integer value to start markers with. For instance, if 100, then label 0 will be marker 100, label 1
            will be be marker 101, etc.
        ch_name: str (default: "stim_epoch")
            The name of the added stimulus channel.
        Returns
        -------
        mne.Raw
            The raw object with the added stimulus channel.
        """
        stim_chan = np.zeros((1, len(raw)))
        for onset, label in zip(onsets, labels):
            # Upsample the 60 Hz code bits onto the EEG sample grid.
            idx = np.round(
                onset + np.arange(codes.shape[0]) / presentation_rate * raw.info["sfreq"]
            ).astype("int")
            stim_chan[0, idx] = offset + codes[:, label]
        info = create_info(
            ch_names=[ch_name],
            ch_types=["stim"],
            sfreq=raw.info["sfreq"],
            verbose=False,
        )
        raw = raw.add_channels([RawArray(data=stim_chan, info=info, verbose=False)])
        return raw
    def _get_single_subject_data(self, subject):
        """Return the data of a single subject."""
        file_path_list = self.data_path(subject)
        # Codes
        # One full trial shows the 2.1 s code NR_CYCLES_PER_TRIAL times.
        codes = np.tile(loadmat(file_path_list[-2])["codes"], (NR_CYCLES_PER_TRIAL, 1))
        # Channels
        montage = mne.channels.read_custom_montage(file_path_list[-1])
        # There is only one session, each of 5 blocks (i.e., runs)
        sessions = {"session_1": {}}
        for i_b in range(NR_BLOCKS):
            # EEG
            raw = mne.io.read_raw_gdf(
                file_path_list[2 * i_b],
                stim_channel="status",
                preload=True,
                verbose=False,
            )
            # The default electrode locations in the raw file are wrong. We used the ExG channels on the Biosemi with a
            # custom 8 channel set, according to an optimization as published in the following article:
            # Ahmadi, S., Borhanazad, M., Tump, D., Farquhar, J., & Desain, P. (2019). Low channel count montages using
            # sensor tying for VEP-based BCI. Journal of Neural Engineering, 16(6), 066038.
            # DOI: https://doi.org/10.1088/1741-2552/ab4057
            mne.rename_channels(raw.info, ELECTRODE_MAPPING)
            raw.set_montage(montage)
            # Labels at trial level (i.e., symbols)
            trial_labels = (
                np.array(h5py.File(file_path_list[2 * i_b + 1], "r")["v"])
                .astype("uint8")
                .flatten()
                - 1
            )
            # Find onsets of trials
            # Note, every 2.1 seconds an event was generated: 15 times per trial, plus one 16th "leaking epoch". This
            # "leaking epoch" is not always present, so taking epoch[::16, :] won't work.
            events = mne.find_events(raw, verbose=False)
            # A gap that is not ~2.1 s marks the boundary between trials.
            cond = np.logical_or(
                np.diff(events[:, 0]) < 1.8 * raw.info["sfreq"],
                np.diff(events[:, 0]) > 2.4 * raw.info["sfreq"],
            )
            idx = np.concatenate(([0], 1 + np.where(cond)[0]))
            trial_onsets = events[idx, 0]
            # Create stim channel with trial information (i.e., symbols)
            # Specifically: 200 = symbol-0, 201 = symbol-1, 202 = symbol-2, etc.
            raw = self._add_stim_channel_trial(
                raw, trial_onsets, trial_labels, offset=200
            )
            # Create stim channel with epoch information (i.e., 1 / 0, or on / off)
            # Specifically: 100 = "0", 101 = "1"
            raw = self._add_stim_channel_epoch(
                raw, trial_onsets, trial_labels, codes, PRESENTATION_RATE, offset=100
            )
            # Add data as a new run
            sessions["session_1"][f"run_{1 + i_b:02d}"] = raw
        return sessions
    def data_path(
        self, subject, path=None, force_update=False, update_path=None, verbose=None
    ):
        """Return the data paths of a single subject."""
        if subject not in self.subject_list:
            raise (ValueError("Invalid subject number"))
        sub = f"sub-{subject:02d}"
        ses = SESSIONS[subject - 1]
        subject_paths = []
        # Per block: the GDF recording followed by its trial-label file, so
        # _get_single_subject_data can index them as 2*i_b and 2*i_b + 1.
        for i_b in range(NR_BLOCKS):
            blk = f"block_{1 + i_b:d}"
            # EEG
            url = f"{Thielen2021_URL:s}/sourcedata/offline/{sub}/{blk}/{sub}_{ses}_{blk}_main_eeg.gdf"
            subject_paths.append(dl.data_dl(url, self.code, path, force_update, verbose))
            # Labels at trial level (i.e., symbols)
            url = f"{Thielen2021_URL:s}/sourcedata/offline/{sub}/{blk}/trainlabels.mat"
            subject_paths.append(dl.data_dl(url, self.code, path, force_update, verbose))
        # Codes
        url = f"{Thielen2021_URL:s}/resources/mgold_61_6521_flip_balanced_20.mat"
        subject_paths.append(dl.data_dl(url, self.code, path, force_update, verbose))
        # Channel locations
        url = f"{Thielen2021_URL:s}/resources/nt_cap8.loc"
        subject_paths.append(dl.data_dl(url, self.code, path, force_update, verbose))
        return subject_paths
|
"""empty message
Revision ID: 725ee58a391d
Revises:
Create Date: 2019-01-21 21:36:09.043513
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '725ee58a391d'
down_revision = None  # first migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial schema: revoked_tokens, users, surveys, questions, replies.

    Tables are created parents-first so foreign keys resolve.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # JWT blacklist: jti is the unique token identifier being revoked.
    op.create_table('revoked_tokens',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('jti', sa.String(length=120), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('users',
    sa.Column('idUser', sa.Integer(), nullable=False),
    sa.Column('login', sa.String(length=80), nullable=True),
    sa.Column('email', sa.String(length=120), nullable=True),
    sa.Column('paswd', sa.String(length=120), nullable=True),
    sa.PrimaryKeyConstraint('idUser'),
    sa.UniqueConstraint('email'),
    sa.UniqueConstraint('login')
    )
    # Surveys belong to a user; subCount / dueDate control participation.
    op.create_table('surveys',
    sa.Column('idSurvey', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=80), nullable=True),
    sa.Column('desc', sa.Text(), nullable=True),
    sa.Column('idUser', sa.Integer(), nullable=True),
    sa.Column('isActive', sa.Boolean(), nullable=True),
    sa.Column('subCount', sa.Integer(), nullable=True),
    sa.Column('dueDate', sa.DateTime(), nullable=True),
    sa.ForeignKeyConstraint(['idUser'], ['users.idUser'], ),
    sa.PrimaryKeyConstraint('idSurvey')
    )
    # Question payload (options etc.) is stored as PostgreSQL JSON.
    op.create_table('questions',
    sa.Column('idQuestion', sa.Integer(), nullable=False),
    sa.Column('content', sa.Text(), nullable=True),
    sa.Column('type', sa.String(length=50), nullable=True),
    sa.Column('idSurvey', sa.Integer(), nullable=True),
    sa.Column('replyContent', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.ForeignKeyConstraint(['idSurvey'], ['surveys.idSurvey'], ),
    sa.PrimaryKeyConstraint('idQuestion')
    )
    op.create_table('replies',
    sa.Column('idReply', sa.Integer(), nullable=False),
    sa.Column('idQuestion', sa.Integer(), nullable=True),
    sa.Column('reply', postgresql.JSON(astext_type=sa.Text()), nullable=True),
    sa.ForeignKeyConstraint(['idQuestion'], ['questions.idQuestion'], ),
    sa.PrimaryKeyConstraint('idReply')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop all tables created by upgrade(), children-first to satisfy FKs."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('replies')
    op.drop_table('questions')
    op.drop_table('surveys')
    op.drop_table('users')
    op.drop_table('revoked_tokens')
    # ### end Alembic commands ###
|
import os, sys, re;
from datetime import datetime, date, time
def mkMinutesFriq(minutes, startTime, stopTime):
    """Tally one hit per minute of the interval [startTime, stopTime], inclusive.

    Keys of *minutes* are offsets from startTime's minute-of-hour (they are
    not wrapped at 60); counts are incremented in place. Returns None.
    """
    span = int((stopTime - startTime).seconds / 60) + 1
    first = startTime.minute
    for offset in range(span):
        key = first + offset
        minutes[key] = minutes.get(key, 0) + 1
# Advent-of-Code-style puzzle: parse "[timestamp] Guard #N ..." log lines,
# total each guard's minutes asleep (part 1), then build a per-minute
# frequency table per guard (part 2). Lines sort chronologically because the
# timestamp is the line prefix.
data = [line.strip() for line in open("input.txt", 'r')]
#print(type(data), data.sort());
data.sort();
#print(data);
manoWar = dict();   # guard id -> total minutes asleep
mwSleep = dict();   # guard id -> datetime they fell asleep (pending wake)
curMW = '';         # id of the guard currently on duty
for l in data:
    lpart =re.search(r'\[(.*)\] (.*) (.*)', l)
    #print(lpart.groups().length);
    #print(l, "---", lpart.groups(), lpart.group,'--', lpart[1], '--',lpart[2]);
    #print(datetime.strptime(lpart[1],"%Y-%m-%d %H:%M"));
    curTime = datetime.strptime(lpart[1],"%Y-%m-%d %H:%M");
    if (lpart[2]=='falls'):
        mwSleep[curMW] = curTime;
    elif (lpart[2]=='wakes'):
        #print(curTime, mwSleep[curMW], curMW, (curTime-mwSleep[curMW]) );
        # NOTE(review): relies on `num` still holding the id from the last
        # "Guard #N" line; works only because num[1] == curMW at this point.
        manoWar[num[1]] += (curTime-mwSleep[curMW]).seconds/60;
        #print(manoWar[num[1]]);
        mwSleep.pop(curMW, None);
    else:
        num = re.search(r'#(.*) ', lpart[2]);
        curMW = num[1];
        if (not (num[1] in manoWar)) :
            manoWar[num[1]] = 0;
        #print(lpart[2], num, num[1]);
print('----', manoWar, max(manoWar.values()), );
# Part 2: per guard, count how often each minute was slept through.
p2 = dict();
sl = list(manoWar.keys())[list(manoWar.values()).index(max(manoWar.values()))];
print("Sleeper: ",sl);
for sl in manoWar.keys():
    #print(sl);
    # Guards that never slept contribute nothing; stop at the first one.
    if(manoWar[sl]==0): break;
    isPrint = False;    # True while scanning the shifts of guard `sl`
    minutes = dict();
    for l in data:
        lpart =re.search(r'\[(.*)\] (.*) (.*)', l);
        qq = lpart[2];
        if (lpart[2]=='falls'):
            if(isPrint): startTime = datetime.strptime(lpart[1],"%Y-%m-%d %H:%M");
        elif (lpart[2]=='wakes'):
            if(isPrint):
                stopTime = datetime.strptime(lpart[1],"%Y-%m-%d %H:%M");
                mkMinutesFriq(minutes, startTime, stopTime);
        else:
            num = re.search(r'#(.*) ', lpart[2]);
            curMW = num[1];
            if (curMW == sl):
                isPrint = True;
            else:
                isPrint = False;
            if (not (num[1] in manoWar)) :
                manoWar[num[1]] = 0;
            if (isPrint) :
                curTime = datetime.strptime(lpart[1],"%Y-%m-%d %H:%M");
            #    print(l, curTime);
    print("Sleeper: ",sl, minutes, max(minutes.values()));
    p2[sl] = max(minutes.values());
print();
print(p2);
# NOTE(review): the final answer appears hard-coded from a manual reading of
# the printed tables above (guard 179 * minute 30).
print("Part2 ", 179 * 30);
|
from multiprocessing import Pool, Semaphore, cpu_count
import sys
# NOTE(review): this semaphore is created but never used anywhere below.
sem = Semaphore(value=3)
def recur_fibo(n):
    """Return the n-th Fibonacci number (0-indexed) by naive recursion."""
    if n > 1:
        return recur_fibo(n - 1) + recur_fibo(n - 2)
    # Base cases: fib(0) == 0, fib(1) == 1 (and n itself for n < 0).
    return n
def mycallback(param):
    # Called by Pool.apply_async with each completed task's result;
    # just echoes it as tasks finish.
    print(param)
if __name__ == "__main__":
print("hello world")
workers = cpu_count() - 1
results = []
with Pool(processes=workers) as pool:
for x in range(0, 25):
results.append(pool.apply_async(recur_fibo, (32,), callback=mycallback))
pool.close()
pool.join()
sum = 0
for result in results:
sum += result.get()
print(sum)
|
###############################################################################
#
# blobtrackHSV.py
#
# Python OpenCV and Python Lego NXT implementing an object tracking
# webcam mounted on a Lego pan-tilt device.
# Using HSV color space for object color detection
#
# January 9, 2018
#
###############################################################################
import numpy as np
import cv2
import opencvconst as cv
def main():
    """Track a colored object with the webcam (Python 2 / OpenCV).

    Modes: 'm' mark a rectangle whose HSV range defines the target color,
    's' only draw tracking markers, 't' (TODO) drive the NXT pan-tilt rig.
    ESC quits.
    """
    global x0, y0, x1, y1, drawing, frameHSV, mode, lower, upper
    global MODE_TRACK, MODE_SHOW, MODE_MARK
    #
    # initialization
    #
    MODE_TRACK = 0 # track an object
    MODE_SHOW = 1 # only show tracking markers on video
    MODE_MARK = 2 # mark region color to track
    # Full HSV range until the user marks a region: everything matches.
    lower = np.array([0,0,0], dtype="uint8")
    upper = np.array([255,255,255], dtype="uint8")
    mode = MODE_SHOW
    mode_text = 'Show'
    drawing = False # true if mouse is pressed
    x0, y0 = -1, -1
    x1, y1 = -1, -1
    print ' m - mark color region to track\n t - track\n s - display tracking marker only\n ESC - quit'
    #
    # link event callback function
    #
    cv2.namedWindow('image', cv2.WINDOW_GUI_NORMAL+cv2.WINDOW_AUTOSIZE)
    cv2.setMouseCallback('image', mark_rect)
    #
    # setup font for overlay text
    #
    font = cv2.FONT_HERSHEY_SIMPLEX
    #
    # kernel for dilation or erosion
    #
    dilate_kernel = np.ones((3, 3), np.uint8)
    erode_kernel = np.ones((5, 5), np.uint8)
    #
    # Set up a blob detector with some parameters
    #
    det_param = cv2.SimpleBlobDetector_Params()
    #det_param.thresholdStep = 1
    #det_param.minThreshold = 0
    #det_param.maxThreshold = 0
    #det_param.minRepeatability = 10
    #det_param.minDistBetweenBlobs = 10
    det_param.filterByColor = False
    det_param.filterByCircularity = False
    det_param.filterByInertia = False
    det_param.filterByConvexity = False
    # Only filter blobs by area to reject noise and huge regions.
    det_param.filterByArea = True
    det_param.minArea = 500
    det_param.maxArea = 10000
    # SimpleBlobDetector construction differs between OpenCV 2.x and 3.x.
    if cv2.__version__.startswith("3."):
        detector = cv2.SimpleBlobDetector_create(det_param)
    else:
        detector = cv2.SimpleBlobDetector(det_param)
    #
    # open the capture device and print some
    # useful properties
    #
    cap = cv2.VideoCapture(0)
    if cap.isOpened():
        #cap.set(cv.CV_CAP_PROP_FRAME_WIDTH, 320)
        #cap.set(cv.CV_CAP_PROP_FRAME_HEIGHT, 240)
        frameWidth = cap.get(cv.CV_CAP_PROP_FRAME_WIDTH)
        frameHeight = cap.get(cv.CV_CAP_PROP_FRAME_HEIGHT)
        print 'frame: width {}, height {}'.format(frameWidth, frameHeight)
        # NOTE(review): frameCenterX/Y are only defined when the camera
        # opened successfully; the loop below NameErrors otherwise.
        frameCenterX = int(frameWidth/2)
        frameCenterY = int(frameHeight/2)
    #
    # frame capture and processing loop
    #
    while(True):
        #
        # capture a frame
        # cover to appropriate color space to improve detection
        # in different lighting conditions
        #
        ret, frame = cap.read()
        frameHSV = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
        #
        # operations on the frame done here
        #
        if mode == MODE_MARK:
            # Draw the in-progress selection rectangle (updated by mark_rect).
            cv2.line(frame, (x0, y0), (x1, y0), (0, 255, 0), 1)
            cv2.line(frame, (x1, y0), (x1, y1), (0, 255, 0), 1)
            cv2.line(frame, (x1, y1), (x0, y1), (0, 255, 0), 1)
            cv2.line(frame, (x0, y1), (x0, y0), (0, 255, 0), 1)
        else:
            #
            # calculate tracking and show markers on video
            # create NumPy arrays from the 'upper' and lower' boundaries
            # source: https://www.pyimagesearch.com/2014/08/04/opencv-python-color-detection/
            # source: https://www.learnopencv.com/color-spaces-in-opencv-cpp-python/
            #
            mask = cv2.inRange(frameHSV, lower, upper)
            #
            # find blob and calculate center of mass and deviation from
            # center of frame. this will be the tracking error
            # source: https://www.learnopencv.com/blob-detection-using-opencv-python-c/
            #
            mask = cv2.erode(mask, erode_kernel, iterations = 1)
            mask = cv2.dilate(mask, dilate_kernel, iterations = 1)
            # Detector looks for dark blobs, so invert the mask first.
            mask = cv2.bitwise_not(mask)
            keypoints = detector.detect(mask)
            #
            # draw detected blobs
            #
            if len(keypoints) == 1:
                #
                # draw a vector from frame center to block center of mass
                # calculate 'X' and 'Y' errors from center where
                #
                blobX = int(keypoints[0].pt[0])
                blobY = int(keypoints[0].pt[1])
                errX = frameCenterX - blobX
                errY = frameCenterY - blobY
                cv2.arrowedLine(frame, (frameCenterX, frameCenterY), (blobX, blobY), (0, 255, 0), 1)
                cv2.circle(frame, (blobX, blobY), int(keypoints[0].size), (0, 255, 0), 1)
            elif len(keypoints) == 0:
                #
                # red cross marker in center of frame if no
                # blob were detected
                #
                cv2.drawMarker(frame, (frameCenterX, frameCenterY), (0, 0, 255), cv2.MARKER_TILTED_CROSS)
            else:
                #
                # draw all detected blobs if more than one
                # were detected; ambiguous detection
                #
                frame = cv2.drawKeypoints(frame, keypoints, np.array([]), (0, 0, 255), cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
            if mode == MODE_TRACK:
                #
                # TODO PID tracking and NXT pan-tilt control
                # TODO some form of filtering on the error
                #
                pass
        #
        # add text and markers to the image
        #
        cv2.putText(frame, mode_text, (1, 20), font, 0.5, (0, 255, 0), 1, cv2.LINE_AA)
        #
        # display the resulting frame
        #
        cv2.imshow('mask', mask)
        cv2.imshow('image', frame)
        #
        # key input mode/command
        #
        key = cv2.waitKey(1) & 0xFF
        if key == 27:
            break
        elif key == ord('m'):
            # Reset the selection rectangle before marking a new region.
            x0,y0 = -1,-1
            x1,y1 = -1,-1
            mode_text = 'Mark'
            mode = MODE_MARK
        elif key == ord('t'):
            mode_text = 'Track'
            mode = MODE_TRACK
        elif key == ord('s'):
            mode_text = 'Show'
            mode = MODE_SHOW
        else:
            pass
    #
    # when done, release the capture
    #
    cap.release()
    cv2.destroyAllWindows()
#
# mouse event callback function
#
def mark_rect(event, x, y, flags, param):
    """Mouse callback: drag a rectangle (in MODE_MARK) over the video and,
    on release, derive the HSV lower/upper tracking bounds from that ROI.

    Communicates with main() exclusively through the module globals below.
    """
    global x0, y0, x1, y1, drawing, frameHSV, mode, lower, upper
    global MODE_TRACK, MODE_SHOW, MODE_MARK
    # Ignore mouse activity unless the user is in marking mode.
    if mode != MODE_MARK:
        return
    if event == cv2.EVENT_LBUTTONDOWN:
        drawing = True
        x0, y0 = x, y
        x1, y1 = x, y
    elif event == cv2.EVENT_MOUSEMOVE:
        if drawing == True:
            x1, y1 = x, y
    elif event == cv2.EVENT_LBUTTONUP:
        drawing = False
        #
        # convert any start (x0,y0) and end (X1,Y1) points to be
        # a top-left to bottom right pair
        # extract ROI and calculate lower and upper RGB limit tuples
        #
        # Degenerate (zero-area) or off-frame selections are discarded.
        if x0 == x1 or y0 == y1 or x0 < 0 or x1 < 0 or y0 < 0 or y1 < 0:
            return
        if x0 > x1:
            x0, x1 = x1, x0
        if y0 > y1:
            y0, y1 = y1, y0
        # Per-channel min/max over the HSV ROI become the inRange() bounds.
        roi = frameHSV[y0+1:y1, x0+1:x1]
        lower = np.array([np.amin(roi[:,:,0]), np.amin(roi[:,:,1]), np.amin(roi[:,:,2])], dtype="uint8")
        upper = np.array([np.amax(roi[:,:,0]), np.amax(roi[:,:,1]), np.amax(roi[:,:,2])], dtype="uint8")
if __name__ == '__main__':
main()
|
import re
from PIL import Image
f = Image.open("oxygen.png")
data = [f.getpixel((x,45)) for x in range(0,f.size[0],7)] # 7 is the implication on the picture,try to get the information at the gray line in the pic
value = [r for r,g,b,a in data if r==g==b]
print "".join(map(chr,map(int,re.findall("\d+","".join(map(chr,value)))))) |
#!/usr/bin/env python
# coding=utf-8
import numpy as np
def base_average_deviation(arry):
    """
    Compute the average absolute deviation about the median.

    The formal parameter is a one dimensional list; it is sorted in place.
    Returns the mean of |x - median| rounded to 2 decimals.
    """
    arry.sort()
    n = len(arry)
    # BUG FIX: use floor division for the index -- "n/2" is a float under
    # Python 3 and raises TypeError when used as a list index.
    if n % 2 == 1:
        median = arry[n // 2]
    else:
        median = (arry[n // 2] + arry[n // 2 - 1]) / 2.0
    res = abs(np.array(arry) - median)
    result = 1.0 * res.sum() / n
    return round(result, 2)
def base_standard_deviation(arry):
    """
    Compute the (population) standard deviation of a one dimensional list,
    rounded to 2 decimals.
    """
    return round(np.std(arry), 2)
def base_variance(arry):
    """
    Compute the (population) variance of a one dimensional list,
    rounded to 2 decimals.
    """
    return round(np.var(arry), 2)
def base_variation_coefficient(arry):
    """
    Compute the coefficient of variation (std / mean) of a one dimensional
    list, formatted as a percentage string with 2 decimals (e.g. '40.00%').
    """
    ratio = np.std(arry) / np.mean(arry)
    return '%.2f%%' % (ratio * 100)
def base_interquartile_range(arry):
    """
    Compute half the interquartile range (the quartile deviation) of a one
    dimensional list, rounded to 2 decimals. The list is sorted in place.
    """
    arry.sort()
    q1 = np.percentile(arry, 25)
    q3 = np.percentile(arry, 75)
    return round((q3 - q1) / 2.0, 2)
def base_percentile_range(arry):
    """
    Compute the 10th-to-90th percentile range of a one dimensional list.
    """
    p90 = np.percentile(arry, 90)
    p10 = np.percentile(arry, 10)
    return p90 - p10
|
class Solution(object):
    def convert(self, s, numRows):
        """
        :type s: str
        :type numRows: int
        :rtype: str

        Write *s* in a zigzag over *numRows* rows, then read it row by row
        (LeetCode 6). Characters are bucketed per row while the row index
        bounces between the top and bottom rows.
        """
        if numRows == 1:
            return s
        rows = [[] for _ in range(numRows)]
        row, step = 0, 1
        for ch in s:
            rows[row].append(ch)
            # Reverse direction at the top and bottom rows.
            if row == 0:
                step = 1
            elif row == numRows - 1:
                step = -1
            row += step
        return ''.join(''.join(bucket) for bucket in rows)
if __name__ == '__main__':
    # Use print as a function so this demo runs on both Python 2 and 3
    # (the original py2-only print statement is a SyntaxError on py3).
    print(Solution().convert("dlkfjdlsjfldsjfldsjkfldsjkflkdsj", 3))
from django.apps import AppConfig
class SamlIdpConfig(AppConfig):
    """App configuration for the SAML2 identity-provider app."""
    name = 'samlidp'
    # BUG FIX: Django app labels must be valid Python identifiers; the
    # original 'saml2 idp' (with a space) makes the app registry refuse to
    # load the app at startup.
    label = 'saml2_idp'
    verbose_name = 'saml2 idp'
|
#!/usr/bin/python
import mplayer
import toolbelt
#import smartlog
import mplayer
import code
class HashtagPlayer(mplayer.Player, toolbelt.interpreter.Interpreter):
    """mplayer-based playlist player with hashtag tagging and a command REPL.

    NOTE(review): the `smartlog` import is commented out at the top of the
    file, so evaluating this class body raises NameError; `hashtag` (used in
    __init__) is never imported either. Confirm the intended modules.
    """
    log = smartlog.Smartlog();
    # Cursor tracks the current playlist position; wraps at the ends.
    cursor = toolbelt.coordinates.Cursor();
    playlist = [];
    def __init__(self):
        # Quiet mplayer output, play through the default ALSA device.
        args = "-quiet -ao alsa:device=default"
        super().__init__(args);
        self.ht = hashtag.HashTagger();
        self.cursor.xwrap = True;
        # Register the REPL commands provided by this class.
        self.commands.update({
            'help' : {
                'func' : self.help,
                'args' : None,
                'opts' : { 'help' : 'print this info' }
            },
            'quit' : {
                'func' : self.quit,
                'args' : None,
                'opts' : {
                    'log' : 'Quitting program',
                    'help' : 'quit program'
                }
            }
        })
    def rate(self, rating):
        # Tag the current track with a numeric rating.
        self.ht.tagwith(
            self.current_path(),
            "rating=%d" % rating
        );
    def tagwith(self, args):
        # Tag the current track with free-form text from the REPL input.
        self.ht.tagwith(
            self.current_path(),
            self.ac.input
        );
    def single(self, l):
        # Unwrap a one-element list; returns None for an empty list.
        if isinstance(l, list):
            if len(l) > 0:
                return l[0];
        else: return l;
    def load(self, playlist):
        # Accept a single path or a list of paths; reset cursor to start.
        if isinstance(playlist, str):
            playlist = [playlist];
        self.playlist = playlist;
        self.cursor.xmax = len(playlist)-1;
        self.cursor.x = 0;
    def prev(self):
        self.cursor.left();
        self.play(pauseplay=False);
    def next(self):
        self.cursor.right();
        self.play(pauseplay=False);
    def current_path(self):
        # Path of the track the cursor currently points at.
        return self.playlist[
            self.cursor.x
        ];
    def play(self, pauseplay=True):
        # pauseplay=True toggles pause; False (re)loads the current track.
        if pauseplay: self.pause();
        else: self.loadfile(
            self.current_path()
        );
if __name__ == "__main__":
htp = HashtagPlayer();
htp.load(sys.argv[1:]);
htp.run();
|
# -*- coding: utf-8 -*-
import time
import venusian
from os import path
from irc3 import base
from irc3 import utils
from irc3 import config
from irc3.compat import asyncio
from irc3.compat import Queue
from collections import defaultdict
from .plugins.command import command
from .dec import plugin
from .dec import extend
from .dec import event
class IrcClient(asyncio.Protocol):
    """asyncio protocol to handle an irc connection"""
    def connection_made(self, transport):
        # Register this client with the server factory, keyed by peername,
        # and seed its per-connection metadata dict.
        self.closed = False
        self.transport = transport
        self.queue = Queue()
        self.uuid = self.transport.get_extra_info('peername')
        self.factory.clients[self.uuid] = self
        self.encoding = self.factory.encoding
        self.data = {}
        self.modes = set()
        self.channels = set()
        t = time.time()
        self.data.update(
            uuid=self.uuid, host=self.uuid[0],
            connection_made=t, data_received=t,
            srv=self.factory.config.servername,
            version=self.factory.config.version)
    def __getattr__(self, attr):
        # Fall back to the metadata dict for unknown attributes
        # (raises KeyError, not AttributeError, when absent).
        return self.data[attr]
    def get_nick(self):
        return self.data.get('nick')
    def set_nick(self, nick):
        # Setting the nick also refreshes the full IRC mask
        # nick!username@host (requires 'username' to be set already).
        self.data['nick'] = nick
        self.data['mask'] = utils.IrcString(
            '{nick}!{username}@{uuid[0]}'.format(**self.data))
    nick = property(get_nick, set_nick)
    @property
    def registered(self):
        # A client is registered once both NICK and USER were received.
        return bool('nick' in self.data and 'username' in self.data)
    def decode(self, data):  # pragma: no cover
        """Decode data with bot's encoding"""
        encoding = getattr(self, 'encoding', 'ascii')
        return data.decode(encoding, 'ignore')
    def data_received(self, data):  # pragma: no cover
        self.data['data_received'] = time.time()
        data = self.decode(data)
        # Prepend any partial line buffered from the previous read.
        if not self.queue.empty():
            data = self.queue.get_nowait() + data
        lines = data.split('\r\n')
        # The last element is an incomplete line: keep it for next time.
        self.queue.put_nowait(lines.pop(-1))
        for line in lines:
            self.factory.dispatch(line, client=self)
    def fwrite(self, messages, **kwargs):
        # Format one message or a list of messages (using their .tpl
        # template when present) with self available as {c}.
        kwargs['c'] = self
        if not isinstance(messages, (list, tuple)):
            fmt = getattr(messages, 'tpl', messages)
        else:
            fmt = '\r\n'.join([getattr(m, 'tpl', m) for m in messages])
        self.write(fmt.format(**kwargs))
    def write(self, data):
        if data is not None:
            self.factory.dispatch(data, client=self, iotype='out')
            data = data.encode(self.encoding)
            # Every IRC line must be CRLF-terminated on the wire.
            if not data.endswith(b'\r\n'):
                data = data + b'\r\n'
            self.transport.write(data)
    def connection_lost(self, exc):
        self.factory.log.critical('connection lost (%s): %r',
                                  id(self.transport),
                                  exc)
        self.factory.notify('connection_lost', client=self)
        # Deregister from the factory before tearing down the transport.
        del self.factory.clients[self.uuid]
        if not self.closed:
            self.closed = True
            self.close()
    def close(self):  # pragma: no cover
        if not self.closed:
            self.factory.log.critical('closing old transport (%r)',
                                      id(self.transport))
            try:
                self.transport.close()
            finally:
                self.closed = True
    def __str__(self):
        if 'nick' in self.data:
            return '{nick}'.format(**self.data)
        else:
            return 'unknown'
    __repr__ = __str__
class IrcServer(base.IrcObject):
    """An irc server.

    Holds global configuration, the registry of connected clients and the
    plugin machinery; the per-connection protocol is ``IrcClient``.
    """
    nick = None
    server = True
    plugin_category = '__irc3_plugin__'
    # referenced here so linters don't flag the imports as unused
    _pep8 = [config, extend, plugin, event, command]
    venusian = venusian
    venusian_categories = [
        'irc3d',
        'irc3d.extend',
        'irc3d.rfc1459',
        'irc3d.plugins.command',
    ]
    # capabilities advertised to clients (RPL_ISUPPORT-style values)
    server_config = {
        'NETWORK': 'freenode', 'MODES': '1', 'DEAF': 'D', 'SAFELIST': True,
        'CHANTYPES': '#', 'TARGMAX':
        'NAMES:1,LIST:1,KICK:1,WHOIS:1,PRIVMSG:1,NOTICE:1,ACCEPT:,MONITOR:',
        'CNOTICE': True, 'TOPICLEN': '390', 'EXTBAN': '$,ajrxz',
        'CALLERID': 'g', 'ETRACE': True, 'CHANLIMIT': '#:120', 'CHARSET':
        'ascii', 'PREFIX': '(ov)@+', 'INVEX': True, 'NICKLEN': '16',
        'CLIENTVER': '3.0', 'CPRIVMSG': True, 'CHANMODES':
        '', 'MAXLIST': 'bqeI:100', 'KNOCK':
        True, 'EXCEPTS': True, 'CHANNELLEN': '50', 'CASEMAPPING':
        'rfc1459', 'FNC': True, 'STATUSMSG': '@+', 'ELIST': 'CTU', 'WHOX':
        True, 'MONITOR': '100'}
    # default runtime configuration, merged over base.IrcObject.defaults
    defaults = dict(
        base.IrcObject.defaults,
        motd=path.join(path.dirname(__file__), 'motd.txt'),
        cmd='',
        host='0.0.0.0',
        port=6667,
        connection=IrcClient,
        server_config=server_config,
        servername='localhost',
    )
    def __init__(self, *args, **kwargs):
        # clients are keyed by peername (host, port) tuples
        self.clients = defaultdict(dict)
        super(IrcServer, self).__init__(*args, **kwargs)
    def connection_made(self, f):  # pragma: no cover
        """Callback fired when the listening socket is (re)created."""
        if getattr(self, 'protocol', None):
            self.protocol.close()
        try:
            f.result()
        except Exception as e:
            # creation failed: log and retry in 3 seconds
            self.log.exception(e)
            self.loop.call_later(3, self.create_connection)
        else:
            self.log.info('Started')
    def notice(self, client, message):
        """send a notice to client"""
        if client and message:
            # split long messages so each fits the configured line limit
            messages = utils.split_message(message, self.config.max_length)
            for msg in messages:
                client.fwrite(':{c.srv} NOTICE {c.nick} :{msg}', msg=msg)
    privmsg = notice
    def SIGHUP(self, *args):  # pragma: no cover
        """Stop the event loop on SIGHUP."""
        self.loop.stop()
    def SIGINT(self, *args):  # pragma: no cover
        """Stop the event loop on SIGINT."""
        self.loop.stop()
def run(argv=None):
    """Create and run an IrcServer from command line arguments (irc3d entry point)."""
    return IrcServer.from_argv(argv)
|
from django import db
from django.shortcuts import render
from django.shortcuts import redirect
from django.http import HttpResponse
from .models import *
from django.contrib import messages
from django.utils import timezone
import datetime
import json
from json import dumps
import pyrebase
# Create your views here.
# Firebase project configuration.
# NOTE(review): API key and project identifiers are hard-coded in source
# control -- consider moving them to Django settings / environment variables.
config = {
    "apiKey": "AIzaSyCz1w7z4RklhcvWlXMq0DOI63C7uT6FdkQ",
    "authDomain": "sharp-technique-304506.firebaseapp.com",
    "databaseURL": "https://sharp-technique-304506-default-rtdb.firebaseio.com/",
    "projectId": "sharp-technique-304506",
    "storageBucket": "sharp-technique-304506.appspot.com",
    "messagingSenderId": "618699052643",
    "appId": "1:618699052643:web:d7b54177c1ba464bad0a56"
}
firebase = pyrebase.initialize_app(config)
authe = firebase.auth()  # Firebase auth handle
database = firebase.database()  # Realtime Database root used by the views
def index(request):
    """Landing page view.

    A GET with a ``name`` parameter shows the registration template; a
    POST with ``feedback`` stores a DanhGia (rating/comment) record.
    Otherwise the index template is rendered.
    """
    if request.GET.get('name'):
        return render(request, 'pages/register.html')
    if request.POST.get('feedback'):
        # Normalise missing form values before persisting.
        rating = request.POST.get('rating')
        if rating is None:  # fix: was `== None`
            rating = 0
        comment = request.POST.get('comment')
        if comment == "":
            comment = "NONE"
        feedback = DanhGia(Rate=rating, Comment=comment,
                           DateTime=datetime.datetime.now())
        feedback.save()
        # removed: unused local `rateComment` (computed and discarded)
    return render(request, 'pages/index.html')
def login(request):
    """Log a company in.

    Resets the DoanhNghiepCurrent singleton to "NONE" values, then, on a
    POST with credentials, scans all accounts for a username/password
    match and copies the matching company's profile into the singleton.

    NOTE(review): passwords are compared in plain text and the lookup is
    a Python-side linear scan -- Django's auth framework and a filtered
    queryset would be the proper replacements.
    """
    displaytaikhoandoanhnghiep = TaiKhoanDoanhNghiep.objects.all()
    displaydoanhnghiep = DoanhNghiep.objects.all()
    # Reset the "current company" singleton before any login attempt.
    displaydncurrent = DoanhNghiepCurrent.objects.first()
    displaydncurrent.TenDoanhNghiep = "NONE"
    displaydncurrent.SoDienThoai = "NONE"
    displaydncurrent.Email = "NONE"
    displaydncurrent.MoTaDoanhNghiep = "NONE"
    displaydncurrent.CoCauDoanhNghiep = "NONE"
    displaydncurrent.QuyMoDoanhNghiep = "NONE"
    displaydncurrent.NganhNgheKinhDoanh = "NONE"
    displaydncurrent.DiaChiTruSoChinh = "NONE"
    displaydncurrent.save()
    if request.POST.get('username'):
        username = request.POST.get('username')
        password = request.POST.get('password')
        for resulttaikhoandoanhnghiep in displaytaikhoandoanhnghiep:
            if (resulttaikhoandoanhnghiep.TenTaiKhoan == username and
                    resulttaikhoandoanhnghiep.MatKhau == password):
                # Copy the matched company's profile into the singleton.
                for resultdn in displaydoanhnghiep:
                    if resultdn.MaDoanhNghiep == resulttaikhoandoanhnghiep.MaTaiKhoan:
                        displaydncurrent.TenDoanhNghiep = resultdn.TenDoanhNghiep
                        displaydncurrent.SoDienThoai = resultdn.SoDienThoai
                        displaydncurrent.Email = resultdn.Email
                        displaydncurrent.MoTaDoanhNghiep = resultdn.MoTaDoanhNghiep
                        displaydncurrent.CoCauDoanhNghiep = resultdn.CoCauDoanhNghiep
                        displaydncurrent.QuyMoDoanhNghiep = resultdn.QuyMoDoanhNghiep
                        displaydncurrent.NganhNgheKinhDoanh = resultdn.NganhNgheKinhDoanh
                        displaydncurrent.DiaChiTruSoChinh = resultdn.DiaChiTruSoChinh
                        displaydncurrent.save()
                return render(request, 'pages/account.html')
    return render(request, 'pages/login.html')
def register(request):
    """Render the account registration page."""
    template = 'pages/register.html'
    return render(request, template)
def transport(request):
    """Render the transport page for the current company singleton."""
    current_company = DoanhNghiepCurrent.objects.first()
    context = {"doanhnghiep": current_company}
    return render(request, 'pages/transport.html', context)
def test(request):
    """Debug view: mirror Firebase ThongTinVanChuyen rows into the local DB.

    Every record under Data/ThongTinVanChuyen is copied into a
    ThongTinVanChuyen model instance and saved.
    """
    # removed: three unused querysets fetched and never read
    FBThongTinVanChuyen = database.child('Data').child('ThongTinVanChuyen').get().val()
    dh = "DH2"  # hard-coded order id displayed by the test template
    for key, value in FBThongTinVanChuyen.items():
        # NOTE(review): the X/Y coordinates coming from Firebase are ignored
        # and replaced by fixed test coordinates -- confirm intentional.
        newTTVC = ThongTinVanChuyen(key,
                                    value['DH'],
                                    value['ThoiGian'],
                                    20.9984205,
                                    105.8170838,
                                    value['TrangThai'])
        newTTVC.save()
    return render(request, "pages/test.html", {"dh": dh})
def viewHistory(request):
    """Show the delivery history / playback for one order (DH).

    POST ``speed`` sets the playback value; POST ``DH`` selects an order
    and renders the history template with its pickup/delivery addresses.
    """
    displaydonhang = DonHang.objects.all()
    speed = "500"  # default playback speed for the template
    # fix: DHHistory was unbound (NameError) when neither form field was
    # posted but the final HttpResponse still referenced it
    DHHistory = ""
    if request.POST.get('speed'):
        DHHistory = request.POST.get('speed')
    diadiemnhanhang = ""
    diadiemgiaohang = ""
    if request.POST.get('DH'):
        DHHistory = request.POST.get('DH')
        # look up the selected order's pickup/delivery addresses
        for resultDH in displaydonhang:
            if resultDH.MaDonHang == DHHistory:
                diadiemnhanhang = resultDH.DiaDiemNhanHang
                diadiemgiaohang = resultDH.DiaDiemGiaoHang
        return render(request, "pages/viewHistory.html",
                      {"DHHistory": DHHistory, "speed": speed,
                       "diadiemnhanhang": diadiemnhanhang,
                       "diadiemgiaohang": diadiemgiaohang})
    return HttpResponse(DHHistory)
def map(request):
    """Main map view: sync orders (DonHang) from Firebase, then show the
    current company's in-progress ("Đang Giao") order on the map.

    NOTE(review): rebuilds the whole DonHang table on every request
    (delete + re-import from Firebase) -- expensive and race-prone.
    """
    displaytaikhoandoanhnghiep = TaiKhoanDoanhNghiep.objects.all()
    displaydonhang = DonHang.objects.all()
    displaycurrentuser = DoanhNghiepCurrent.objects.first()
    dh = ""
    diadiemnhanhang = ""
    diadiemgiaohang = ""
    hanghoa = ""
    nhanhang = ""
    giaohang = ""
    trangthai = ""
    # fix: these two were unbound (NameError at render time) whenever
    # Firebase returned no orders
    nhanhangtime = ""
    giaohangtime = ""
    DonHang.objects.all().delete()
    FBDonHang = database.child('Data').child('DonHang').get().val()
    for key, value in FBDonHang.items():
        newDH = DonHang()
        newDH.MaDonHang = value['IDDonHang']
        # resolve the customer name to its DoanhNghiep foreign key
        for resulttkdn in displaytaikhoandoanhnghiep:
            if resulttkdn.MaDoanhNghiep.TenDoanhNghiep == value['IDKhachHang']:
                newDH.IDKhachHang = resulttkdn.MaDoanhNghiep
        newDH.IDDoiTac = value['IDDoiTac']
        newDH.LoaiHangHoa = value['LoaiHangHoa']
        newDH.TrongTai = value['TrongTai']
        newDH.DiaDiemNhanHang = value['DiaDiemNhanHang']
        newDH.NgayNhanHang = datetime.datetime.strptime(value['NgayNhanHang'], "%Y-%m-%d").strftime("%Y-%m-%d")
        newDH.GioNhanHang = datetime.datetime.strptime(value['GioNhanHang'], "%H:%M:%S").strftime("%H:%M:%S")
        newDH.DiaDiemGiaoHang = value['DiaDiemGiaoHang']
        newDH.NgayGiaoHang = datetime.datetime.strptime(value['NgayGiaoHang'], "%Y-%m-%d").strftime("%Y-%m-%d")
        newDH.GioGiaoHang = datetime.datetime.strptime(value['GioGiaoHang'], "%H:%M:%S").strftime("%H:%M:%S")
        newDH.ChuY = value['ChuY']
        newDH.TrangThai = value['TrangThai']
        newDH.TrangThaiChiTiet = value['TrangThaiChiTiet']
        # remember the last imported order's details for the template
        hanghoa = newDH.LoaiHangHoa
        nhanhang = newDH.DiaDiemNhanHang
        nhanhangtime = newDH.NgayNhanHang + " " + newDH.GioNhanHang
        giaohang = newDH.DiaDiemGiaoHang
        giaohangtime = newDH.NgayGiaoHang + " " + newDH.GioGiaoHang
        trangthai = newDH.TrangThaiChiTiet
        newDH.save()
    # pick the current company's in-progress order, if any
    for resultDH in displaydonhang:
        if ((resultDH.IDKhachHang.TenDoanhNghiep == displaycurrentuser.TenDoanhNghiep)
                and (resultDH.TrangThai == "Đang Giao")):
            diadiemnhanhang = resultDH.DiaDiemNhanHang
            diadiemgiaohang = resultDH.DiaDiemGiaoHang
            dh = resultDH.MaDonHang
    strAdd1 = "NONE"
    if request.GET.get('select'):
        strSelected = request.GET.get('select')
        return HttpResponse(strSelected)
    return render(request, 'pages/map.html', {"strAdd1": strAdd1, "dh": dh,
                  "diadiemnhanhang": diadiemnhanhang, "diadiemgiaohang": diadiemgiaohang,
                  "hanghoa": hanghoa, "nhanhang": nhanhang, "nhanhangtime": nhanhangtime,
                  "giaohang": giaohang, "giaohangtime": giaohangtime,
                  "trangthai": trangthai})
def directionConfirm(request):
    """Confirmation page for a newly entered order.

    Builds a draft order (DonHangMomentary) from the POSTed form fields and
    renders the confirmation template; the draft is only promoted to a real
    DonHang later (see ``history``).
    """
    displaydonhang = DonHang.objects.all()
    addDonHang = DonHangMomentary.objects.latest()
    displaytaikhoandoanhnghiep = TaiKhoanDoanhNghiep.objects.all()
    displaydncurrent = DoanhNghiepCurrent.objects.first()
    if (request.POST.get('loaihanghoa')):
        # next order id = "DH" + (current row count + 1)
        # NOTE(review): not guaranteed unique if orders can be deleted.
        madonhang = "DH" + str(displaydonhang.count()+1)
        loaihanghoa = request.POST.get('loaihanghoa')
        trongtai = request.POST.get('trongtai')
        diadiemnhanhang = request.POST.get('diadiemnhanhang')
        ngaynhanhang = request.POST.get('date1')
        gionhanhang = request.POST.get('time1')
        # thoigiannhanhang = "NONE"
        diadiemgiaohang = request.POST.get('diadiemgiaohang')
        ngaygiaohang = request.POST.get('date2')
        giogiaohang = request.POST.get('time2')
        # thoigiangiaohang = "NONE"
        chuy = request.POST.get('yeucaukhac')
        tkkhachhang = request.POST.get('order')
        # doanhnghiepduocchon = DoanhNghiep()
        addDonHang.MaDonHang = madonhang
        # resolve the current company to its DoanhNghiep foreign key
        for resulttkdn in displaytaikhoandoanhnghiep:
            if (resulttkdn.MaDoanhNghiep.TenDoanhNghiep == displaydncurrent.TenDoanhNghiep):
                addDonHang.IDKhachHang = resulttkdn.MaDoanhNghiep
        addDonHang.IDDoiTac = "NONE"
        addDonHang.LoaiHangHoa = loaihanghoa
        addDonHang.TrongTai = trongtai
        addDonHang.DiaDiemNhanHang = diadiemnhanhang
        addDonHang.NgayNhanHang = ngaynhanhang
        addDonHang.GioNhanHang = gionhanhang
        addDonHang.DiaDiemGiaoHang = diadiemgiaohang
        addDonHang.NgayGiaoHang = ngaygiaohang
        addDonHang.GioGiaoHang = giogiaohang
        addDonHang.ChuY = chuy
        addDonHang.TrangThai = "Chưa Giao"
        addDonHang.TrangThaiChiTiet = "Nhận đơn"
        addDonHang.save()
        displaydonhang = DonHang.objects.all().order_by("MaDonHang")
        messages.success(request, "Lên đơn thành công")
        return render(request, 'pages/direction_confirm.html', {"diadiemnhanhang":diadiemnhanhang, "diadiemgiaohang":diadiemgiaohang,
            "madonhang":madonhang, "loaihanghoa":loaihanghoa, "trongtai":trongtai,
            "ngaynhanhang":ngaynhanhang, "gionhanhang":gionhanhang, "ngaygiaohang":ngaygiaohang,
            "giogiaohang":giogiaohang, "chuy":chuy, "tkkhachhang":tkkhachhang,
            "donhangadd":addDonHang, "doanhnghiep":displaydncurrent})
    return render(request, 'pages/direction_confirm.html')
def account(request):
    """Account page.

    POST ``username``/``password``: authenticate and show that account.
    POST ``back``: return to the page for the given account name without
    re-checking the password.  In both cases the number of orders placed
    by the account's company is counted for display.
    """
    displaytaikhoandoanhnghiep = TaiKhoanDoanhNghiep.objects.all()
    displaydoanhnghiep = DoanhNghiep.objects.all()
    displaydonhang = DonHang.objects.all()
    intSoChuyenHang = 0  # number of orders placed by the matched company
    if(request.POST.get('username')):
        username = request.POST.get('username')
        password = request.POST.get('password')
        # NOTE(review): plain-text password comparison in a linear scan.
        for resulttaikhoandoanhnghiep in displaytaikhoandoanhnghiep:
            if (resulttaikhoandoanhnghiep.TenTaiKhoan == username and
                resulttaikhoandoanhnghiep.MatKhau == password):
                # return redirect('/account/')
                for resultdonhang in displaydonhang:
                    if (resultdonhang.IDKhachHang == resulttaikhoandoanhnghiep.MaDoanhNghiep):
                        intSoChuyenHang = intSoChuyenHang + 1
                return render(request, 'pages/account.html', {"doanhnghiep":displaydoanhnghiep,
                    "taikhoan":resulttaikhoandoanhnghiep, "tkduocchon":username, "soChuyenHang":intSoChuyenHang})
    if(request.POST.get('back')):
        back = request.POST.get('back')
        for resulttaikhoandoanhnghiep in displaytaikhoandoanhnghiep:
            if (resulttaikhoandoanhnghiep.TenTaiKhoan == back):
                for resultdonhang in displaydonhang:
                    if (resultdonhang.IDKhachHang == resulttaikhoandoanhnghiep.MaDoanhNghiep):
                        intSoChuyenHang = intSoChuyenHang + 1
                return render(request, 'pages/account.html', {"doanhnghiep":displaydoanhnghiep,
                    "taikhoan":resulttaikhoandoanhnghiep, "tkduocchon":back, "soChuyenHang":intSoChuyenHang})
    return render(request, 'pages/account.html')
def history(request):
    """Order-history page.

    On POST of ``history_new`` the pending draft held in DonHangMomentary
    is copied into the latest DonHang row, mirrored to Firebase, and the
    sorted order list is rendered for the posting account.  Otherwise the
    plain history template is rendered.
    """
    # removed: ~70 lines of dead commented-out code and an unused queryset
    displaytaikhoandoanhnghiep = TaiKhoanDoanhNghiep.objects.all()
    displaydonhang = DonHang.objects.all().order_by("MaDonHang")
    displaydncurrent = DoanhNghiepCurrent.objects.first()
    addDonHangMomentary = DonHangMomentary.objects.latest()
    addDonHang = DonHang.objects.latest()
    if request.POST.get('history_new'):
        # Promote the momentary (draft) order into the DonHang table.
        addDonHang.MaDonHang = addDonHangMomentary.MaDonHang
        addDonHang.IDKhachHang = addDonHangMomentary.IDKhachHang
        addDonHang.IDDoiTac = addDonHangMomentary.IDDoiTac
        addDonHang.LoaiHangHoa = addDonHangMomentary.LoaiHangHoa
        addDonHang.TrongTai = addDonHangMomentary.TrongTai
        addDonHang.DiaDiemNhanHang = addDonHangMomentary.DiaDiemNhanHang
        addDonHang.NgayNhanHang = addDonHangMomentary.NgayNhanHang
        addDonHang.GioNhanHang = addDonHangMomentary.GioNhanHang
        addDonHang.DiaDiemGiaoHang = addDonHangMomentary.DiaDiemGiaoHang
        addDonHang.NgayGiaoHang = addDonHangMomentary.NgayGiaoHang
        addDonHang.GioGiaoHang = addDonHangMomentary.GioGiaoHang
        addDonHang.ChuY = addDonHangMomentary.ChuY
        addDonHang.TrangThai = addDonHangMomentary.TrangThai
        addDonHang.save()
        # Mirror the new order to the Firebase realtime database.
        data = dict()
        data['IDDonHang'] = str(addDonHang.MaDonHang)
        data['IDKhachHang'] = str(addDonHang.IDKhachHang)
        data['IDDoiTac'] = str(addDonHang.IDDoiTac)
        data['LoaiHangHoa'] = str(addDonHang.LoaiHangHoa)
        data['TrongTai'] = str(addDonHang.TrongTai)
        data['DiaDiemNhanHang'] = str(addDonHang.DiaDiemNhanHang)
        data['NgayNhanHang'] = str(addDonHang.NgayNhanHang)
        data['GioNhanHang'] = str(addDonHang.GioNhanHang)
        data['DiaDiemGiaoHang'] = str(addDonHang.DiaDiemGiaoHang)
        data['NgayGiaoHang'] = str(addDonHang.NgayGiaoHang)
        data['GioGiaoHang'] = str(addDonHang.GioGiaoHang)
        data['ChuY'] = str(addDonHang.ChuY)
        data['TrangThai'] = str(addDonHang.TrangThai)
        data['TrangThaiChiTiet'] = str(addDonHang.TrangThaiChiTiet)
        database.child("Data").child("DonHang").child(addDonHang.MaDonHang).set(data)
        username = request.POST.get('history_new')
        for resulttaikhoandoanhnghiep in displaytaikhoandoanhnghiep:
            if resulttaikhoandoanhnghiep.TenTaiKhoan == username:
                tkkhachhang = resulttaikhoandoanhnghiep.MaDoanhNghiep
                displaydonhang = DonHang.objects.all().order_by("MaDonHang")
                messages.success(request, "Lên đơn thành công")
                return render(request, 'pages/history.html', {"donhang":displaydonhang,
                    "doanhnghiepduocchon":tkkhachhang, "donhangadd":addDonHang, "tkduocchon":username,
                    "doanhnghiep":displaydncurrent})
    return render(request, 'pages/history.html', {"doanhnghiep":displaydncurrent, "donhang":displaydonhang,
        "donhangadd":addDonHang})
def profile(request):
    """Show the current company's profile; optionally re-authenticate.

    When credentials are POSTed, the matching company's data is copied
    into the DoanhNghiepCurrent singleton (same logic as ``login``).

    NOTE(review): plain-text password comparison -- see ``login``.
    """
    # removed: dead commented-out singleton-reset code
    displaytaikhoandoanhnghiep = TaiKhoanDoanhNghiep.objects.all()
    displaydoanhnghiep = DoanhNghiep.objects.all()
    displaydncurrent = DoanhNghiepCurrent.objects.first()
    if request.POST.get('username'):
        username = request.POST.get('username')
        password = request.POST.get('password')
        for resulttaikhoandoanhnghiep in displaytaikhoandoanhnghiep:
            if (resulttaikhoandoanhnghiep.TenTaiKhoan == username and
                    resulttaikhoandoanhnghiep.MatKhau == password):
                for resultdn in displaydoanhnghiep:
                    if resultdn.MaDoanhNghiep == resulttaikhoandoanhnghiep.MaTaiKhoan:
                        displaydncurrent.TenDoanhNghiep = resultdn.TenDoanhNghiep
                        displaydncurrent.SoDienThoai = resultdn.SoDienThoai
                        displaydncurrent.Email = resultdn.Email
                        displaydncurrent.MoTaDoanhNghiep = resultdn.MoTaDoanhNghiep
                        displaydncurrent.CoCauDoanhNghiep = resultdn.CoCauDoanhNghiep
                        displaydncurrent.QuyMoDoanhNghiep = resultdn.QuyMoDoanhNghiep
                        displaydncurrent.NganhNgheKinhDoanh = resultdn.NganhNgheKinhDoanh
                        displaydncurrent.DiaChiTruSoChinh = resultdn.DiaChiTruSoChinh
                        displaydncurrent.save()
                        # re-read so the template sees the fresh row
                        displaydncurrent = DoanhNghiepCurrent.objects.first()
    return render(request, 'pages/profile.html', {"doanhnghiep": displaydncurrent})
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 13 20:56:56 2017
@author: hina
"""
import networkx
asin = '0805047905'

# read the copurchase graph (context manager guarantees the file is closed)
with open("amazon-books-copurchase.edgelist", 'rb') as fhr:
    copurchaseGraph = networkx.read_weighted_edgelist(fhr)

# get degree centrality of given asin
dcl = networkx.degree(copurchaseGraph)
dc = dcl[asin]
print("Degree Centrality:", dc)

# get ego network of given asin at depth 1
ego = networkx.ego_graph(copurchaseGraph, asin, radius=1)
print("Ego Network:",
      "Nodes =", ego.number_of_nodes(),
      "Edges =", ego.number_of_edges())

# get clustering coefficient of given asin
cc = networkx.average_clustering(ego)
print("Clustering Coefficient:", round(cc, 2))

# get one-hop neighbors of given asin
# fix: Graph.neighbors() returns an iterator in networkx >= 2.0, so it
# must be materialized before len() can be taken
ngbs = list(ego.neighbors(asin))
print("Number of one-hop neighbors:", len(ngbs))

# use island method on ego network: keep only edges at/above the threshold
threshold = 0.90
egotrim = networkx.Graph()
for n1, n2, e in ego.edges(data=True):
    if e['weight'] >= threshold:
        # fix: the positional attr_dict argument was removed in networkx 2;
        # edge attributes must be passed as keyword arguments
        egotrim.add_edge(n1, n2, **e)
print("Trimmed Ego Network:",
      "Threshold=", threshold,
      "Nodes =", egotrim.number_of_nodes(),
      "Edges =", egotrim.number_of_edges())
|
from datetime import datetime
from app.models.connect import db
class User(db.Model):
    """ORM model for the ``users`` table."""
    __tablename__ = "users"
    id = db.Column(db.String(36), primary_key=True)
    name = db.Column(db.String(100))
    # fix: pass the callable itself, not datetime.now() -- calling it at
    # class-definition time would freeze the timestamp at import time and
    # store the same value for every row
    create_time = db.Column(db.DateTime, default=datetime.now)
    last_update_time = db.Column(db.DateTime, onupdate=datetime.now, default=datetime.now)
    status = db.Column(db.String(10), nullable=False)
    def __init__(self):
        self.id = ""
        self.name = ""
        self.status = ""
|
#!/usr/bin/env python
import sys
import os
from libs.models.math import MainMatrix
from libs.io.file_rw import FileManager
def main():
    """Fill NaN cells of a CSV matrix with the mean of their neighbours.

    Usage: interpolate <path_to_file>
    """
    # --- validate the command line before touching the filesystem ---
    if len(sys.argv) != 2:
        called_as = ' '.join(map(os.path.basename, (sys.argv)))
        sys.exit(f"Wrong program invocation `{called_as}`\n"
                 f"Please call the program as `interpolate <path_to_file>`")
    path = sys.argv[1]
    if not os.path.exists(path):
        sys.exit(f"Wrong file path/name: {path}\n"
                 f"Please check and re-run the program")
    # --- load the input matrix ---
    io_manager = FileManager(path_to_file=path)
    matrix = MainMatrix(data_in=io_manager.read_matrix_from_file())
    # --- replace each NaN with the mean of its nearest neighbours ---
    for nan_cell in matrix.find_nans():
        for neighbor_coords in nan_cell.pairs:
            if neighbor_coords:
                nan_cell.nearest_neighbors.append(
                    matrix.get_element(*neighbor_coords))
        nan_cell.set_nn_mean_value()
        matrix.set_element(i=nan_cell.i, j=nan_cell.j,
                           value=nan_cell.nn_mean_value)
    # --- persist the interpolated matrix ---
    out_filename = "interpolated_test_data.csv"
    io_manager.write_matrix_to_file(
        matrix=matrix.matrix, path_to_file=out_filename)
    print(f"Successfully created file {out_filename} under {os.getcwd()}")
if __name__ == "__main__":
    sys.exit(main())
|
# Print an ascending then a descending right triangle of asterisks.
n = 3
for i in range(n):
    print("*" * (i + 1))
for i in range(n):
    print("*" * (n - i))


def rmvsplt(text, word):
    """Return *text* with every occurrence of *word* removed and the
    result stripped of leading/trailing whitespace.

    fix: renamed the parameter/variable `str`, which shadowed the builtin.
    """
    return text.replace(word, "").strip()


# removed: dead commented-out duplicate of rmvsplt
sample = "raghav isgood football player"
k = rmvsplt(sample, "raghav")
print(k)
|
#Author - Ajay Amarnath
#Email - amarnathajay@gmail.com
# Animated, panning plot of magnitude-vs-heading samples read from data.txt.
import matplotlib.pylab as plotter
from mpl_toolkits.axes_grid1 import host_subplot
import matplotlib.animation as animation
# --- figure / axes setup -------------------------------------------------
fig = plotter.figure()
fig.suptitle("Data example for a panning graph", fontsize = 12)
animatedGraph = fig.add_subplot(1, 1, 1)
animatedGraph.set_title('Magnitude vs heading')
animatedGraph.set_xlim(0, 20)
animatedGraph.set_ylim(0, 10)
animatedGraph.grid(True)
animatedGraph.set_xlabel("Hdng (deg)")
animatedGraph.set_ylabel("Magnitude")
xmin = 0.0
xmax = 20.0  # visible window width on the x axis
x = []  # samples plotted so far
y = []
i = 0   # index of the next sample to plot (advanced by update())
p011, = animatedGraph.plot(x,y,'b-', label="Magnitude")
# --- load the whole data file up front -----------------------------------
# each non-empty line is "<heading> <magnitude>"
dataStream = open("data.txt", "r").read()
extractedData = dataStream.split('\n')
xData = []
yData = []
for eachLine in extractedData:
    if len(eachLine) > 1:
        xFile,yFile = eachLine.split(' ')
        xData.append(int(xFile))
        yData.append(float(yFile))
        #print(xFile, yFile)
def update(self):
    """FuncAnimation callback: append the next sample and pan the window.

    ``self`` is the frame number supplied by FuncAnimation (unused).
    """
    global x
    global y
    global xData
    global yData
    global i
    x.append(int(xData[i]))
    y.append(float(yData[i]))
    print(x[i], y[i])
    p011.set_data(x, y)
    # pan the x window once the trace reaches the right edge
    if x[i] >= xmax - 1.00:
        p011.axes.set_xlim(x[i]-xmax+1.0,x[i]+1.0)
    i += 1
    return p011
#for eachNum in xData:
#    print(xData[eachNum], yData[eachNum])
simulation = animation.FuncAnimation(fig, update, blit=False, frames=359, interval=100, repeat=False)
plotter.show()
|
#!/usr/bin/env ipython
import os
import sys
import struct
import PIL.Image as Image
def check_dir(dir_path):
    """Create ``dir_path`` (including parents) if it does not exist yet."""
    # fix: use os.makedirs instead of shelling out to `mkdir -p`
    # (portable, no shell involved)
    if not os.path.isdir(dir_path):
        os.makedirs(dir_path)
def check_file(file_name, url):
    """Download and gunzip ``file_name`` into mnist_data/ if it is missing.

    NOTE(review): relies on external `wget` and `gunzip` via os.system and
    temporarily chdirs into mnist_data -- not portable or thread safe.
    """
    if os.path.isfile('mnist_data/%s' % file_name) == False:
        os.chdir('mnist_data')
        os.system('wget -O %s.gz \'%s\'' % (file_name, url))
        os.system('gunzip %s.gz' % (file_name))
        os.chdir('..')
def binary_2_int(b):
    """Decode a 4-byte big-endian signed integer (MNIST IDX header field).

    fix: the original reversed the bytes by hand and unpacked with the
    native byte order, which is only correct on little-endian hosts;
    '>i' requests big-endian explicitly and is portable.
    """
    return struct.unpack('>i', bytes(b))[0]
class Labels(object):
    """Iterator over the labels of an MNIST IDX label file."""
    def __init__(self, label_file):
        self.label_f = open(label_file, 'rb')
        # the IDX magic number for a label file is 2049
        msb = self.label_f.read(4)
        msb = binary_2_int(msb)
        if msb != 2049:
            raise Exception('wrong label msb')
        cnt = self.label_f.read(4)
        self.cnt = binary_2_int(cnt)
        if self.cnt <= 0:
            raise Exception('label cnt less then 0')
        self.cur_idx = 0
    def __iter__(self, ):
        return self
    def next(self, ):
        """Return the next label (0-9) or raise StopIteration."""
        if self.cur_idx >= self.cnt:
            raise StopIteration()
        else:
            self.cur_idx += 1
            label = ord(self.label_f.read(1))
            return label
    # fix: Python 3 iterators require __next__; alias keeps py2 `next` too
    __next__ = next
class Images(object):
    """Iterator over the images of an MNIST IDX image file."""
    def __init__(self, data_file):
        self.data_f = open(data_file, 'rb')
        # the IDX magic number for an image file is 2051
        msb = self.data_f.read(4)
        msb = binary_2_int(msb)
        if msb != 2051:
            raise Exception('wrong data msb')
        cnt = self.data_f.read(4)
        self.cnt = binary_2_int(cnt)
        if self.cnt <= 0:
            raise Exception('label cnt less then 0')
        row_cnt = self.data_f.read(4)
        self.row_cnt = binary_2_int(row_cnt)
        if self.row_cnt <= 0:
            raise Exception('row cnt less then 0')
        col_cnt = self.data_f.read(4)
        self.col_cnt = binary_2_int(col_cnt)
        if self.col_cnt <= 0:
            raise Exception('col cnt less then 0')
        self.cur_idx = 0
    def __iter__(self, ):
        return self
    def next(self, ):
        """Return the next image as a PIL 'L' image, or raise StopIteration."""
        if self.cur_idx >= self.cnt:
            raise StopIteration()
        else:
            self.cur_idx += 1
            raw_bytes = self.data_f.read(self.row_cnt * self.col_cnt)
            img = Image.frombytes('L', (self.row_cnt, self.col_cnt), raw_bytes)
            return img
    # fix: Python 3 iterators require __next__; alias keeps py2 `next` too
    __next__ = next
def unpack(label_file, data_file, out_dir):
    """Pair labels with images and save each image as <idx>_<label>.jpg."""
    check_dir(out_dir)
    labels = Labels(label_file)
    images = Images(data_file)
    if labels.cnt != images.cnt:
        raise Exception('labels cnt is not equal images cnt')
    idx = 0
    for label, img in zip(labels, images):
        idx += 1
        img.save('%s/%d_%d.jpg' % (out_dir, idx, label))
        if idx % 1000 == 0:
            # fix: py2-only `print idx` statement -> function form,
            # which behaves identically on py2 and py3
            print(idx)
def get_data():
    """Download (if needed) and unzip the four MNIST IDX files into mnist_data/."""
    check_dir('mnist_data')
    check_file('train-images-idx3-ubyte', 'http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz')
    check_file('train-labels-idx1-ubyte', 'http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz')
    check_file('t10k-images-idx3-ubyte', 'http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz')
    check_file('t10k-labels-idx1-ubyte', 'http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz')
def main():
    """Fetch the MNIST data and unpack train/test images to ./images/."""
    get_data()
    unpack('./mnist_data/train-labels-idx1-ubyte', './mnist_data/train-images-idx3-ubyte', './images/train')
    unpack('./mnist_data/t10k-labels-idx1-ubyte', './mnist_data/t10k-images-idx3-ubyte', './images/test')
if __name__ == '__main__':
    main()
|
import datetime
__author__ = 'carlos'
class Piece:
    """A fragment of a Content: either a nested Content or plain text."""
    def __init__(self, data, index=None):
        """
        :param data: Must be a Content object or a string
        :param index: optional position of the piece inside its parent
        """
        self.index = index
        if isinstance(data, Content):
            self.content = data
            self.text = None
        elif isinstance(data, str):
            self.content = None
            self.text = data
        else:
            # fix: raising a plain string is illegal in Python 3
            # (exceptions must derive from BaseException)
            raise TypeError('Data argument only can be a Content object or a string')
class Content:
    """A titled piece of content composed of ordered Piece objects."""
    def __init__(self, identif, title):
        """
        :param identif: content identifier
        :param title: content title
        """
        # fix: the identifier parameter was ignored and the builtin ``id``
        # function was stored instead (``self.id = id``)
        self.id = identif
        self.title = title
        self.created = datetime.datetime.now()
        self.pieces = []
    def add_piece(self, index, piece):
        """Insert *piece* at position *index*."""
        self.pieces.insert(index, piece)
    def append_piece(self, piece):
        """Append *piece* at the end."""
        self.pieces.append(piece)
    def __eq__(self, c):
        return self.id == c.id
    def __ne__(self, c):
        # fix: ``is not`` compares identity and gives wrong answers for
        # equal-but-distinct id values; use value inequality
        return self.id != c.id
    def __hash__(self):
        # fix: defining __eq__ alone makes the class unhashable in py3,
        # but Category stores Content objects in a set
        return hash(self.id)
class Category(Content):
    """A Content that additionally groups a set of child contents."""
    def __init__(self, identif, title):
        super().__init__(identif, title)
        self.contents = set()  # child Content objects (deduplicated)
    def add(self, content):
        """Add *content* to this category (no-op if already present)."""
        self.contents.add(content)
class Factory:
    """Factory for Content subclasses, selected by class name."""
    @staticmethod
    def instantiate(c_id, title, c_type):
        """Create an instance of *c_type* ('Category') with the given id/title.

        :raises ValueError: if *c_type* is not a known type name
        """
        if c_type == Category.__name__:
            return Category(c_id, title)
        else:
            # fix: raising a plain string is illegal in Python 3, and the
            # message referenced the builtin ``type`` instead of ``c_type``
            raise ValueError('Unknown type ' + c_type)
|
class CryptoquoteAlreadyEncryptedError(Exception):
    """Raised when a Cryptoquote that is already encrypted is encrypted again."""
class ImproperKeyError(Exception):
    """Raised when a key is not 26 unique uppercase alphabetical letters."""
class EmptyText(Exception):
    """Raised when a text of length zero is supplied."""
class TooShort(Exception):
    """Raised when a text does not reach the required minimum length."""
class UnknownGraph(Exception):
    """Raised when the graph parameter is neither single nor bigram frequency."""
|
# fix: the original line had an unterminated string literal
# (`open("randomDataRAW.txt, "r")`) -- a SyntaxError; close the filename
# quote and drop the stray semicolon.
data = open("randomDataRAW.txt", "r")
from PIL import Image
import piexif
def metadata(img_path):
    """Write a fixed set of DJI-style EXIF/GPS tags into the image at
    ``img_path`` and re-save the file in place.

    NOTE(review): all tag values are hard-coded test data (future capture
    date, GPS date stamp "1999:99:99 99:99:99" is not a valid date) --
    confirm this is intentional before production use.
    """
    # 0th IFD: primary image metadata
    zeroth_ifd = {piexif.ImageIFD.Make: u"DJI",
                  piexif.ImageIFD.XResolution: (72, 1),
                  piexif.ImageIFD.YResolution: (72, 1),
                  piexif.ImageIFD.Software: u"piexif",
                  piexif.ImageIFD.Model: u"FC6310",
                  }
    # Exif IFD: capture settings (rationals are (numerator, denominator))
    exif_ifd = {piexif.ExifIFD.DateTimeOriginal: u"2099:09:29 10:10:10",
                piexif.ExifIFD.LensMake: u"LensMake",
                piexif.ExifIFD.Sharpness: 65535,
                piexif.ExifIFD.ExposureTime : (1,320),
                piexif.ExifIFD.LensSpecification: ((1, 1), (1, 1), (1, 1), (1, 1)),
                piexif.ExifIFD.ISOSpeedRatings : 100,
                piexif.ExifIFD.FocalLength : (9,1),
                piexif.ExifIFD.MeteringMode: 2,
                piexif.ExifIFD.MaxApertureValue: (297,100),
                piexif.ExifIFD.ExposureBiasValue : (0,1) ,
                piexif.ExifIFD.FocalLengthIn35mmFilm : 24,
                # piexif.ExifIFD.ExifVersion : 48,50 ,
                }
    # GPS IFD: position as ((deg,1),(min,1),(sec,1)) rational triples
    gps_ifd = {piexif.GPSIFD.GPSVersionID: (2, 0, 0, 0),
               piexif.GPSIFD.GPSAltitudeRef: 1,
               piexif.GPSIFD.GPSAltitude: (60,1),
               piexif.GPSIFD.GPSDateStamp: u"1999:99:99 99:99:99",
               piexif.GPSIFD.GPSLatitude : ((22,1),(34,1),(34,1)),
               piexif.GPSIFD.GPSLongitude : ((72,1),(32,1),(22,1))
               }
    # 1st IFD: thumbnail metadata
    first_ifd = {piexif.ImageIFD.Make: u"DJI",
                 piexif.ImageIFD.XResolution: (40, 1),
                 piexif.ImageIFD.YResolution: (40, 1),
                 piexif.ImageIFD.Software: u"piexif"
                 }
    exif_dict = {"0th":zeroth_ifd, "Exif":exif_ifd, "GPS":gps_ifd, "1st":first_ifd}
    exif_bytes = piexif.dump(exif_dict)
    # re-save the image with the new EXIF payload (overwrites the file)
    im = Image.open(img_path)
    im.save(img_path, exif=exif_bytes)
# https://www.awaresystems.be/imaging/tiff/tifftags/privateifd/exif.html |
import hashlib
import random
import json as simplejson
import webapp2
from taskstopipeline.helpers import *
from taskstopipeline.models.sharelink import ShareLink
from taskstopipeline.view_models import *
from jinja2 import Environment, PackageLoader
from Crypto.Cipher import AES
from base_handler import BaseHandler
class ShareHandler(BaseHandler):  # This is the '/share/<route_share_key>' handler
    """Render a read-only, shared view of a task list.

    The share key in the URL is resolved to a ShareLink entity; when found,
    the referenced list (and its groups) is loaded on behalf of the list's
    owner and rendered with the share template, otherwise an error page is
    shown.
    """
    def get(self, route_share_key):
        req_share_key = route_share_key
        existing_share_links = ShareLink.query(ShareLink.share_key == req_share_key).fetch()
        if len(existing_share_links) == 0:
            # unknown share key -> generic error page
            template_values = {'error_message': 'list not available'}
            self.render_template('error.html', template_values)
            return
        share_link = existing_share_links[0]
        assert isinstance(share_link, ShareLink)
        vm = ShareHandlerVM(user_id=share_link.user_id)  # (user_id=share_link.user_id)
        vm.selected_list_id = share_link.list_id
        # materialize the shared list on behalf of the owning user
        task_list = TaskList(user_id=share_link.user_id)
        task_list.id = share_link.list_id
        vm.selected_task_list = task_list.get_list_info()
        vm.selected_task_list.share_key = share_link.share_key
        vm.selected_task_list.task_groups = vm.selected_task_list.get_groups()
        vm.is_share = True
        assert isinstance(self.request, webapp2.Request)
        # build the absolute share URL and hand everything to the template
        vm.template_values['share_url'] = self.request.host_url + '/share/' + vm.selected_task_list.share_key
        vm.template_values['selected_task_list'] = vm.selected_task_list
        vm.template_values['groups_to_show'] = vm.selected_task_list.task_groups
        vm.template_values['is_share'] = vm.is_share
        # self.render_template('index5.html', vm.template_values)
        self.render_template('list_share.html', vm.template_values)
class CreateShareLink(webapp2.RequestHandler):  # /share/create — called from javascript
    """Create (or reuse) a share link for one of the caller's task lists and
    return it as JSON: {"share_key": <16-char key>}.

    Renders error.html when the caller's credentials are missing or the
    requested list does not belong to them.
    """

    def get(self):
        import os  # local import: os.urandom supplies share-key entropy

        user_id_encoded = self.request.get('user_id')
        req_list_id_sent = self.request.get('req_list')

        # Reuse an existing share link for this list if one was already created.
        existing_share_links = ShareLink.query(ShareLink.list_id == req_list_id_sent).fetch()
        if len(existing_share_links) > 0:
            json_data = {"share_key": existing_share_links[0].share_key}
            self.response.headers.add_header('content-type', 'application/json', charset='utf-8')
            self.response.out.write(simplejson.dumps(json_data))
            return

        user_id_decoded = user_id_decoder(encoded_uid=user_id_encoded)
        existing_credential_models = UserModel.query(UserModel.user_id == user_id_decoded).fetch()
        # Guard against a missing user record as well as a record without a
        # credential — indexing an empty fetch() result would raise IndexError.
        if not existing_credential_models or existing_credential_models[0].credential is None:
            template_values = {'error_message': 'credentials not found'}
            env = Environment(loader=PackageLoader('taskstopipeline', 'views'))
            view = env.get_template('error.html')
            self.response.out.write(view.render(template_values))
            return
        credential = existing_credential_models[0].credential

        # Fetch the user's task lists from the Google Tasks API.
        http = httplib2.Http()
        http = credential.authorize(http=http)
        service = build('tasks', 'v1', http=http)
        raw_task_lists = service.tasklists().list().execute()
        available_lists = []
        for raw_task_list in raw_task_lists['items']:
            tl = TaskList(user_id=user_id_decoded)
            tl.id = raw_task_list['id']
            tl.title = raw_task_list['title']
            available_lists.append(tl)

        # The requested list must actually belong to the requesting user.
        req_list = next((tl for tl in available_lists if tl.id == req_list_id_sent), None)
        if req_list is None:
            template_values = {"error_message": "requested list doesn't exist under user"}
            env = Environment(loader=PackageLoader('taskstopipeline', 'views'))
            view = env.get_template('error.html')
            self.response.out.write(view.render(template_values))
            return

        # Generate a 16-char url-safe share key. os.urandom is a
        # cryptographically strong entropy source; the previous
        # random.getrandbits is a predictable PRNG and unsuitable for
        # access tokens. The altchars keep '+'/'/' out of the key.
        new_share_key = base64.b64encode(hashlib.sha256(os.urandom(32)).digest(),
                                         random.choice(['rA', 'aZ', 'gQ', 'hH', 'hG', 'aR', 'DD'])).rstrip('=')[:16]
        new_share_link = ShareLink(share_key=new_share_key, user_id=user_id_decoded, list_id=req_list.id)
        new_share_link.put()

        json_data = {"share_key": new_share_key}
        self.response.headers.add_header('content-type', 'application/json', charset='utf-8')
        self.response.out.write(simplejson.dumps(json_data))
class DeleteShareLink(webapp2.RequestHandler):
    """Delete every share link for a given list that belongs to the caller,
    answering with a JSON result message."""

    def _write_json(self, payload):
        # Serialize payload and emit it with a JSON content type.
        self.response.headers.add_header('content-type', 'application/json', charset='utf-8')
        self.response.out.write(simplejson.dumps(payload))

    def get(self):
        user_id_encoded = self.request.get('user_id')
        list_id_sent = self.request.get('list_id')

        # Fetch every share link attached to the list.
        share_links = ShareLink.query(ShareLink.list_id == list_id_sent).fetch()
        if not share_links:
            self._write_json({"result_message": "no share keys found"})
            return

        # Decrypt the caller's user id (AES over base64, inverse of the encoder).
        cipher = AES.new(CRYPTO_SALT)
        unquoted = urllib.unquote_plus(user_id_encoded)
        user_id_decoded = cipher.decrypt(base64.b64decode(unquoted)).rstrip(PADDING)

        credential_models = UserModel.query(UserModel.user_id == user_id_decoded).fetch()
        if not credential_models:
            self._write_json({"result_message": "no credentials found"})
            return
        credential = credential_models[0]
        assert isinstance(credential, UserModel)

        # Only delete links that actually belong to the caller.
        delete_count = 0
        for share_link in share_links:
            if share_link.user_id == credential.user_id:
                assert isinstance(share_link, ShareLink)
                share_link.key.delete()
                delete_count += 1

        self._write_json({"result_message": "keys deleted: " + str(delete_count)})
        return
from django.contrib import admin
from .models import CompanyId, MetricEvent, PermissionBuffer
# Make the app's models editable through the Django admin site.
for model in (CompanyId, MetricEvent, PermissionBuffer):
    admin.site.register(model)
|
#!/usr/bin/env python
import rospy
import numpy as np
from ackermann_msgs.msg import AckermannDriveStamped
from sensor_msgs.msg import LaserScan
class Safety_Node:
    """Safety controller: watches a forward window of the laser scan and,
    once an obstacle comes closer than min_distance, overrides drive
    commands with a stopping command on the safety mux input."""

    def __init__(self):
        # subscribe to Ackermann commands and the laser scanner
        rospy.Subscriber("ackermann_cmd_mux/output", AckermannDriveStamped, self.ackermann_cmd_input_callback)
        rospy.Subscriber("/scan", LaserScan, self.laser_callback)
        # publish overriding commands on the safety channel of the mux
        self.cmd_pub = rospy.Publisher('/vesc/ackermann_cmd_mux/input/safety', AckermannDriveStamped, queue_size=10)
        self.drive = True          # latched to False on first obstacle detection
        self.min_distance = 0.35   # meters

    def laser_callback(self, msg):
        """Average a 200-beam window near the front of the scan and trip
        the safety stop when it is closer than min_distance."""
        ranges = msg.ranges
        # Same index window as the original loop (beams len-600 .. len-401).
        # NOTE(review): assumes the scan has at least 600 beams — confirm
        # for this lidar; shorter scans would wrap to negative indices.
        window = [ranges[i] for i in range(len(ranges) - 600, len(ranges) - 400)]
        if not window:
            return  # guard against an empty window (avoids ZeroDivisionError)
        average = sum(window) / len(window)
        # If the average front reading is closer than min_distance, stop.
        if average < self.min_distance:
            self.drive = False  # latched: never re-enabled until restart
            print(ranges[540])
            self.ackermann_cmd_input_callback(AckermannDriveStamped())

    def ackermann_cmd_input_callback(self, msg):
        """Command hook: while driving is disabled, replace the incoming
        command with a braking command and publish it on the safety topic."""
        if self.drive == False:
            msg.header.stamp = rospy.Time.now()
            msg.drive.speed = -.1
            msg.drive.acceleration = -1.5
            msg.drive.steering_angle = 0.0
            print("stop")
            self.cmd_pub.publish(msg)
            # Follow up with an effectively-zero reverse speed so the mux
            # keeps the safety channel engaged after braking.
            msg.drive.speed = -0.000000001
            self.cmd_pub.publish(msg)
        else:
            print("safe")
if __name__ == "__main__":
    # Register the node with the ROS master, start the safety controller,
    # and hand control to the ROS event loop.
    rospy.init_node("safety_controller")
    safety_node = Safety_Node()
    rospy.spin()
import json
import cv2
import numpy as np
from utils import canvas
def selectCroppingRefPoints(event, x, y, flags, param):
    """Mouse callback for the "Crop design" window.

    Records click/release positions in the global refPt list and keeps
    cropPt updated with the bounding box of all reference points. Draws
    feedback rectangles on the module-level `image` (which the caller must
    set before the callback fires).
    """
    # grab references to the global variables
    global refPt, cropPt
    # Record the mouse position when the left mouse button is clicked
    if event == cv2.EVENT_LBUTTONDOWN:
        # refPt is always a list (module init / .clear() reset), so the old
        # `if refPt is None` branch was dead code — simply append.
        refPt.append((x, y))
    # Record the mouse position when the left mouse button is released
    elif event == cv2.EVENT_LBUTTONUP:
        refPt.append((x, y))
        # The crop box is the smallest rectangle enclosing every reference
        # point recorded so far.
        reference_points = np.asarray(refPt)
        mins = reference_points.min(axis=0)
        maxs = reference_points.max(axis=0)
        cropPt = [(mins[0], mins[1]), (maxs[0], maxs[1])]
        # Green: area that will be cropped; yellow: last selected rectangle.
        cv2.rectangle(image, cropPt[0], cropPt[1], (0, 255, 0), 2)
        cv2.rectangle(image, refPt[-1], refPt[-2], (0, 255, 255), 2)
        # Show the current status of the crop design
        cv2.imshow("Crop design", image)
# Global containers for the reference points used by the cropping task:
#   refPt  - every mouse press/release position recorded so far
#   cropPt - two corners (top-left, bottom-right) of the crop bounding box
# ToDo: Find out if it is possible to avoid the use of global variables in selectCroppingRefPoints()
refPt = []
cropPt = []
def cropImage(image):
    """Interactively select a crop region on `image` in the "Crop design" window.

    Keys:
      'r' - reset: return None (the caller reloads the image and retries)
      's' - save: return the selected region (only once a crop box exists)
      'c' - cancel: leave the loop (function then implicitly returns None)

    Returns the cropped region of interest, or None on reset/cancel.
    """
    # grab references to the global variables
    global refPt, cropPt
    clone = image.copy()
    cv2.namedWindow("Crop design", cv2.WINDOW_NORMAL)
    cv2.setMouseCallback("Crop design", selectCroppingRefPoints)
    # keep looping until the user resolves the crop with 'r', 's' or 'c'
    while True:
        # display the image and wait for a keypress
        cv2.imshow("Crop design", image)
        key = cv2.waitKey(1) & 0xFF
        # if the 'r' key is pressed, reset the cropping region
        if key == ord("r"):
            return None
        elif key == ord("s"):
            # if there are two reference points, crop the region of interest
            # from the untouched copy of the image and return it; otherwise
            # ignore 's' and keep looping. (A `break` that followed the
            # return here was unreachable and has been removed.)
            if len(cropPt) == 2:
                roi = clone[cropPt[0][1]:cropPt[1][1], cropPt[0][0]:cropPt[1][0]]
                return roi
        # if the 'c' key is pressed, break from the loop
        elif key == ord("c"):
            # NOTE(review): cancelling returns None, which makes the caller's
            # `while roi is None` loop reopen the image — confirm intended.
            break
# Load the configuration file.
with open('./conf.json', 'r') as myfile:
    loaded_json = json.load(myfile)

# Initialize the canvas that we will use for our design.
canvas_data = loaded_json['canvas_config']
mosaic = canvas((canvas_data['blocks_per_row'], canvas_data['blocks_per_col']), canvas_data["valid_pieces"])
mosaic.visualizeColorPalette()

# Add every configured pixel-art design to the canvas.
designs_data = loaded_json['designs']
for element in designs_data:
    # Pull out the settings for the current design.
    spec = designs_data[element]
    size = (spec['size'][0], spec['size'][1])
    pos_x, pos_y = spec['position'][0], spec['position'][1]
    keep_white_blocks = spec['keep white']

    # cropImage returns None when the user presses reset; reload the source
    # image and try again until a region is actually selected.
    # ToDo: doing the reset inside the cropping function would be nicer, but
    # redrawing the reference rectangles did not work when tried that way.
    roi = None
    while roi is None:
        image = cv2.imread(spec['path'])  # module-level `image`: read by the mouse callback
        roi = cropImage(image)

    # Reset the shared reference-point containers for the next design.
    refPt.clear()
    cropPt.clear()

    # Parse the cropped region into a design and place it on the canvas.
    design = mosaic.parseDesign(roi, size)
    mosaic.addDesign((pos_x, pos_y), design, keep_white_blocks)
    mosaic.visualize()

# Fill the remaining canvas, show the final result, and persist it.
mosaic.fill()
mosaic.visualize()
mosaic.save()
# close all open windows
cv2.destroyAllWindows()
import pyautogui
from PIL import ImageGrab
# Auto-clicker: watch four fixed screen positions on the row y=900 and click
# any whose red channel reads exactly 1 (the trigger colour in the target app).
y = 900
columns = (555, 620, 690, 750)
while True:
    # Grab the screen once per iteration instead of once per pixel probe:
    # four full-screen grabs per loop were wasteful and sampled the screen
    # at four different instants.
    screen = ImageGrab.grab()
    for x in columns:
        if screen.getpixel((x, y))[0] == 1:
            pyautogui.click(x, y)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.