text stringlengths 8 6.05M |
|---|
# Tutorial script: floats, Decimal, Fraction, complex numbers, and string operations.
a = 1.13454546466666776744
print (a)
print (type(a))
# Decimal gives arbitrary-precision decimal arithmetic
from decimal import*
getcontext().prec = 30
f= 10/3
print(f)
d= Decimal(10)/Decimal(3)
print(d)
from fractions import*
frac1 = Fraction(6,9)
frac2 = Fraction(5,10)
frac3 = frac1 + frac2
print(frac1)
print(frac2)
print(frac3)
print(type(frac3))
c = complex(2,5)
print(2+5j)
strHowKteam = """HowKteam.com\nFree education\nShare to be better"""
print(strHowKteam)
# string repetition (multiply a string by an int)
strB = strHowKteam*5
print(strB)
strA = "HowkTeam"
strB = "K"
#strB = strA[1:len(strA)]
#strB = strA[None : 5]
# take the whole string
#strB = strA[None : None]
#strB = strA[:]
#strB = strA[None : 5:2]
#strB = strA[None : None:2]
# negative step: walk backwards from the end down to (but excluding) index 5
strB = strA[None : 5:-1]
strC = strB in strA
# membership test: True or False
print(strB)
print(strC)
strAA = "69"
strBB = 69
strCC = int(strAA) + strBB
print(strCC)
# int() truncates the fractional part
strAAA = int(6.9)
print(strAAA)
strAAAA = "HowKteam.com"
# replace the character at index 1 with "0"
strAAAA = strAAAA[None:1] + "0" + strAAAA[2:None]
print(strAAAA)
print(hash(strAAAA))
k = 'Kteam'
result = f'{k} - Free education'
print(result)
# format() fills {1} and {0} by positional index
r = '1 : {1}, 2 : {0}'.format(111,222)
print(r)
team = 'hteam'
# b = team.center(5)
bteam = team.capitalize()
print(bteam)
team1 = ' co gi hot'
# join() puts team1 between every pair of list elements
bteam1 = team1.join(['1','2','3'])
bteam2 = team1.strip()
bteam3 = team1.strip('c')
bteam4 = team1.replace('o','kteam',2)
teamString = 'cbco gi hotcbc'
# lstrip('cb') removes any leading run of 'c'/'b' characters
teamStringB = teamString.lstrip('cb')
print(bteam1)
print(bteam2)
print(bteam3)
print(bteam4)
print(teamStringB)
# split with a maxsplit limit
splitA = 'how kteam free education'
splitB = splitA.split(' ',1)
splitC = splitA.split(' ',2)
print(splitA)
print(splitB)
print(splitC)
# partition: (head, separator, tail) around the first match
partitionA = 'How kteam free education'
partitionB = partitionA.partition('k')
print(partitionA)
print(partitionB)
|
#!/usr/bin/python3
"""
Copyright (c) 2015, Joshua Saxe
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name 'Joshua Saxe' nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL JOSHUA SAXE BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import os
import requests
from auxiliary import readids
def deletecuckootasks():
    """
    Delete every previously dispatched task from the cuckoo REST API.

    Reads the dispatched task ids from the `taskids` file (via readids) and
    issues one delete request per id, reporting any that fail.
    Returns:
        None
    """
    dispatched, _failed = readids()
    base_url = "http://localhost:8090/tasks/delete/"
    auth_headers = {"Authorization": "Bearer WTAIn0HHtRIUlR9_uJkJDg"}
    for task_id in dispatched:
        response = requests.get(base_url + str(task_id), headers=auth_headers)
        if response.status_code != 200:
            print ("[-] Task with id: " + str(task_id) + " could not be deleted")
def deletesamplesdb():
    """
    Delete the `samples.db` file that sits next to this script.

    Improvements over the original: builds the path portably with os.path
    instead of a manual "/" join, and removes the file with os.remove rather
    than shelling out to `rm -f` (same best-effort semantics: a missing file
    is not an error).
    Returns:
        None
    """
    # Get absolute path to samples.db (usually in current directory)
    dbpath = os.path.join(os.path.dirname(__file__), 'samples.db')
    try:
        os.remove(dbpath)
    except FileNotFoundError:
        # `rm -f` semantics: deleting a non-existent file is fine.
        pass
def wipe():
    """
    Reset all local and remote state.

    Removes the `samples.db` file created by the shelve database library,
    then deletes every cuckoo task listed in the `taskids` file.
    Returns:
        None
    """
    # Database first, then the cuckoo tasks it referenced.
    deletesamplesdb()
    deletecuckootasks()
|
#!/usr/bin/python
"""
Tests the algorithms for detecting where the black line, on a white background,
is in relation to the piCamera's frame.
"""
# Run this program from the command line as it allows the window to close
# when 'q' is pressed
# https://www.python.org/dev/peps/pep-0238/
# The future division statement, spelled "from __future__ import division", will change the / operator to mean true division
# throughout the module.
# This is needed for the row and column calculations for rectangle arrays
# to prevent rounding down to zero.
from __future__ import division
# Import needed libraries such as picamera, OpenCV and NumPy
import cv2
import numpy as np
import time
from picamera import PiCamera
from picamera.array import PiRGBArray
# Initialize the camera
CAMERA_WIDTH = 640
CAMERA_HEIGHT = 480
camera = PiCamera()  # Initialize camera
# resolution defaults to the display's resolution
camera.resolution = (CAMERA_WIDTH, CAMERA_HEIGHT)
# Can get framerates up to 60fps 640x480
camera.framerate = 10  # If not set then defaults to 30fps
camera.vflip = True
# http://picamera.readthedocs.io/en/release-1.10/api_array.html
# PiRGBArray produces a 3-dimensional RGB array with the dimensions
# (rows, columns, plane), e.g. (CAMERA_HEIGHT, CAMERA_WIDTH, 3).
rawCapture = PiRGBArray(camera, size=(CAMERA_WIDTH, CAMERA_HEIGHT))
# Allow the camera time to warmup
time.sleep(0.1)
# Initialize MeanValues so every rectangle starts as white (255)
ROW_LENGTH = 10  # Number of rectangles per row for black/white analysis
COL_LENGTH = 10  # Number of rectangles per column for black/white analysis
MeanValues = np.ones([ROW_LENGTH, COL_LENGTH]) * 255
# Capture frames from the camera
# http://picamera.readthedocs.io/en/release-1.10/api_camera.html
for frame in camera.capture_continuous(
        rawCapture, format="rgb", use_video_port=True):
    # grab the raw NumPy array representing the image
    image = frame.array
    # Convert to gray scale
    gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
    # Simple thresholding of the gray scale image: pixels above 100 become
    # 255 (white), the rest become 0 (black).
    ret, threshImg = cv2.threshold(gray, 100, 255, cv2.THRESH_BINARY)
    # Create a frame for the lower middle part of the video.
    # Top left is [0, 0] in [rows, columns].
    # BUG FIX: the original subtracted 1.0, producing float slice bounds,
    # which numpy rejects as indices; subtract the int 1 instead.
    startRow = int(0.90 * CAMERA_HEIGHT)
    stopRow = int(1.00 * CAMERA_HEIGHT) - 1
    startCol = int(0.40 * CAMERA_WIDTH)
    stopCol = int(0.60 * CAMERA_WIDTH) - 1
    lowerMiddleSquare = threshImg[startRow:stopRow, startCol:stopCol]
    # show the frame
    cv2.imshow("ThresholdFrame", threshImg)
    # Capture a key press; waits the argument in ms for any keyboard event.
    key = cv2.waitKey(1) & 0xFF
    # Mean brightness of each grid rectangle (black = 0, white = 255).
    # N.B. May want to make this for several rows to track the line further
    # into the horizon and allow for sharp 90deg turns.
    for j in range(COL_LENGTH):
        for i in range(ROW_LENGTH):
            # Image region of interest (ROI); int slice bounds (see above).
            startRow = int((j / COL_LENGTH) * CAMERA_HEIGHT)
            stopRow = int(((j + 1) / COL_LENGTH) * CAMERA_HEIGHT) - 1
            startCol = int((i / ROW_LENGTH) * CAMERA_WIDTH)
            stopCol = int(((i + 1) / ROW_LENGTH) * CAMERA_WIDTH) - 1
            square = threshImg[startRow:stopRow, startCol:stopCol]
            # Mean of all the values in the rectangular "square" array
            MeanValues[j, i] = int(np.mean(square))
    # Find index of the first minimum mean value per row.
    # As it is the first, two fully black rectangles could lead to errors.
    # NOTE(review): the slice 0:(ROW_LENGTH - 1) excludes the last rectangle
    # of each row from the argmin -- confirm that is intentional.
    smallSquare = np.argmin(MeanValues[0, 0:(ROW_LENGTH - 1)])
    print(
        "The rectangle with the most black pixels in top row is: ",
        str(smallSquare))
    smallSquare = np.argmin(MeanValues[(COL_LENGTH - 1), 0:(ROW_LENGTH - 1)])
    print(
        "The rectangle with the most black pixels in bottom row is: ",
        str(smallSquare))
    # http://picamera.readthedocs.io/en/release-1.10/api_array.html
    # Clear the stream in preparation for the next frame
    rawCapture.truncate(0)
    # if the 'q' key was pressed break from the loop
    if key == ord("q"):
        break
# Destroy all windows created
# Can use cv2.destroyWindow(frameName) to destroy a specific window
cv2.destroyAllWindows()
|
#!/usr/bin/env /proj/sot/ska/bin/python
#############################################################################################
# #
# ede_plot.py: plotting evolution of EdE for ACIS S and HRC S grating obs #
# #
# author: t. isobe (tisobe@cfa.harvard.edu) #
# #
# last update: Jun 19, 2018 #
# #
#############################################################################################
import os
import sys
import re
import string
import math
import time
import numpy
import matplotlib as mpl
# Use a non-interactive backend when run as a script (no display needed).
if __name__ == '__main__':
    mpl.use('Agg')
#
#--- reading directory list
#
path = '/data/mta/Script/Grating/EdE_trend/Scripts/house_keeping/dir_list_py'
f = open(path, 'r')
data = [line.strip() for line in f.readlines()]
f.close()
# Each config line looks like "<value> : <variable-name>"; exec assigns each
# value (e.g. data_dir, web_dir, bin_dir, mta_dir) at module scope.
# NOTE(review): exec on file contents assumes the config file is trusted.
# (Python 2 statement syntax.)
for ent in data:
    atemp = re.split(':', ent)
    var = atemp[1].strip()
    line = atemp[0].strip()
    exec "%s = %s" %(var, line)
#
#--- append a path to a private folder to python directory
#
sys.path.append(bin_dir)
sys.path.append(mta_dir)
#
#--- converTimeFormat contains MTA time conversion routines
#
import convertTimeFormat as tcnv
import mta_common_functions as mcf
import robust_linear as robust
#
#--- temp writing file name (unique-ish via the current epoch second)
#
rtail = int(time.time())
zspace = '/tmp/zspace' + str(rtail)
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
def run_ede_plot():
    """
    Create EdE trend plots for every *_data file found in data_dir.
    Input:  none (reads data_dir / zspace module globals)
    Output: plot files written by ede_plots
    """
    # List the data files into a scratch file, then read the names back.
    os.system('ls ' + data_dir + '*_data > ' + zspace)
    fh = open(zspace, 'r')
    fnames = [entry.strip() for entry in fh.readlines()]
    fh.close()
    mcf.rm_file(zspace)
    for fname in fnames:
        ede_plots(fname)
#---------------------------------------------------------------------------------------------------
#-- ede_plots: plotting time evolution of EdE ---
#---------------------------------------------------------------------------------------------------
def ede_plots(file):
    """
    Plot the time evolution of EdE for one data file.
    Input:  file --- a file name of data
    Output: *_plot.png / *_low_res_plot.png --- 200dpi and 40dpi versions
            (the low-res plot is used on the intro page)
    """
    xdata, ydata, yerror = read_data(file)
    xmin, xmax, ymin, ymax = set_min_max(ydata)
    # NOTE(review): the computed ymax is overridden with a fixed ceiling,
    # presumably to keep all panels on a common scale -- confirm before changing.
    ymax = 2100
    label = create_label(file)
    out1, out2 = set_out_names(file)
    # One high-resolution plot and one low-resolution thumbnail.
    for outname, dpi in ((out1, 200), (out2, 40)):
        plot_single_panel(xmin, xmax, ymin, ymax, xdata, ydata, yerror,
                          'Time (year)', 'EdE', label, outname, resolution=dpi)
#---------------------------------------------------------------------------------------------------
#-- set_min_max: set plotting range ---
#---------------------------------------------------------------------------------------------------
def set_min_max(ydata):
    """
    Compute the plotting range.
    Input:  ydata --- dependent-variable values
    Output: [xmin, xmax, ymin, ymax]
    """
    # x runs from 1999 (mission start era) to one year past the current year.
    xmin = 1999
    xmax = tcnv.currentTime()[0] + 1
    # Pad y by 10% below and 20% above the data spread, clamping at zero.
    lo = min(ydata)
    hi = max(ydata)
    spread = hi - lo
    ymin = max(0, lo - 0.1 * spread)
    ymax = hi + 0.2 * spread
    return [xmin, xmax, ymin, ymax]
#---------------------------------------------------------------------------------------------------
#-- create_label: create a label for the plot from the data file ---
#---------------------------------------------------------------------------------------------------
def create_label(file):
    """
    Build the plot label from a data file name.
    Input:  file --- data file name such as '.../acis_hetg_1234_data'
    Output: out  --- label text, e.g. 'Line: 1.234keV : ACIS/HETG'
    """
    # Work from the basename; if the path ends in '/', fall back to the
    # full name.
    base = file.split('/')[-1]
    if base == '':
        base = file
    parts = base.split('_')
    inst = parts[0].upper()
    grat = parts[1].upper()
    digits = parts[2]
    # '1234' -> '1.234': the first digit is the integer part of the keV value.
    energy = digits[0] + '.' + digits[1] + digits[2] + digits[3]
    return 'Line: ' + energy + 'keV : ' + inst + '/' + grat
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
def set_out_names(file):
    """
    Derive the two output png paths for a data file.
    Input:  file --- input data file name (ends in '_data')
    Output: [out1, out2] --- full-resolution and low-resolution plot paths
    """
    base = file.split('/')[-1]
    full_res = web_dir + 'EdE_Plots/' + base.replace('_data', '_plot.png')
    low_res = web_dir + 'EdE_Plots/' + base.replace('_data', '_low_res_plot.png')
    return [full_res, low_res]
#---------------------------------------------------------------------------------------------------
#-- read_data: read data from a given file ---
#---------------------------------------------------------------------------------------------------
def read_data(file):
    """
    read data from a given file
    Input:  file       --- input file name
    Output: date_list  --- a list of dates (fractional year)
            ede_list   --- a list of ede values
            error_list --- a list of computed ede errors
    """
    f = open(file, 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()
    date_list  = []
    ede_list   = []
    error_list = []
    for ent in data:
        # raw string avoids the invalid '\s' escape warning in the original
        atemp = re.split(r'\s+', ent)
        #
        #--- skip header / non-numeric lines
        #
        if mcf.chkNumeric(atemp[0]) == False:
            continue
        fwhm = float(atemp[2])
        ferr = float(atemp[3])
        ede  = float(atemp[4])
        date = atemp[5]
        fyear = change_time_format_fyear(date)
        date_list.append(fyear)
        ede_list.append(ede)
        #
        #--- the error of EdE is computed using FWHM and its error value
        #
        error = math.sqrt(ede*ede* ((ferr*ferr) / (fwhm*fwhm)))
        error_list.append(error)
    return [date_list, ede_list, error_list]
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
def change_time_format_fyear(date):
    """
    Convert a '<yyyy>:<ddd>:<hh>:<mm>:<ss>' time string to a fractional year.
    Input:  date  --- time string, e.g. '2018:170:12:00:00'
    Output: fyear --- fractional year (float)
    """
    atemp = re.split(':', date)
    year = int(atemp[0])
    yday = int(atemp[1])
    hh = int(atemp[2])
    mm = int(atemp[3])
    ss = int(atemp[4])
    # Day count of the year, leap-year aware.
    if tcnv.isLeapYear(year) == 1:
        base = 366.0
    else:
        base = 365.0
    # BUG FIX: the seconds term used 886400.0; a day has 86400 seconds.
    fyear = year + (yday + hh/24.0 + mm/1440.0 + ss/86400.0) / base
    return fyear
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------------
def remove_extreme(x, y):
    """
    Drop (x, y) pairs whose y value lies more than 3.5 sigma from the mean of y.
    Input:  x --- independent values
            y --- dependent values (same length as x)
    Output: [x, y] --- lists with the outlier pairs removed
    """
    x = numpy.array(x)
    y = numpy.array(y)
    avg = numpy.mean(y)
    std = numpy.std(y)
    bot = avg - 3.5 * std
    top = avg + 3.5 * std
    # BUG FIX: the original wrapped the boolean mask in a list
    # (ind = [(y > bot) & (y < top)]), which modern numpy rejects as an
    # index; use the mask array directly.
    ind = (y > bot) & (y < top)
    return [list(x[ind]), list(y[ind])]
#---------------------------------------------------------------------------------------------------
#-- plot_single_panel: plot a single data set on a single panel ---
#---------------------------------------------------------------------------------------------------
def plot_single_panel(xmin, xmax, ymin, ymax, xdata, ydata, yerror, xname, yname, label, outname, resolution=100):
    """
    plot a single data set on a single panel
    Input:  xmin    --- min x
            xmax    --- max x
            ymin    --- min y
            ymax    --- max y
            xdata   --- independent variable
            ydata   --- dependent variable
            yerror  --- error in y axis
            xname   --- x axis label
            yname   --- y axis label
            label   --- a text to indicate what is plotted
            outname --- the name of output file
            resolution-- the resolution of the plot in dpi
    Output: png plot named <outname>
    """
#
#--- keep only the entries that convert cleanly to float
#
    xcln = []
    ycln = []
    for k in range(0, len(xdata)):
        try:
            val1 = float(xdata[k])
            val2 = float(ydata[k])
            xcln.append(val1)
            ycln.append(val2)
        except (TypeError, ValueError):
            # narrowed from a bare except: only conversion failures are expected
            continue
    xdata = xcln
    ydata = ycln
#
#--- fit line --- use robust method, after dropping >3.5-sigma outliers
#
    [xt, yt] = remove_extreme(xdata, ydata)
    (sint, slope, serr) = robust.robust_fit(xt, yt)
    lslope = '%2.3f' % (round(slope, 3))
#
#--- close everything opened before
#
    plt.close('all')
#
#--- set font size
#
    mpl.rcParams['font.size'] = 12
    props = font_manager.FontProperties(size=9)
#
#--- set plotting range
#
    ax = plt.subplot(111)
    ax.set_autoscale_on(False)
    ax.set_xbound(xmin,xmax)
    ax.set_xlim(xmin=xmin, xmax=xmax, auto=False)
    ax.set_ylim(ymin=ymin, ymax=ymax, auto=False)
#
#--- plot data
#
    plt.plot(xdata, ydata, color='blue', marker='o', markersize=4.0, lw =0)
#
#--- plot error bar
#
    plt.errorbar(xdata, ydata, yerr=yerror, lw = 0, elinewidth=1)
#
#--- plot fitted line
#
    start = sint + slope * xmin
    stop  = sint + slope * xmax
    plt.plot([xmin, xmax], [start, stop], color='red', lw = 2)
#
#--- label axes
#
    plt.xlabel(xname)
    plt.ylabel(yname)
#
#--- annotate with the fitted slope, centered horizontally near the top
#
    xdiff = xmax - xmin
    xpos = xmin + 0.5 * xdiff
    ydiff = ymax - ymin
    ypos = ymax - 0.08 * ydiff
    label = label + ': Slope: ' + str(lslope)
    plt.text(xpos, ypos, label)
#
#--- set the size of the plotting area in inch (width: 10.0in, height: 5.0in)
#--- (plt.gcf() replaces matplotlib.pyplot.gcf(), which relied on the pylab
#--- star-import exposing the matplotlib name)
#
    fig = plt.gcf()
    fig.set_size_inches(10.0, 5.0)
#
#--- save the plot in png format
#
    plt.savefig(outname, format='png', dpi=resolution)
#--------------------------------------------------------------------
#
#--- pylab plotting routine related modules
#
# NOTE(review): these imports deliberately live at the bottom so that
# mpl.use('Agg') at the top of the module runs before pyplot is imported;
# moving them to the top would break the headless backend selection.
from pylab import *
import matplotlib.pyplot as plt
import matplotlib.font_manager as font_manager
import matplotlib.lines as lines
if __name__ == '__main__':
    run_ede_plot()
|
from flask import Flask, render_template, request, jsonify, url_for, session, current_app
from app import app
import json, os
def ret_url(filename):
    # Absolute path of *filename* inside the app's static folder.
    # NOTE(review): if static_folder is absolute, os.path.join drops root_path.
    return os.path.join(current_app.root_path, current_app.static_folder, filename)
def get_static_json_file(filename):
    """Load and return the parsed JSON content of a file in the static folder.

    BUG FIX: the original passed an open() result straight to json.load and
    never closed it; the context manager closes the handle deterministically.
    """
    with open(ret_url(filename)) as fh:
        return json.load(fh)
@app.route('/', methods=['GET', 'POST'])
def index():
    # Landing page.
    return render_template('index.html')
@app.route('/api/get', methods=['GET'])
def getta():
    # Debug endpoint: logs the user_id query parameter, returns a fixed string.
    print("sono qui dentro")  # debug trace ("I'm in here")
    print(request.args["user_id"])
    return "ciao"
@app.route('/prova', methods=['GET', 'POST'])
def ciao():
    # Test page ("prova" = "test").
    return render_template('prova.html')
@app.route('/api/register', methods=['POST'])
def register():
    """Register a new user, keyed by 'Name Surname <email>', in users.json."""
    json_data = request.json
    users = get_static_json_file("users.json")
    key = json_data["given_name"]+" "+json_data["family_name"]+" <"+json_data["email"]+">"
    if key in users:
        status = 'this user is already registered'
    else:
        # Store the whole registration payload and persist it.
        users[key] = json_data
        with open(ret_url("users.json"), "w") as fh:
            json.dump(users, fh)
        status = 'success'
    return jsonify({'result': status})
@app.route('/api/login',methods=['POST'])
def login():
    # Authenticate against users.json and populate the session on success.
    json_data = request.json
    # (was: "NON FUNZIONA LA OPEN!" -- Italian for "the open() doesn't work")
    data=get_static_json_file("users.json")
    status = False
    for user in data.values():
        # (was: 'user["email"] NON FUNZIONA' -- "doesn't work")
        print user["email"]
        # NOTE(review): passwords are compared in plain text -- consider hashing.
        if user["email"]==json_data["email"] and user["pass"]==json_data["pass"]:
            # (was: "session NON FUNZIONA" -- "session doesn't work")
            session['logged_in'] = True
            session['email'] = user["email"]
            session['name'] = user["given_name"]
            session['surname'] = user["family_name"]
            status = True
            print session
            break
    return jsonify({'result': status})
@app.route('/api/logout')
def logout():
    # pop() with a default succeeds even when the user was never logged in.
    session.pop('logged_in', None)
    return jsonify({'result': 'success'})
|
import time, unittest, os, sys
from selenium import webdriver
from main.activity.desktop_v3.activity_login import *
from main.activity.desktop_v3.activity_inbox_talk import *
from main.page.desktop_v3.inbox_talk.pe_inbox_talk import *
from utils.function.setup import *
from utils.function.logger import *
from utils.lib.user_data import *
class TestInboxTalk(unittest.TestCase):
    """Smoke tests for the talk (discussion) inbox of the desktop v3 site."""
    # Target environment for the page objects.
    _site = "live"
    def setUp(self):
        # Alternative drivers kept for local debugging:
        #self.driver = useDriver("firefox")
        #self.driver = webdriver.Chrome("C:\driver\chromedriver\chromedriver.exe")
        self.driver = tsetup("phantomjs")
        #sys.stdout = Logger()
        self.login = loginActivity()
        self.inbox_talk = inboxTalkActivity()
    def test_initial_checking(self):
        # Logs in as user6 and runs the basic inbox checks.
        print('=====================================')
        print('TEST #1 : INITIAL TALK INBOX CHECKING ')
        print('=====================================')
        self.login.do_login(self.driver, user6['user_id'], user6['email'], user6['password'], self._site)
        self.inbox_talk.setObject(self.driver)
        print('')
        # "Menguji apakah ada pesan diskusi dalam inbox" = check whether a
        # discussion message exists in the inbox.
        print('--Menguji apakah ada pesan diskusi dalam inbox (is Talk exist)--')
        self.inbox_talk.is_talk_discussion_exists(self._site)
        print('')
        # "Periksa nilai counter talk..." = compare the talk counter against
        # the real number of unread messages.
        print('--Periksa nilai counter talk dengan jumlah unread message sesungguhnya--')
        self.inbox_talk.is_counter_works(self._site)
        print('========================================================================')
    # Planned cases, not yet implemented:
    #TEST #2 Reply talk (3 cycle)
    #TEST #3 Delete talk
    #TEST #4 Unfollow Talk
    def tearDown(self):
        # "Test akan berakhir dalam beberapa saat" = the test ends shortly;
        # the sleep gives the browser time to settle before quitting.
        print("Test akan berakhir dalam beberapa saat...")
        time.sleep(5)
        self.driver.quit()
# main
# Run the suite when invoked directly.
if __name__ == "__main__":
    unittest.main()
from CodingRules import Hello
class Person:
    """A person with a name and an age."""

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def myFunc(self):
        # Greet using the instance's name (originally the first parameter
        # was called 'abc'; it is the implicit instance argument).
        print("Hello my name is " + self.name)
# Instantiate and exercise the class at import time (tutorial-style demo).
p1 = Person("John", 36)
p1.myFunc()
class Try:
    """Demo class: attributes are dynamic and not type-checked."""

    def __init__(self, name, school):
        self.name = name
        self.school = school

    def fun(self):
        """Print a greeting built from the instance name."""
        print("my name is " + self.name)
q1 = Try("Rupesh", 11)
q1.fun()
# Attributes can be reassigned to any type at runtime.
q1.school = 22
print(q1.school)
# Iterator protocol: iter() returns an iterator, next() advances it.
mytuple = ("q", "w", "e")
myit = iter(mytuple)
print(next(myit))
print(next(myit))
print(next(myit))
# Strings are iterable too.
mystr = "rupesh"
myread = iter(mystr)
print(next(myread))
# for-loops drive the iterator protocol implicitly.
string = "dash"
for x in string:
    print(x)
class MyNumber:
    """Iterable yielding the even numbers 2, 4, ..., 20."""

    def __iter__(self):
        # Reset the counter each time iteration starts.
        self.a = 2
        return self

    def __next__(self):
        # Stop once the counter passes 20.
        if self.a > 20:
            raise StopIteration
        current = self.a
        self.a += 2
        return current
cla = MyNumber()
itr = iter(cla)
# Consumes the custom iterator; prints 2 through 20.
for y in itr:
    print(y)
# Values imported from the project-local CodingRules module.
print(Hello.bikes)
a = Hello.train[2]
print(a)
# Dictionary literal; unused in the rest of the script.
details = {"as":42, "ed":52, "rf":69, "fv":91}
|
import numpy as np
def SubtractDominantMotion(image1, image2):
    """
    Placeholder for dominant-motion subtraction between two frames.

    Input:  image1, image2 --- images at time t and t+1
    Output: mask --- [n x m] boolean array, shaped like image1; currently
            all True because the motion-detection logic is not implemented.
    """
    # Stub: every pixel is flagged until the real implementation lands.
    return np.ones(image1.shape, dtype=bool)
|
class Coordenada:
    """A 2-D point supporting Euclidean distance."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def distancia(self, otra_coordenada):
        """Return the Euclidean distance to *otra_coordenada*."""
        dx = self.x - otra_coordenada.x
        dy = self.y - otra_coordenada.y
        return (dx ** 2 + dy ** 2) ** 0.5
if __name__ =='__main__':
    # Quick smoke test: distance between (3, 30) and (4, 8).
    coord_1 = Coordenada(3,30)
    coord_2 = Coordenada(4,8)
    print(coord_1.distancia(coord_2))
#definición de clase
"""
class <nombre_de_la_clase>(<super_classe>):
def __init__(self,<params>):#constructor inicin con self
<expresion>#
def <nombre_del_metodo>(self,<params>):#definir la funcionalidad de la clase
< expresion >
""" |
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
from unipath import Path
import path_utilities
import pylab as pl
import numpy as np
import DFS
import sys
import os
''' PARAMETER: a list of all filenames with extensions
RETURNS: a list of filenames without the extensions '''
def remove_all_extensions(filenames):
    """
    Split each filename into (name, lower-cased extension).

    PARAMETER: a list of all filenames with extensions
    RETURNS:   (names-without-extension, extensions) as two parallel lists.

    BUG FIX: the original never broke out of its reverse scan for dotless
    filenames, chopping the last character off the name and returning the
    whole name as the "extension"; such files now keep their full name and
    get an empty extension.
    """
    filenames_no_extensions = []
    extensions = []
    for filename in filenames:
        # rpartition splits on the LAST dot, matching the original scan.
        stem, sep, ext = filename.rpartition(".")
        if sep:
            filenames_no_extensions.append(stem)
            extensions.append(ext.lower())
        else:
            # No dot at all: keep the whole name, record an empty extension.
            filenames_no_extensions.append(filename)
            extensions.append("")
    return filenames_no_extensions, extensions
#=========1=========2=========3=========4=========5=========6=========7=
''' PARAMETER: a single filename
RETURNS: a list of all tokens with "_" "-" and "._" removed '''
def generate_tokens(filename):
    """
    PARAMETER: a single filename
    RETURNS: a list of non-empty tokens obtained by mapping "-" to "_" and
             splitting on "_".  Names starting with "._" are split as-is
             (their dashes are kept).
    """
    # "._"-prefixed names (AppleDouble-style) keep their dashes untouched.
    if not filename.startswith("._"):
        filename = filename.replace("-", "_")
    # Split on underscores and drop the empty tokens in one pass.
    return [token for token in filename.split("_") if token]
#=========1=========2=========3=========4=========5=========6=========7=
''' PARAMETER: a list of filenames
DOES: sorts a dictionary with the counts of each token
RETURNS: a list of sorted tokens and a list of sorted counts '''
def count_and_sort_tokens(filenames, write_path):
    """
    Count how many filenames each token appears in, sort the counts, and
    save a log-log histogram of the counts.

    PARAMETER: filenames  --- a list of filenames
               write_path --- directory where the histogram png is saved
    RETURNS:   (sorted_tokens, sorted_counts)
    NOTE(review): the sort below uses reverse=False (smallest count first)
    although the original comment claimed largest-to-smallest; confirm which
    order callers expect before changing.
    """
    # a dict mapping tokens to the count of how many times they appear
    token_count_dict = {}
    # for each filename
    for fn in filenames:
        # split the filename into tokens using delimiters like "_";
        # set() so a token repeated inside one filename counts once
        tokens = set(generate_tokens(fn))
        # for each token
        for token in tokens:
            # if the token is already in our dict
            if token in token_count_dict.keys():
                # grab the old count
                old_count = token_count_dict.get(token)
                # increment and update the count in the dict
                token_count_dict.update({token:old_count+1})
            else:
                # otherwise, add a key,value pair with a count of 1
                token_count_dict.update({token:1})
    sorted_tokens = []
    sorted_counts = []
    # iterate tokens ordered by their count (ascending; see NOTE above)
    for w in sorted(token_count_dict, key=token_count_dict.get, reverse=False):
        # add the token to a sorted list of tokens
        sorted_tokens.append(w)
        # add the corresponding count to a list of counts
        sorted_counts.append(token_count_dict[w])
        print(w, token_count_dict[w])
    # log-scaled bins
    bins = np.logspace(0, 4, 100)
    widths = (bins[1:] - bins[:-1])
    #print(bins)
    #print(widths
    # Calculate histogram
    hist = np.histogram(sorted_counts, bins=bins)
    # normalize by bin width
    hist_norm = hist[0]/widths
    # plot it!
    plt.figure("hist")
    plt.clf()
    plt.bar(bins[:-1], hist[0], widths)
    plt.xscale('log')
    plt.yscale('log')
    plt.title("Token Document Frequency")
    plt.xlabel("# of files a word appears in")
    plt.ylabel("Word count")
    plt.savefig(os.path.join(write_path, "token_document_frequency_histogram"),dpi=500)
    return sorted_tokens, sorted_counts
''' PARAMETER: a list of extensions
DOES: sorts a dictionary with the counts of each extension,
writes the top "num_slices" extensions to a file
RETURNS: a list of sorted tokens and a list of sorted counts '''
def count_and_sort_exts(extensions, num_slices, write_path, dataset_path):
    """
    Count extensions, write all counts plus the top `num_slices` to files.

    PARAMETER: extensions   --- a list of extensions (no leading dot)
               num_slices   --- how many top extensions to record
               write_path   --- output directory
               dataset_path --- dataset root path (used for output naming)
    RETURNS:   (sorted_extensions, sorted_counts), largest count first

    BUG FIX: the original filter tested `ext[2] != "._"`, comparing a single
    character to a two-character string (always True), so "._"-prefixed
    extensions were never excluded; the intended prefix test is `ext[:2]`.
    """
    # the name of the top-level directory of the dataset
    dataset_name = path_utilities.get_last_dir_from_path(dataset_path)
    # a dict mapping extensions to how many times each appears
    ext_count_dict = {}
    for ext in extensions:
        # Exclude AppleDouble ("._"), backup ("~") and underscore-prefixed
        # extensions; empty extensions fall through to the isalnum() check.
        try:
            wanted = ext[:2] != "._" and ext[-1] != "~" and ext[0] != "_"
        except IndexError:
            wanted = ext.isalnum()
        if wanted:
            ext_count_dict[ext] = ext_count_dict.get(ext, 0) + 1
    sorted_extensions = []
    sorted_counts = []
    # Write all counts, iterating from largest to smallest count.
    with open(os.path.join(write_path, "all_ext_counts_" + dataset_name + ".txt"), "w") as fd:
        for ext in sorted(ext_count_dict, key=ext_count_dict.get, reverse=True):
            sorted_extensions.append(ext)
            sorted_counts.append(ext_count_dict[ext])
            fd.write(ext + ": " + str(ext_count_dict[ext]) + "\n")
    # Write only the top num_slices extensions (clamped to what exists).
    if (len(sorted_extensions) < num_slices):
        num_slices = len(sorted_extensions)
    with open(os.path.join(write_path, "top_exts_" + dataset_name + ".txt"), 'w') as f:
        for i in range(num_slices):
            f.write(sorted_extensions[i] + "\n")
    return sorted_extensions, sorted_counts
#=========1=========2=========3=========4=========5=========6=========7=
''' PARAMETERS: a list of extensions, number of pie chart slices, output path
DOES: creates a pie chart '''
def plot_extension_pie(extensions, num_slices,
                       write_path, dataset_path):
    """
    PARAMETERS: extensions --- a list of extensions
                num_slices --- number of pie chart slices
                write_path / dataset_path --- output and dataset locations
    DOES: saves a pie chart of the most common extensions
    """
    sorted_exts, sorted_counts = count_and_sort_exts(
        extensions, num_slices, write_path, dataset_path)
    print("Number of unique extensions: ", len(sorted_exts))
    dataset_name = path_utilities.get_last_dir_from_path(dataset_path)
    # Clamp the slice count to the number of distinct extensions.
    if (len(sorted_exts) < num_slices):
        num_slices = len(sorted_exts)
    labels = sorted_exts[:num_slices]
    sizes = sorted_counts[:num_slices]
    plt.figure("pie")
    plt.clf()
    plt.pie(sizes, labels=labels)
    plt.axis('equal')
    plt.title(str(num_slices) + " Most Common Extensions in ")
    pie_path = os.path.join(write_path, "top_exts_pie_" + dataset_name
                            + ".png")
    plt.savefig(pie_path, dpi=300)
#=========1=========2=========3=========4=========5=========6=========7=
def plot_extensions(dataset_path, num_extensions):
    """
    Walk the dataset, collect file extensions, and save the extension pie chart.

    PARAMETER: dataset_path   --- root of the dataset to scan
               num_extensions --- number of pie slices
    """
    # Recursively gather every file path in the dataset.
    allpaths = DFS.DFS(dataset_path)
    # Outputs go into a "cluster-datalake-outputs" directory two levels above cwd.
    p = Path(os.getcwd()).parent.parent
    dataset_name = path_utilities.get_last_dir_from_path(dataset_path)
    write_path = os.path.join(p, "cluster-datalake-outputs/", dataset_name + "--output/")
    if not os.path.isdir(os.path.join(p,"cluster-datalake-outputs/")):
        os.mkdir(os.path.join(p,"cluster-datalake-outputs/"))
    if not os.path.isdir(write_path):
        os.mkdir(write_path)
    # a list of all the file names (without the paths)
    filenames = []
    for path in allpaths:
        filenames.append(path_utilities.get_fname_from_path(path))
    filenames_no_ext, exts = remove_all_extensions(filenames)
    plot_extension_pie(exts, num_extensions, write_path, dataset_path)
    # Dead code kept for reference: dumps filenames/tokens to text files.
    '''
    filenames_path = os.path.join(write_path, "filenames_"
                                  + dataset_name + ".txt")
    f = open(os.path.join(filenames_path, "w")
    for x in filenames_no_ext:
        f.write(x+ "\n")
        #print(generate_tokens(x))
    f.close()
    sorted_tokens, sorted_counts = count_and_sort_tokens(filenames_no_ext)
    tokens_path = os.path.join(write_path, "tokens_" + dataset_name
                               + ".txt")
    token_file = open(tokens_path, "w")
    for token in sorted_tokens:
        token_file.write(token + "\n")
    token_file.close()
    '''
# MAIN FUNCTION
def main():
    """
    CLI entry point: python <script> <dataset_path> <num_extensions>
    """
    dataset_path = sys.argv[1]
    # BUG FIX: argv values are strings; num_extensions is used as an int
    # downstream (range()/comparisons in the counting helpers), which raises
    # TypeError on Python 3 -- convert it here.
    num_extensions = int(sys.argv[2])
    plot_extensions(dataset_path, num_extensions)
if __name__ == "__main__":
    main()
|
#!/usr/bin/env python3
import time
import signal
import sys
import datetime
import argparse
import os
import getpass
from os.path import expanduser
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
# Module-level flags/state; 0 and [] read as "unset" defaults here.
# NOTE(review): presumably mutated by the CLI flow defined below -- confirm
# against the rest of the script before relying on these semantics.
FROM_TIME_DATE = 0
SPECIFY_HR = 0
SPECIFY_NOTE = 0
SPECIFY_PROJECT = 0
START_TIME = 0
ACTIVITY_CHOICE = 0
CHOICE = []
# Flagged when the requested project does not exist.
NONEXIST_PROJECT_FLAG = False
def signal_handler(sig, frame):
    # SIGINT handler: announce and exit cleanly on Ctrl+C.
    print('You pressed Ctrl+C!')
    sys.exit(0)
def click_url(driver, erp_link):
    """Start a fresh Firefox session and navigate to ``erp_link``.

    Note: the incoming ``driver`` argument is discarded; a new
    ``webdriver.Firefox()`` is always created.  Returns the driver on
    success, False on any failure.
    """
    try:
        driver = webdriver.Firefox()
        driver.get(erp_link)
        driver.set_page_load_timeout(10)
        return driver
    except:  # NOTE(review): bare except also swallows KeyboardInterrupt
        print("Either the URL could not be found or\
network has been disabled.")
        print("Kindly check the above condition and\
re-run the program. Thank You")
        driver.quit()
        return False
def click_erp_user_id(*argv):
    """Fill in and submit the ERP login e-mail field.

    argv[0]: webdriver; argv[1]: user name.
    Returns True on success, False when the field does not appear in 10 s.
    """
    try:
        elem = WebDriverWait(argv[0], 10)\
            .until(EC.presence_of_element_located((By.ID, "login_email")))
        elem.clear()
        elem.send_keys(argv[1])
        elem.send_keys(Keys.RETURN)
        return True
    except:
        print("timeout erp user id")
        return False
def enter_erp_user_pwd(*argv):
    ''' track user_credentials '''
    # argv[0]: webdriver; argv[1]: password.  Types the password, submits,
    # then reads the login-status banner text.  Returns True on 'success',
    # False on bad credentials or timeout.
    # NOTE(review): when the delayed re-check sees 'success' the function
    # falls off the end and returns None; callers only test ``== False``,
    # so None is effectively treated as success here.
    try:
        elem = WebDriverWait(argv[0], 10)\
            .until(EC.presence_of_element_located((By.ID, "login_password")))
        elem.clear()
        elem.send_keys(argv[1])
        elem.send_keys(Keys.RETURN)
        time.sleep(1)
        verify_credentials = argv[0].find_element_by_xpath\
        ("/html/body/div/div[1]/div/div[2]/div/div/div/div[2]/section[1]/div[1]/form/div/span")\
        .text.lower()
        print("login_status=", verify_credentials)
        if verify_credentials == 'success':
            return True
        else:
            # Banner may render late; re-read it after a longer wait.
            time.sleep(5)
            verify_credentials = argv[0].find_element_by_xpath\
            ("/html/body/div/div[1]/div/div[2]/div/div/div/div[2]/section[1]/div[1]/form/div/span")\
            .text.lower()
            if verify_credentials != 'success':
                return False
    except:
        print("timeout password")
        return False
def create_new_time_sheet(*argv):
    # argv[0]: webdriver.  Uses the navbar search box to open the
    # "New Timesheet" page and verifies the page heading afterwards.
    # Returns False on mismatch/timeout; falls through (None) on success --
    # validate_page() only tests for ``== False``.
    try:
        print("entering new timesheet")
        elem = WebDriverWait(argv[0], 10)\
            .until(EC.presence_of_element_located((By.ID, "navbar-search")))
        time.sleep(1)
        elem.clear()
        time.sleep(1.5)
        elem.send_keys("New Timesheet")
        time.sleep(2.0)
        elem.send_keys(Keys.ARROW_DOWN)
        time.sleep(0.5)
        elem.send_keys(Keys.RETURN)
        entered_text = argv[0].find_element_by_css_selector\
        (".form-group-sm > div:nth-child(1) > span:nth-child(3)").text
        print("Timesheet Entered=", entered_text)
        if entered_text != "New Timesheet":
            return False
        print("Trrr")
    except:
        print("timesheet timeout of time")
        return False
def select_activity_type(*argv):
    # argv[0]: webdriver; argv[1]: activity-type label to enter.
    # Clicks the activity cell, types the label, checks the echoed value,
    # then commits with ENTER.  Returns False on mismatch/timeout; falls
    # through (None) on success.
    try:
        print("enters activity_type")
        WebDriverWait(argv[0], 10).until(EC.presence_of_element_located\
        ((By.CSS_SELECTOR, ".rows > div:nth-child(1) > div:nth-child(1) > div:nth-child(2)")))\
        .click()
        elem_item = WebDriverWait(argv[0], 10).until(EC.presence_of_element_located\
        ((By.CSS_SELECTOR, ".editable-row > div:nth-child(2) > div:nth-child(1) > div:nth-child(1)\
> div:nth-child(1) > div:nth-child(1) > input:nth-child(1)")))
        elem_item.clear()
        time.sleep(0.5)
        elem_item.send_keys(argv[1])
        time.sleep(0.5)
        entered_text = argv[0].find_element_by_css_selector\
        (".editable-row > div:nth-child(2) > div:nth-child(1) > div:nth-child(1) >\
div:nth-child(1) > div:nth-child(1) > span:nth-child(3)").text
        print("Activity Entered=", entered_text)
        if entered_text != argv[1]:
            return False
        time.sleep(1)
        elem_item.send_keys(Keys.ENTER, Keys.RETURN)
    except:
        print("activity timeout")
        return False
def select_date(*argv):
    # argv[0]: webdriver; argv[1]: date string; argv[2]: start time.
    # Opens the date cell and types the date, then edits the widget's
    # auto-filled value in place with END/BACK_SPACE keystrokes.
    # Returns False on timeout; falls through (None) on success.
    try:
        print("enters date")
        WebDriverWait(argv[0], 10).until(EC.presence_of_element_located\
        ((By.CSS_SELECTOR, ".rows > div:nth-child(1) > div:nth-child(1) >\
div:nth-child(3)"))).click()
        elem_pass2 = WebDriverWait(argv[0], 10).until(EC.presence_of_element_located\
        ((By.CSS_SELECTOR, ".editable-row > div:nth-child(3) > div:nth-child(1) >\
div:nth-child(1) > input:nth-child(1)")))
        elem_pass2.clear()
        time.sleep(0.5)
        elem_pass2.send_keys(argv[1], Keys.END, Keys.SPACE, Keys.RETURN)
        time.sleep(0.5)
        # Replaces the last character with "15" -- widget-specific edit;
        # TODO(review): confirm what field this adjusts in the live UI.
        elem_pass2.send_keys(Keys.END, Keys.BACK_SPACE, "15", Keys.RETURN)
        time.sleep(0.5)
        # Erase eight characters from the end and type the start time.
        elem_pass2.send_keys(Keys.END, Keys.BACK_SPACE, Keys.BACK_SPACE, Keys.BACK_SPACE,\
            Keys.BACK_SPACE, Keys.BACK_SPACE, Keys.BACK_SPACE, Keys.BACK_SPACE,\
            Keys.BACK_SPACE, argv[2], Keys.RETURN)
    except TimeoutException:
        print("date timeout")
        return False
def select_hrs(*argv):
    # argv[0]: webdriver; argv[1]: hours-worked value to type.
    # Returns False on timeout; falls through (None) on success.
    try:
        print("enters hrs")
        elem_pass = WebDriverWait(argv[0], 10).until(EC.presence_of_element_located\
        ((By.CSS_SELECTOR, ".editable-row > div:nth-child(4) > div:nth-child(1) >\
div:nth-child(1) > input:nth-child(1)")))
        elem_pass.clear()
        time.sleep(0.5)
        elem_pass.send_keys(argv[1])
        time.sleep(0.5)
        elem_pass.click()
    except TimeoutException:
        print("hr timeout")
        return False
def check_selected_project(driver):
    # Detects the "project not found" modal.  When the modal's close button
    # exists it is clicked, the global flag is set and False is returned;
    # when the lookup raises (no modal) the project is accepted -> True.
    global NONEXIST_PROJECT_FLAG
    try:
        close_button = driver.find_element_by_xpath\
        ("/html/body/div[17]/div[2]/div/div[1]/div/div[2]/div/button[1]/span").text
        if close_button:
            # NOTE(review): the button was located under div[17] above but
            # is clicked under div[16] here -- verify both against the DOM.
            driver.find_element_by_xpath\
            ("/html/body/div[16]/div[2]/div/div[1]/div/div[2]/div/button[1]").click()
            NONEXIST_PROJECT_FLAG = True
            return False
        else:
            return True
    except:
        return True
def select_project(*argv):
    # argv[0]: webdriver; argv[1]: project name.  Types the project name,
    # confirms it, then checks whether the ERP rejected it (unknown
    # project).  Returns False on rejection/timeout; None on success.
    global NONEXIST_PROJECT_FLAG
    try:
        print("enters select_project")
        elem_pass = WebDriverWait(argv[0], 10).until(EC.presence_of_element_located\
        ((By.CSS_SELECTOR, ".editable-row > div:nth-child(5) > div:nth-child(1) >\
div:nth-child(1) > div:nth-child(1) > div:nth-child(1) > input:nth-child(1)")))
        elem_pass.clear()
        time.sleep(0.5)
        elem_pass.send_keys(argv[1])
        time.sleep(1.5)
        elem_pass.send_keys(Keys.ENTER, Keys.RETURN)
        time.sleep(1)
        if check_selected_project(argv[0]) == False:
            return False
    except:
        print("project timeout")
        return False
def select_note(*argv):
    # argv[0]: webdriver; argv[1]: note text for the timesheet entry.
    # Returns False on timeout; falls through (None) on success.
    try:
        print("enters note")
        elem_pass = WebDriverWait(argv[0], 10).until(EC.presence_of_element_located\
        ((By.CSS_SELECTOR, "div.note-editable:nth-child(4)")))
        elem_pass.clear()
        time.sleep(0.5)
        elem_pass.send_keys(argv[1])
        print("name")
        time.sleep(1)
    except TimeoutException:
        print("note timeout")
        return False
def select_save_button(*argv):
    # argv[0]: webdriver.  Clicks the Save button when its label reads
    # 'save'.  Returns False if the button is missing/mislabelled or on
    # timeout; falls through (None) on success.
    try:
        save_button = argv[0].find_element_by_css_selector("button.btn:nth-child(7)\
> span:nth-child(2)").text.lower()
        print("save_button=", save_button)
        if save_button == 'save':
            print("entering save")
            elem_pass = WebDriverWait(argv[0], 10).until(EC.presence_of_element_located\
            ((By.CSS_SELECTOR, "button.btn:nth-child(7)")))
            elem_pass.click()
        else:
            print("save button not foundd")
            return False
    except:
        print("save timeout failed")
        return False
def select_submit_button(*argv):
    # argv[0]: webdriver.  Clicks Submit; when a conflict dialog appears
    # instead, dismisses it, opens the user's timesheet list, opens the
    # conflicting draft and tries to submit that draft.
    # Returns True after conflict-resolution submit, False when no submit
    # button is found or on timeout; falls through (None) on the plain
    # submit path.
    try:
        time.sleep(1.5)
        submit_button = argv[0].find_element_by_css_selector\
        ("button.btn:nth-child(7) > span:nth-child(2)").text.lower()
        print("submit_button=", submit_button)
        if submit_button == 'submit':
            print("entering submit")
            time.sleep(2.5)
            argv[0].find_element_by_xpath("/html/body/div[1]/div/div[2]/\
div[1]/div/div/div[2]/button[2]").click()
        elif argv[0].find_element_by_xpath("/html/body/div[16]/div[2]/div"):
            print("Conflict found")
            time.sleep(0.5)
            print("close button click")
            argv[0].find_element_by_xpath("/html/body/div[16]/div[2]/div/div[1]/\
div/div[2]/div/button[1]").click()
            time.sleep(0.5)
            print("click timesheet of the user")
            argv[0].find_element_by_xpath("/html/body/div[1]/header/div/div/\
div[1]/ul/li[2]/a").click()
            time.sleep(2.5)
            print("click the conflicting draft and will try to submit it")
            argv[0].find_element_by_xpath("/html/body/div[1]/div/div[3]/div[2]/\
div[2]/div/div[3]/div[2]/div[1]/div[3]/div/div[5]/div[3]/div/div/div[1]/div[2]").click()
            time.sleep(2.5)
            argv[0].find_element_by_xpath("/html/body/div[1]/div/div[2]/div[1]/\
div/div/div[2]/button[2]").click()
            return True
        else:
            print("submit button not found")
            return False
    except:
        print("submit timeout failed")
        return False
def confirm_submission(*argv):
    # argv[0]: webdriver.  Clicks the confirmation button inside the submit
    # modal.  Returns False on timeout; falls through (None) on success.
    try:
        WebDriverWait(argv[0], 10).until(EC.presence_of_element_located\
        ((By.CSS_SELECTOR, "div.modal-dialog:nth-child(2) > div:nth-child(1) > div:nth-child(1) >\
div:nth-child(1) > div:nth-child(2) > div:nth-child(1) > button:nth-child(2)"))).click()
    except:
        print("confirm submission timeout failed")
        return False
def refresh_page(driver):
    """Reload the current page; True on success, False on any error."""
    try:
        print("execute refresh")
        driver.refresh()
        print("pass refresh")
    except:
        print("refresh fail")
        return False
    return True
def get_input():
    """Read one line from stdin and return it."""
    return input()
def validate_page(function_name=None, *argv):
    """Call ``function_name(*argv)`` with retry-on-failure semantics.

    ``argv[0]`` must be the webdriver (used to refresh between retries).
    Returns:
      * True  -- the step eventually succeeded,
      * None  -- a login/save/submit/confirm step failed (caller must redo
                 the whole flow after the refresh),
      * False -- the project does not exist, or five retries were exhausted
                 (the driver window is closed in that case).

    Fix: the previous default for ``function_name`` was the mutable list
    literal ``[]`` -- both a mutable-default smell and not callable.  ``None``
    keeps the (never exercised) optionality without those problems.
    """
    global NONEXIST_PROJECT_FLAG
    count = 1
    repeat_flag = False
    print("entering validate page")
    redo_flag = False
    while function_name(*argv) == False:
        print("Failed function_name", function_name)
        print("function_repeat_count=", count)
        # Unknown project: no point retrying.
        if function_name == select_project and NONEXIST_PROJECT_FLAG == True:
            break
        # These steps cannot be retried in place -- signal a full redo.
        if function_name == click_erp_user_id or function_name == enter_erp_user_pwd\
           or function_name == select_save_button or function_name == select_submit_button\
           or function_name == confirm_submission:
            redo_flag = True
            refresh_page(argv[0])
            break
        refresh_page(argv[0])
        count = count + 1
        if count >= 6:
            repeat_flag = True
            break
    print("exiting valid page")
    if redo_flag == True:
        return None
    if NONEXIST_PROJECT_FLAG == True:
        return False
    if repeat_flag == True:
        argv[0].close()
        return False
    return True
def fill_up_user_entries(driver):
    # Runs every data-entry step (activity, date, hours, project, note)
    # through validate_page().  Returns False as soon as one step fails,
    # True when all succeed.
    # NOTE(review): ``count`` is never set to False, so every
    # ``(count and count1) == False`` check reduces to ``count1 == False``.
    global ACTIVITY_CHOICE, CHOICE, FROM_TIME_DATE, START_TIME,\
        SPECIFY_HR, SPECIFY_PROJECT, SPECIFY_NOTE, NONEXIST_PROJECT_FLAG
    count = True
    count1 = True
    print("Entering Activity type")
    if validate_page(select_activity_type, driver, CHOICE[ACTIVITY_CHOICE]) == False:
        print("False")
        count1 = False
    if (count and count1) == False:
        return False
    print("Entering the selected date")
    if validate_page(select_date, driver, FROM_TIME_DATE, START_TIME) == False:
        count1 = False
    if (count and count1) == False:
        return False
    print("Entering the selected hrs")
    if validate_page(select_hrs, driver, SPECIFY_HR) == False:
        count1 = False
    if (count and count1) == False:
        return False
    print("Entering the name of the selected project")
    if validate_page(select_project, driver, SPECIFY_PROJECT) == False:
        # Unknown project is fatal -- no retry helps.
        if NONEXIST_PROJECT_FLAG == True:
            return False
        count1 = False
    if (count and count1) == False:
        return False
    print("Entering notes")
    if validate_page(select_note, driver, SPECIFY_NOTE) == False:
        count1 = False
    if (count and count1) == False:
        return False
    return True
def get_activity_name():
    """Return the raw user input for the activity-type menu."""
    return get_input()
def validate_activity_choice(choice_input):
    """Return True when ``choice_input`` is a number indexing into CHOICE.

    Fix: the bare ``except:`` (which also swallowed KeyboardInterrupt and
    unrelated errors) is narrowed to the conversion failures ``int`` can
    actually raise here.
    """
    global CHOICE
    try:
        if 0 <= int(choice_input) <= len(CHOICE) - 1:
            return True
        print("Enter one of the option as shown")
        return False
    except (ValueError, TypeError):
        print("Incorrect Input.Enter only Numbers")
        return False
def get_date_input():
    """Ask for a dd/mm/yy date and return it split into [dd, mm, yy]."""
    print("Enter the date, month and year (dd/mm/yy)")
    return get_input().split("/")
def validate_date(input_date):
    """Return True when ``input_date`` ([dd, mm, yy] strings) is a real date.

    Fix: the bare ``except:`` is narrowed to the failures this code can
    actually produce -- non-numeric parts (ValueError), an impossible
    calendar date (ValueError), or a too-short list (IndexError).
    """
    try:
        datetime.datetime(year=int(input_date[2]), month=int(input_date[1]),
                          day=int(input_date[0]))
        return True
    except (ValueError, IndexError, TypeError):
        print("Incorrect Date or Valid Date mentioned is not present in that Year.\
Please Try Again")
        return False
def get_time():
    """Read a time/number value from stdin (no shadowing of the time module)."""
    return get_input()
def validate_time(time):
    """Return True when ``time`` parses as a float, else print a hint and
    return False.

    Fix: narrowed the bare ``except:`` to the errors ``float`` raises, and
    dropped the useless rebinding of the parameter (the converted value was
    never used).
    """
    try:
        float(time)
        return True
    except (ValueError, TypeError):
        print("Incorrect Input.Enter only Numbers")
        return False
def get_project_name():
    """Return the project name typed by the user."""
    return get_input()
def validate_project_name(project_name):
    """Reject empty project names."""
    if len(project_name) != 0:
        return True
    print("Project Name should not be left blank")
    return False
def check_file_contents(contents):
    """Classify the stored config by entry count.

    1 entry -> 0 (URL only); 2 entries -> 1 (credentials only);
    3 entries -> 2 (URL plus credentials); anything else -> None.
    """
    classification = {
        1: ("only URL is set", 0),
        2: ("only user name and password is set", 1),
        3: ("Both URL and user credentials are set", 2),
    }
    entry = classification.get(len(contents))
    if entry is None:
        return None
    message, code = entry
    print(message)
    return code
def validate_note(note):
    """Reject empty notes."""
    if len(note) != 0:
        return True
    print("Note should not be left blank")
    return False
def perform_operation(contents):
    """Drive the whole timesheet flow.

    ``contents`` is the stored config line list (URL, optionally username
    and password).  Prompts for whatever is missing, collects the timesheet
    fields, then opens the ERP in Firefox and retries the fill/save/submit
    cycle until it succeeds or gives up.
    """
    global ACTIVITY_CHOICE, CHOICE, FROM_TIME_DATE, START_TIME,\
        SPECIFY_HR, SPECIFY_PROJECT, SPECIFY_NOTE
    driver = 0
    fill_up_flag = False
    signal.signal(signal.SIGINT, signal_handler)
    print('Press Ctrl+C')
    file_data = check_file_contents(contents)
    # Config holding only credentials (no URL) is unusable.
    if file_data == 1:
        print_url_error()
        return False
    # --- user name: from file when present, else prompt until non-blank ---
    while True:
        if file_data == 2:
            user_name = contents[1]
        else:
            print("Enter user name:")
            user_name = get_input()
        if len(user_name) == 0:
            print("User Name should not be left blank")
            continue
        break
    # --- password: from file when present, else hidden prompt ---
    if file_data == 2:
        user_pwd = contents[2]
    else:
        print("Enter user password:")
        user_pwd = getpass.getpass()
    erp_link = contents[0]
    print("url", erp_link)
    # --- activity type menu ---
    while True:
        print("Select the Activity Type:")
        print("\n")
        CHOICE = ["Development", "Testing", "Wlan Testing", "Networking", "building source code",\
            "Planning", "study", "Support", "Build and integration", "Training", "Research",\
            "Integration and release", "Communication", "Execution", "Proposal Writing"]
        for i in range(0, len(CHOICE)):
            print(i, ".", CHOICE[i])
        ACTIVITY_CHOICE = get_activity_name()
        if validate_activity_choice(ACTIVITY_CHOICE) == False:
            continue
        break
    ACTIVITY_CHOICE = int(ACTIVITY_CHOICE)
    # --- date ---
    while True:
        input_date = get_date_input()
        if validate_date(input_date) == False:
            continue
        date = input_date[0]
        month = input_date[1]
        year = input_date[2]
        break
    FROM_TIME_DATE = date + "." + month + "." + year
    # --- start time ---
    while True:
        print("Enter Start Time")
        START_TIME = get_time()
        if validate_time(START_TIME) == False:
            continue
        break
    # --- hours worked ---
    while True:
        print("Enter how many hours worked:")
        SPECIFY_HR = get_time()
        if validate_time(SPECIFY_HR) == False:
            continue
        break
    # --- project name ---
    while True:
        print("Specify the name of the project you are currently working on:")
        SPECIFY_PROJECT = get_project_name()
        if validate_project_name(SPECIFY_PROJECT) == False:
            continue
        break
    # --- note ---
    while True:
        print("Any Note to be added about the working project")
        SPECIFY_NOTE = get_input()
        if validate_note(SPECIFY_NOTE) == False:
            continue
        break
    # --- browser flow: retried as a whole until success or hard failure ---
    while True:
        save_count = 0
        submit_count = 0
        confirm_sub_count = 0
        credientials_count = 0
        fill_up_flag = False
        try:
            print("opening driver")
            driver = click_url(driver, erp_link)
            if driver == False:
                break
            # Login: up to 5 attempts for the name/password pair.
            while True:
                if credientials_count >= 5:
                    break
                print("Enterting User Name")
                if validate_page(click_erp_user_id, driver, user_name) == None:
                    credientials_count = credientials_count + 1
                    continue
                print("Entering User Password")
                if validate_page(enter_erp_user_pwd, driver, user_pwd) == None:
                    credientials_count = credientials_count + 1
                    continue
                else:
                    break
            if credientials_count >= 5:
                print("The Credentials that you have entered is wrong.")
                print("Please Enter Valid Credentials and re-run the program.Thank you")
                driver.close()
                break
            print("Creating New Timesheet")
            if validate_page(create_new_time_sheet, driver) == False:
                continue
            # Fill entries, then save -> submit -> confirm, each retried.
            while True:
                if fill_up_user_entries(driver) == False:
                    if NONEXIST_PROJECT_FLAG == True:
                        break
                    fill_up_flag = True
                    break
                time.sleep(1)
                if save_count >= 5 or submit_count >= 5 or confirm_sub_count >= 5:
                    break
                if validate_page(select_save_button, driver) == None:
                    save_count = save_count + 1
                    continue
                time.sleep(2.5)
                if validate_page(select_submit_button, driver) == None:
                    print("Repeat the following entries again")
                    submit_count = submit_count + 1
                    continue
                time.sleep(2.5)
                if validate_page(confirm_submission, driver) == None:
                    confirm_sub_count = confirm_sub_count + 1
                    continue
                else:
                    break
            if fill_up_flag == True or save_count >= 5 or\
               submit_count >= 5 or confirm_sub_count >= 5:
                driver.close()
                continue
            elif NONEXIST_PROJECT_FLAG == True:
                print("You have entered incorrect project name or\
the ERP could not find the project that you are working for.")
                print("Please enter valid project Name and re-run the program. Thank You")
                driver.close()
                break
            time.sleep(3)
            driver.close()
        except:
            print("Connection Closed or Browser Closed")
            continue
        break
    print("Thank you for using Timesheet Automation")
def validate_cmd_password(contents):
    """A password payload must be exactly [username, password]."""
    print("length=", len(contents))
    if len(contents) == 2:
        return True
    print("Error. Please give username along with password.Type the following command")
    print("make setpassword username=yourusername password=yourpassword")
    return False
def validate_cmd_url(contents):
    """A link payload must be exactly one entry (the ERP URL)."""
    print("length=", len(contents))
    if len(contents) == 1:
        return True
    print("Error. Please set the link correctly.Type the following command")
    print("make setlink link=yourerplinkaddress")
    return False
def add_newline(list_content):
    """Append a trailing newline to every entry, mutating the list in place,
    and return the same list."""
    for idx, entry in enumerate(list_content):
        list_content[idx] = entry + "\n"
    return list_content
def write_to_file(args, args_type):
    """Persist either the ERP link or the credentials to the config file.

    ``args_type`` is ``args.setlink`` or ``args.setpassword`` (the argparse
    value list).  Merges the new value with whatever is already stored in
    ``~/erp_timesheet/erp_link.txt``, keeping the URL as the first line.
    Returns False when the payload fails validation.
    NOTE(review): the file is opened without a context manager and closed
    only at the end; an exception mid-way would leak the handle.
    """
    text = args_type
    print("text=", text)
    print("texttype=", type(text))
    if args_type == args.setpassword:
        if validate_cmd_password(text) == False:
            return False
    elif args_type == args.setlink:
        if validate_cmd_url(text) == False:
            return False
    path = expanduser("~") + '/erp_timesheet/erp_link.txt'
    read_file_contents = read_file()
    if read_file_contents == False or read_file_contents == []:
        # No usable existing config: write the new payload verbatim.
        f = open(path, "w+")
        text = add_newline(text)
        f.writelines(text)
        print("written")
    elif read_file_contents != False:
        file_contents = check_url(read_file_contents)
        f = open(path, "w+")
        if file_contents == False:
            # Stored file has no URL yet.
            if check_url(text) == False:
                text = add_newline(text)
            else:
                # New payload is the URL: it goes first, old lines follow.
                text = add_newline(text)
                text = text + read_file_contents
            f.writelines(text)
        elif file_contents != False:
            # Stored file already has a URL on its first line.
            if check_url(text) == False:
                # New payload is credentials: keep stored URL, replace rest.
                content = []
                print("readfile_contlist", read_file_contents)
                print("readfile_contonly", read_file_contents[0])
                content.append(read_file_contents[0])
                text = add_newline(text)
                content = content + text
                print("content=", content)
                f.writelines(content)
            elif check_url(text) != False:
                # New payload is a URL: replace stored URL, keep the rest.
                content = []
                text = add_newline(text)
                content = content + text
                read_file_contents.pop(0)
                content = content + read_file_contents
                print("content=", content)
                f.writelines(content)
    f.close()
def read_file():
    """Read the saved config lines from ``~/erp_timesheet/erp_link.txt``.

    Returns the list of lines, or False when the file cannot be read.

    Fixes: the file handle is now closed via a context manager even on
    error, and the bare ``except:`` is narrowed to ``OSError`` (covers
    missing file, permissions, etc.).
    """
    path = expanduser("~") + '/erp_timesheet/erp_link.txt'
    print(path)
    try:
        with open(path, "r") as f:
            contents = f.readlines()
    except OSError:
        return False
    print("contents in file=", contents)
    return contents
def check_url(contents):
    """Return the first line of ``contents`` containing 'http', else False."""
    for entry in contents:
        if 'http' in entry:
            print("readurlline=", entry)
            return entry
    return False
def print_url_error():
    """Tell the user the ERP link is missing and how to configure it."""
    for message in (
        "The ERP link is not set. Kindly set the ERP link address",
        "ERP link can be set by running the command 'make setlink link=your ERP Link'",
    ):
        print(message)
if __name__ == "__main__":
    # CLI wiring: --setlink / --setpassword store config via write_to_file;
    # with no flags, the stored config is loaded and the timesheet flow runs.
    parser = argparse.ArgumentParser()
    parser.add_argument("--setlink", nargs='+')
    parser.add_argument("--setpassword", nargs='+')
    parser.set_defaults(func=write_to_file)
    args = parser.parse_args()
    if args.setlink:
        args.func(args, args.setlink)
    elif args.setpassword:
        args.func(args, args.setpassword)
    else:
        contents = read_file()
        # Without a readable config containing a URL, nothing can run.
        if contents == False:
            print_url_error()
        elif check_url(contents) == False:
            print_url_error()
        else:
            perform_operation(contents)
|
# -*- coding: utf-8 -*-
import copy
from datetime import datetime
from bl.temperature.meta import Meta
from bl.temperature.parser import TemperatureDataParser
from bl.temperature.preprocessor import TemperatureDataPreprocessor
from bl.temperature.processor import TemperatureDataProcessor
class TemperatureModeHandler:
    """Runs temperature-mode test data through parse / preprocess / process."""

    @staticmethod
    def _test_temperature_mode(meta):
        """
        High level processing method.
        :param meta: An object of type dict containing all the meta
        information and data that is required for the test to be processed.
        :return: An object of type dict containing the test result and all the
        resulting values and meta information.
        """
        data_chunks = []
        for log in meta.logs:
            # '.txt' logs are plain text; anything else is base64-encoded.
            parser = TemperatureDataParser(
                sensors_count=log.sensors_count,
                digits=meta.round_to,
                base64='.txt' not in log.file)
            data, date, time = parser.parse(log.file)
            try:
                # Keep the most recent log date/time on the meta object.
                if not meta.date or \
                        datetime.strptime(meta.date, '%d.%m.%Y') < \
                        datetime.strptime(date, '%d.%m.%Y'):
                    meta.date = date
                    meta.time = time
            except ValueError:
                # FIX: ``date`` is a string; the original '%d' placeholder
                # raised TypeError inside this handler.
                print('Date parsing error, %s' % date)
                meta.time = time
            data_chunks.append(data)
        preprocessor = TemperatureDataPreprocessor()
        processor = TemperatureDataProcessor()
        result = {}
        # Process merged chunks until one reports completion.
        for chunk in preprocessor.get_merged_chunk(
                data_chunks=data_chunks,
                meta=meta):
            result = processor.process(chunk, copy.deepcopy(meta))
            if result['done']:
                break
        return result

    @staticmethod
    def handle(data):
        """
        Top level method to handle temperature mode data.
        :param data: Mode data as an object of type dict.
        :return: Resulting object, containing resolution and values.
        """
        payload = Meta(data)
        validation_status = payload.validate()
        if validation_status['valid']:
            result = TemperatureModeHandler._test_temperature_mode(payload)
        else:
            result = {
                'done': False,
                'errors': validation_status['errors']
            }
        return result
|
#!/usr/bin/env python
# coding: utf-8
# 中行对账 (Bank of China account reconciliation)
import pandas as pd
import numpy as np
import re
# 整理表格 (normalise the NC ledger and bank-statement spreadsheets)
class DealExcelBOC(object):
    """Normalise the NC ledger export and the BOC bank statement into
    comparable DataFrames (shared reconciliation marker columns)."""

    def __init__(self,nc_path,bank_path):
        # nc_path: path to the NC (ERP ledger) export workbook.
        # bank_path: path to the bank statement workbook.
        self.nc_path = nc_path
        self.bank_path = bank_path

    def dealNC(self):
        """Load and clean the NC ledger sheet; return a DataFrame with
        reconciliation columns 对账一致 / 银行索引 prepended."""
        # read
        nc_boc = pd.read_excel(self.nc_path,header=None)
        nc_boc = nc_boc.dropna(how='all')
        # deal year/head/tail
        year = nc_boc.iloc[0,0]
        init_period = nc_boc.iloc[2,:] # keep the opening-balance row for now
        month_year_sum = nc_boc.tail(2) # keep month/year running-total rows for now
        # drop useless rows
        nc_boc.columns = nc_boc.iloc[1,:]
        nc_boc = nc_boc.drop([0,1,2])
        nc_boc = nc_boc.head(len(nc_boc)-2)
        # Build a date from the year header plus the month (月) / day (日)
        # columns and insert it as the first column.
        time = str(year) + '-' + nc_boc['月'].astype(str) + '-' + nc_boc['日'].astype(str)
        nc_boc.insert(0,'日期',pd.to_datetime(time,format='%Y-%m-%d').astype(str).str.slice(0,10))
        nc_boc.reset_index(drop=True,inplace=True)
        # Extract the transaction date embedded in the 摘要 (summary) text.
        time_pattern1 = re.compile(r'\d{4}-\d+-\d+')
        time_pattern2 = re.compile(r'\d{4}\.\d+\.\d+')
        time_pattern3 = re.compile(r'\d+\.\d+')
        transac_time = nc_boc['摘要'].copy()
        for i in range(len(transac_time)):
            time1 = time_pattern1.findall(transac_time[i]) #[2019-07-01]
            if time1 !=[]:
                transac_time[i] = time1[0]
            else:
                time2 = time_pattern2.findall(transac_time[i]) #[2019.8.2]
                if time2!=[]:
                    transac_time[i] = time2[0]
                else:
                    time3 = time_pattern3.findall(transac_time[i]) #[8.2] #[2019.7]
                    try:
                        # A 4-digit leading part is a year fragment, not a
                        # month.day date -> unusable.
                        if len(str(time3[0]).split('.')[0])==4:
                            transac_time[i] = None
                        else:
                            transac_time[i] = str(year) + '.' + time3[0]
                    except IndexError:
                        transac_time[i] = None
        nc_boc.insert(6,'交易日期',transac_time)
        nc_boc['交易日期']=pd.to_datetime(transac_time,format='%Y-%m-%d')
        # Reconciliation markers: matched flag + index of the bank row.
        nc_boc.insert(0,"银行索引",'')
        nc_boc.insert(0,'对账一致',None)
        # Field-type conversions / whitespace and bracket cleanup.
        nc_boc.columns = list(map(lambda x: str(x).strip(),nc_boc.columns))
        nc_boc.loc[:,['银行账户名称','摘要']] = nc_boc[['银行账户名称','摘要']].apply(lambda s: s.str.strip().str.replace('[ ()()]',''))
        nc_boc.loc[:,['借方','贷方','余额']] = nc_boc[['借方','贷方','余额']].apply(lambda s: s.astype(np.float64))
        nc_boc.drop(['月','日'],axis=1,inplace=True)
        return nc_boc

    def dealBANK(self):
        """Load and clean the bank statement; return a DataFrame with
        reconciliation columns 对账一致 / NC索引 prepended.

        Two input layouts are handled: the treasury-system export (first
        cell is 组织) and the raw BOC statement (header row located by the
        '交易日期[ Transaction Date ]' cell)."""
        # read
        boc = pd.read_excel(self.bank_path,header=None)
        boc = boc.dropna(how='all')
        if boc.iloc[0,0]=='组织':
            # Treasury-system layout: header is the first row.
            boc.columns = boc.loc[0,:]
            boc = boc.drop(0)
            need_fields = ["组织","银行","账号","币种","交易日期","收入","支出","当前余额",
                "用途","对方户名","付款人开户行名","收款人开户行名","交易附言",
                "用途_原","交易附言", "来源","业务类型","资金系统单据号",]
            # Ensure every expected column exists (missing ones become None).
            for col in need_fields:
                if col not in boc.columns:
                    boc[col] = None
            boc['交易日期'] = pd.to_datetime(boc['交易日期'])
            strip_fields = ["组织","账号","币种","用途","对方户名","备注","业务类型"]
            boc.loc[:,strip_fields] = boc[strip_fields].apply(lambda s: s.str.strip().str.replace('[ ()()]',''))
        else:
            # Raw BOC statement: find the header row first.
            # drop useless rows
            for row in boc.index:
                for col in boc.columns:
                    if str(boc.loc[row,col]).strip()=='交易日期[ Transaction Date ]':
                        header_row = row
                        # print(header_row)
                        break
            boc.columns = boc.loc[header_row,:]
            boc = boc.loc[header_row+1:,:]
            # transform columns
            boc.columns = list(map(lambda x: str(x).strip(),boc.columns))
            if "本方账号" not in boc.columns:
                boc['本方账号'] = None
            if "余额" not in boc.columns:
                boc['余额'] = None
            # Map the bilingual BOC headers onto the internal column names.
            rename_dict = {
                "交易日期[ Transaction Date ]": "交易日期",
                "交易后余额[ After-transaction balance ]":"当前余额",
                "摘要[ Reference ]":"用途",
                "交易类型[ Transaction Type ]":"业务类型",
                "交易货币[ Trade Currency ]":"币种",
                "付款人开户行名[ Payer account bank ]":"付款人开户行名",
                "收款人开户行名[ Beneficiary account bank ]":"收款人开户行名",
                "交易附言[ Remark ]":"交易附言",
                "用途[ Purpose ]":"用途_原",
                "备注[ Remarks ]":"备注",
            }
            boc.rename(columns=rename_dict,inplace=True)
            boc['交易日期'] = pd.to_datetime(boc['交易日期'].str.slice(0,8),format='%Y-%m-%d')
            # Split the signed trade amount into income/payment and pick
            # our/other account and counterparty name by the sign.
            income = np.where(boc["交易金额[ Trade Amount ]"]>=0,boc["交易金额[ Trade Amount ]"],0)
            payment = np.where(boc["交易金额[ Trade Amount ]"]<=0,boc["交易金额[ Trade Amount ]"].abs(),0)
            our_account = np.where(boc["交易金额[ Trade Amount ]"]<=0,boc["付款人账号[ Debit Account No. ]"],boc["收款人账号[ Payee's Account Number ]"])
            your_account = np.where(boc["交易金额[ Trade Amount ]"]>0,boc["付款人账号[ Debit Account No. ]"],boc["收款人账号[ Payee's Account Number ]"])
            otherside = np.where(boc["交易金额[ Trade Amount ]"]<=0,boc["收款人名称[ Payee's Name ]"],boc["付款人名称[ Payer's Name ]"])
            boc['收入'] = income
            boc['支出'] = payment
            boc['账号'] = our_account
            boc['对方账号'] = your_account
            boc['对方户名'] = otherside
            # boc['备注'] = boc[['交易附言[ Remark ]','用途[ Purpose ]']].fillna('').sum(1)
            boc["银行"] = 'BOC-中国银行'
            boc["来源"] = 'U-BOC'
            boc['币种'] = 'CNY-人民币'
            boc['资金系统单据号'] = None
            boc['组织'] = None
            # drop useless columns
            need_fields = ["组织","银行","账号","对方账号","币种","交易日期","收入","支出","当前余额",
                "用途","对方户名","付款人开户行名","收款人开户行名","交易附言",
                "用途_原","交易附言", "来源","业务类型","资金系统单据号",]
            boc = boc[need_fields]
            strip_fields = ["组织","账号","对方账号","币种","用途","对方户名","交易附言","业务类型","交易附言","用途_原"]
            boc.loc[:,strip_fields] = boc[strip_fields].apply(lambda s: s.str.strip().str.replace('[ ()()]',''))
        # Reconciliation markers: matched flag + index of the NC row.
        boc.insert(0,"NC索引",None)
        boc.insert(0,'对账一致',None)
        boc.reset_index(inplace=True)
        # NOTE(review): the sorted result is discarded (no inplace= or
        # reassignment), so this line is a no-op as written.
        boc.sort_values(['index'])
        boc.drop(['index'],axis=1,inplace=True)
        # Numeric columns may contain '-' placeholders and/or thousands
        # separators; fall back to the comma-stripping path on ValueError.
        num_fields = ['收入','支出','当前余额']
        for col in num_fields:
            try:
                boc.loc[:,col] = boc[col].replace({'-':None}).astype(np.float64)
            except ValueError:
                boc.loc[:,col] = boc[col].replace({'-':None}).str.replace(',','').astype(np.float64)
        return boc
# 对账规则 (reconciliation matching rules)
class CheckBOC(object):
def __init__(self,nc_boc,boc,nc_file_name,boc_file_name,save_path=None):
    # nc_boc / boc: cleaned DataFrames produced by DealExcelBOC.
    # nc_file_name / boc_file_name: source workbook names (used for output).
    # save_path: optional directory for saving results.
    self.nc_boc = nc_boc
    self.boc = boc
    self.nc_file_name = nc_file_name
    self.boc_file_name = boc_file_name
    self.save_path = save_path
# income items
def rec_mortgage(self):
    '''
    Receipt of bank mortgage payments.
    eg: 2019-08-02收郭代晓;徐学君[眉山]蘭台府-蘭台府一期-10-2301银行按揭
    rules:
    1. NC debit amount equals bank income amount
    2. same transaction date
    3. the bank counterparty name (对方户名) appears in the NC summary (摘要)
    '''
    regex_mortgage = re.compile(r'.*收.*银行按揭$')
    is_mortgage = self.nc_boc['摘要'].str.match(regex_mortgage)
    nc_mortgage = self.nc_boc[is_mortgage]
    for nc_idx in nc_mortgage.index:
        cond1 = (self.boc['收入']==self.nc_boc.loc[nc_idx,'借方']) # amounts match
        cond2 = (self.boc['交易日期']== self.nc_boc.loc[nc_idx,'交易日期']) # dates match
        boc_mortgage = self.boc[(cond1 & cond2)]
        for idx in boc_mortgage.index:
            if boc_mortgage.loc[idx,"对方户名"] in self.nc_boc.loc[nc_idx,'摘要']: # counterparty name in NC summary
                # Mark both rows matched and cross-link their indices.
                self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
                self.boc.loc[idx,'对账一致'] = 'yes'
                self.nc_boc.loc[nc_idx,'银行索引'] = idx
                self.boc.loc[idx,'NC索引'] = nc_idx
def pos_to_bank(self):
    '''
    POS takings transferred to a bank deposit.
    eg: 8.1中行POS6569转银行存款
    rules:
    1. NC debit amount equals bank income amount
    2. same transaction date
    3. bank counterparty name is 银联商务股份有限公司客户备付金
    (matching on the 交易附言 remark like "0731-0731费0元" was considered as
    an alternative to rule 3; rule 3 is used for now)
    '''
    regex_pos_tobank = re.compile(r'.*中行POS\d+转银行存款')
    is_pos_tobank = self.nc_boc['摘要'].str.match(regex_pos_tobank)
    nc_pos_tobank = self.nc_boc[is_pos_tobank]
    for nc_idx in nc_pos_tobank.index:
        cond1 = (self.boc['收入']==self.nc_boc.loc[nc_idx,'借方']) # amounts match
        cond2 = (self.boc['交易日期']==self.nc_boc.loc[nc_idx,'交易日期']) # dates match
        boc_pos_tobank = self.boc[(cond1 & cond2)]
        for idx in boc_pos_tobank.index:
            if boc_pos_tobank.loc[idx,"对方户名"]== "银联商务股份有限公司客户备付金":
                # Mark both rows matched and cross-link their indices.
                self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
                self.boc.loc[idx,'对账一致'] = 'yes'
                self.nc_boc.loc[nc_idx,'银行索引'] = idx
                self.boc.loc[idx,'NC索引'] = nc_idx
def rec_pos(self):
    '''
    POS settlement arriving in the account.
    eg: 0808-0808POS到账
    rules:
    1. NC debit amount equals bank income amount
    2. the dd-dd range found in the bank remark (交易附言) equals the range
       found in the NC summary (摘要)
    '''
    regex_pos = re.compile(r'\d+-\d+POS到账.*|\d+-\d+pos到账.*')
    is_pos = self.nc_boc['摘要'].str.match(regex_pos)
    nc_pos = self.nc_boc[is_pos]
    for nc_idx in nc_pos.index:
        cond1 = (self.boc['收入']==self.nc_boc.loc[nc_idx,'借方']) # amounts match
        boc_pos = self.boc[(cond1)]
        for idx in boc_pos.index:
            # Compare the dd-dd settlement ranges from both sides.
            note_cond = re.findall(r'\d+-\d+',str(boc_pos.loc[idx,'交易附言']))
            substract_cond = re.findall(r'\d+-\d+',self.nc_boc.loc[nc_idx,'摘要'])
            if note_cond==substract_cond:
                self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
                self.boc.loc[idx,'对账一致'] = 'yes'
                self.nc_boc.loc[nc_idx,'银行索引'] = idx
                self.boc.loc[idx,'NC索引'] = nc_idx
def rec_loans(self):
    '''
    Receipt of a repaid loan.
    eg: 收到yangxj-杨欣洁归还F0403-因公临时借款
    rules:
    1. NC debit amount equals bank income amount
    2. the bank counterparty name appears in the NC summary (摘要)
    '''
    regex_rec_loans = re.compile(r'.*收到.*归还.*借款')
    is_rec_loans = self.nc_boc['摘要'].str.match(regex_rec_loans)
    nc_rec_loans = self.nc_boc[is_rec_loans]
    for nc_idx in nc_rec_loans.index:
        cond1 = (self.boc['收入']==self.nc_boc.loc[nc_idx,'借方']) # amounts match
        boc_rec_loans = self.boc[(cond1)]
        for idx in boc_rec_loans.index:
            if boc_rec_loans.loc[idx,"对方户名"] in self.nc_boc.loc[nc_idx,'摘要']:
                # Mark both rows matched and cross-link their indices.
                self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
                self.boc.loc[idx,'对账一致'] = 'yes'
                self.nc_boc.loc[nc_idx,'银行索引'] = idx
                self.boc.loc[idx,'NC索引'] = nc_idx
def rec_appointment_building(self):
    '''
    Receipt of house payments / deposits / reservation fees.
    eg:
    2019-09-02收郭胜财[乐山]青江蘭台-一期-产权车位-车位1层196定金
    2019-09-04收商永志;刘雨妃[乐山]青江蘭台-一期-2号楼-2002楼款
    2019-09-04收唐松柏;翟鹏群[乐山]青江蘭台-一期-7号楼-2903预约金
    rule1 (one NC row <-> one bank row):
    1. amounts equal  2. same date  3. counterparty name in NC summary
    rule2 (several NC rows <-> one aggregated bank row):
    1. same date  2. bank counterparty is 银联商务股份有限公司客户备付金
    3. NC amounts summed per day  4. bank amount equals the daily NC sum
    rule3: like rule2 but matched via the bank 用途 field ('dddd-dddd费...')
    instead of the date.
    '''
    # rule1
    regex_appointment_building = re.compile(r'.*收.*定金.*|.*收.*楼款.*|.*收.*预约金.*|.*收.*垫付款.*')
    is_appointment_building = self.nc_boc['摘要'].str.match(regex_appointment_building)
    nc_appointment_building = self.nc_boc[is_appointment_building]
    for nc_idx in nc_appointment_building.index:
        cond1 = (self.boc['收入']==self.nc_boc.loc[nc_idx,'借方']) # amounts match
        cond2 = (self.boc['交易日期']== self.nc_boc.loc[nc_idx,'交易日期']) # dates match
        boc_appointment_building = self.boc[(cond1 & cond2)]
        for idx in boc_appointment_building.index:
            if boc_appointment_building.loc[idx,"对方户名"] in self.nc_boc.loc[nc_idx,'摘要']: # counterparty name in NC summary
                self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
                self.boc.loc[idx,'对账一致'] = 'yes'
                self.nc_boc.loc[nc_idx,'银行索引'] = idx
                self.boc.loc[idx,'NC索引'] = nc_idx
    # rule2: daily NC sums vs one aggregated bank row
    nc_sum_appointment_building = nc_appointment_building.groupby(['交易日期'])['借方'].sum().reset_index().rename(columns={"借方":"借方和"})
    for sum_idx in nc_sum_appointment_building.index:
        time_cond = (self.boc['交易日期']==nc_sum_appointment_building.loc[sum_idx,'交易日期'])
        # NOTE(review): str(Series) stringifies the whole column, so this
        # comparison is always False as written; rule3 below uses the
        # element-wise .str form instead.
        otherside_cond = (str(self.boc['对方户名']).strip()=="银联商务股份有限公司客户备付金")
        boc_appointment_building = self.boc[(time_cond&otherside_cond)]
        for idx in boc_appointment_building.index:
            if boc_appointment_building.loc[idx,'收入'] == nc_sum_appointment_building.loc[sum_idx,'借方和']:
                idxs_cond = (nc_appointment_building['交易日期']==nc_sum_appointment_building.loc[sum_idx,'交易日期'])
                nc_idxs = nc_appointment_building[idxs_cond].index
                self.nc_boc.loc[nc_idxs,'对账一致'] = 'yes'
                self.boc.loc[idx,'对账一致'] = 'yes'
                self.nc_boc.loc[nc_idxs,'银行索引'] = idx
                self.boc.loc[idx,'NC索引'] = ";".join(map(str,nc_idxs.values))
    # rule3: match via the bank 用途 pattern instead of the date
    nc_sum_appointment_building = nc_appointment_building.groupby(['交易日期'])['借方'].sum().reset_index().rename(columns={"借方":"借方和"})
    for sum_idx in nc_sum_appointment_building.index:
        purpose_cond = (self.boc['用途'].str.match(r'\d{4}-\d{4}费'))
        otherside_cond = (self.boc['对方户名'].str.strip()=="银联商务股份有限公司客户备付金")
        boc_appointment_building = self.boc[(purpose_cond&otherside_cond)]
        for idx in boc_appointment_building.index:
            if boc_appointment_building.loc[idx,'收入'] == nc_sum_appointment_building.loc[sum_idx,'借方和']:
                idxs_cond = (nc_appointment_building['交易日期']==nc_sum_appointment_building.loc[sum_idx,'交易日期'])
                nc_idxs = nc_appointment_building[idxs_cond].index
                self.nc_boc.loc[nc_idxs,'对账一致'] = 'yes'
                self.boc.loc[idx,'对账一致'] = 'yes'
                self.nc_boc.loc[nc_idxs,'银行索引'] = idx
                self.boc.loc[idx,'NC索引'] = ";".join(map(str,nc_idxs.values))
def rec_pfund(self):
    '''
    Receipt of housing provident fund (公积金) payments.
    eg: 2019-08-02收管齐意[眉山]蘭台府-蘭台府一期-01-1004公积金
    rule1 (row by row):
    1. amounts equal  2. same date
    3. bank counterparty is either the person's name, the fund account
       个人住房公积金委托贷款资金-个人住房公积金委托贷款资金, or blank with
       the person's name embedded in the bank 用途 field
       (eg. '高雨蝶个人住房公积金')
    rule2 (all NC provident-fund rows summed <-> one bank row):
    1. NC total equals bank income  2. same date when available
    3. bank counterparty is the fund account
    '''
    # rule1
    regex_pfund = re.compile(r'.*收.*公积金$')
    is_pfund = self.nc_boc['摘要'].str.match(regex_pfund)
    nc_pfund = self.nc_boc[is_pfund]
    for nc_idx in nc_pfund.index:
        cond1 = (self.boc['收入']==self.nc_boc.loc[nc_idx,'借方']) # amounts match
        cond2 = (self.boc['交易日期']== self.nc_boc.loc[nc_idx,'交易日期']) # dates match
        boc_pfund = self.boc[(cond1 & cond2)]
        for idx in boc_pfund.index:
            if str(boc_pfund.loc[idx,'对方户名']) in self.nc_boc.loc[nc_idx,'摘要']: # counterparty name in NC summary
                self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
                self.boc.loc[idx,'对账一致'] = 'yes'
                self.nc_boc.loc[nc_idx,'银行索引'] = idx
                self.boc.loc[idx,'NC索引'] = nc_idx
            elif str(boc_pfund.loc[idx,'对方户名'])=="个人住房公积金委托贷款资金-个人住房公积金委托贷款资金":
                self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
                self.boc.loc[idx,'对账一致'] = 'yes'
                self.nc_boc.loc[nc_idx,'银行索引'] = idx
                self.boc.loc[idx,'NC索引'] = nc_idx
            else: # counterparty is blank: take the name from the 用途 field
                person_name = re.findall('(.*)个人住房公积金',boc_pfund.loc[idx,'用途'])[0] #['高雨蝶']
                if person_name in self.nc_boc.loc[nc_idx,'摘要']:
                    self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
                    self.boc.loc[idx,'对账一致'] = 'yes'
                    self.nc_boc.loc[nc_idx,'银行索引'] = idx
                    self.boc.loc[idx,'NC索引'] = nc_idx
    # rule2
    total_pfund = nc_pfund['借方'].sum()
    cond1 = (self.boc['收入']==total_pfund) # bank income equals NC total
    try:
        cond2 = (self.boc['交易日期']== nc_pfund['交易日期'][0]) # dates match
        boc_pfund = self.boc[(cond1&cond2)]
    except (IndexError,KeyError): # nc_pfund is an empty df, or label 0 absent
        boc_pfund = self.boc[cond1]
    for idx in boc_pfund.index:
        if boc_pfund.loc[idx,'对方户名']=="个人住房公积金委托贷款资金-个人住房公积金委托贷款资金":
            self.nc_boc.loc[nc_pfund.index,'对账一致'] = 'yes'
            self.boc.loc[idx,'对账一致'] = 'yes'
            # NOTE(review): ``nc_idx`` leaks from the rule-1 loop above; when
            # nc_pfund is empty it is unbound (NameError) and otherwise only
            # the last rule-1 row gets the bank index -- verify intent.
            self.nc_boc.loc[nc_idx,'银行索引'] = idx
            self.boc.loc[idx,'NC索引'] = ';'.join(map(str,nc_pfund.index.values))
def rec_fee(self):
'''
收手续费/代收费用
eg:
2019-09-07收赵亮;左雪莲[眉山]凯旋国际公馆-一期-4号楼商业-106附1代收费用
2019-09-07收赵亮;左雪莲[眉山]凯旋国际公馆-一期-4号楼商业-106附1手续费
rule:
1. 借贷金额相同
2. 交易时间相同
3. 银行——对方户名: 姓名
'''
regex_fee = re.compile(r'.*收.*代收费用|.*收.*手续费')
is_fee = self.nc_boc['摘要'].str.match(regex_fee)
nc_fee = self.nc_boc[is_fee]
for nc_idx in nc_fee.index:
cond1 = (self.boc['收入']==self.nc_boc.loc[nc_idx,'借方']) #借贷金额相同
cond2 = (self.boc['交易日期']== self.nc_boc.loc[nc_idx,'交易日期']) #交易时间相同
boc_fee = self.boc[(cond1 & cond2)]
for idx in boc_fee.index:
if str(boc_fee.loc[idx,"对方户名"]) in self.nc_boc.loc[nc_idx,'摘要']: # 对方单位为 nc摘要中的姓名
self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
self.boc.loc[idx,'对账一致'] = 'yes'
self.nc_boc.loc[nc_idx,'银行索引'] = idx
self.boc.loc[idx,'NC索引'] = nc_idx
def rec_firmamount(self):
'''
eg:
2019-09-07收领地集团股份有限公司往来款
rule:
1. 借贷金额相同
2. 交易时间相同
3. 银行——对方单位:领地集团股份有限公司
'''
regex_rec_firmamount = re.compile(r'.*收.*往来款.*')
is_rec_firmamount = (self.nc_boc['摘要'].str.match(regex_rec_firmamount))
nc_rec_firmamount = self.nc_boc[is_rec_firmamount]
for nc_idx in nc_rec_firmamount.index:
cond1 = (self.boc['收入']==self.nc_boc.loc[nc_idx,'借方']) #借贷金额相同
cond2 = (self.boc['交易日期'] == self.nc_boc.loc[nc_idx,'交易日期'])
boc_rec_firmamount = self.boc[(cond1&cond2)]
for idx in boc_rec_firmamount.index:
otherside_cond = (str(boc_rec_firmamount.loc[idx,"对方户名"]) in self.nc_boc.loc[nc_idx,'摘要'])
if otherside_cond:
self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
self.boc.loc[idx,'对账一致'] = 'yes'
self.nc_boc.loc[nc_idx,'银行索引'] = idx
self.boc.loc[idx,'NC索引'] = nc_idx
# payments
def prepay_amount(self):
'''
支付预付款
eg:支付奥的斯电梯(中国)有限公司眉山领地.蘭台府商住小区(一期)高层电梯设备采购合同预付款<br>
rule:<br>
1. 借贷金额相同
2. 银行——收款人名称[ Payee's Name ]:奥的斯电梯(中国)有限公司
'''
regex_prepayments = re.compile(r'支付.*预付款$')
is_prepayments = self.nc_boc['摘要'].str.match(regex_prepayments)
nc_prepayments = self.nc_boc[is_prepayments]
for nc_idx in nc_prepayments.index:
cond1 = (self.boc['支出']==self.nc_boc.loc[nc_idx,'贷方']) #借贷金额相同
boc_prepayments = self.boc[(cond1)]
for idx in boc_prepayments.index:
if boc_prepayments.loc[idx,"对方户名"] in self.nc_boc.loc[nc_idx,'摘要']: # 对方单位为 nc摘要中的名称
self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
self.boc.loc[idx,'对账一致'] = 'yes'
self.nc_boc.loc[nc_idx,'银行索引'] = idx
self.boc.loc[idx,'NC索引'] = nc_idx
def pay_bankfee(self):
'''
支付中行手续费
eg: 8.22中行7014手续费 <br>
rule1:
> 逐笔比对
1. 借贷金额相同
2. 交易时间相同
2. 银行——付款人开户行名[ Payer account bank ]:中国银行眉山新区支行 【中国银行】<br>
银行——收款人开户行名[ Beneficiary account bank ]:为空
rule2:
> 汇总比对,nc/银行各自可能有汇总/分散金额情况
1. nc——摘要:中行手续费
2. 银行——付款人开户行名[ Payer account bank ]:中国银行眉山新区支行 【中国银行】<br>
银行——收款人开户行名[ Beneficiary account bank ]:为空
3. 汇总nc手续费金额=汇总银行金额
弃用rule:
> 银行——摘要[ Reference ]:转账汇款手续费 【有些摘要不适用】
eg: 【资金系统】
1297手续费
rule1:
> 逐笔比对
rule2:
> nc为单笔金额,银行为多笔
1. 银行——用途:询证函、验资证明、开户证明、结算资信证明、工商验资E线通
2. 银行——对方户名:空
2. 汇总银行金额
3. nc金额=银行汇总
支付银行手续费
eg: 8.7中行6569手续费
9.5 银行手续费
rule:
> 逐笔比对
> nc是一个总额,银行为多笔金额
1. 交易时间相同
2. 银行-摘要:收费/SMSP Service Charge/对公跨行转账汇款手续费
3. 汇总银行金额等于NC贷方金额
'''
regex_pay_bankfee = re.compile(r'.*[中银]行.*手续费$')
is_pay_bankfee = self.nc_boc['摘要'].str.match(regex_pay_bankfee)
nc_pay_bankfee = self.nc_boc[is_pay_bankfee]
# rule1: sum
nc_total_bankfee = nc_pay_bankfee['贷方'].sum()
payer_cond = self.boc["付款人开户行名"].str.contains('中国银行')
benef_cond = self.boc["收款人开户行名"].isnull()
boc_pay_bankfee = self.boc[(payer_cond & benef_cond)]
boc_total_bankfee = boc_pay_bankfee['支出'].sum()
if nc_total_bankfee==boc_total_bankfee:
self.nc_boc.loc[nc_pay_bankfee.index,'对账一致'] = 'yes'
self.boc.loc[boc_pay_bankfee.index,'对账一致'] = 'yes'
self.nc_boc.loc[nc_pay_bankfee.index,'银行索引'] = ';'.join(map(str,boc_pay_bankfee.index.values))
self.boc.loc[boc_pay_bankfee.index,'NC索引'] = ';'.join(map(str,nc_pay_bankfee.index.values))
# rule1_:sum
for nc_idx in nc_pay_bankfee.index:
cond1 = (self.boc['交易日期'] ==self.nc_boc.loc[nc_idx, '交易日期']) # 交易时间相同
cond2 = (self.boc['用途'].str.contains('收费|ServiceCharge|手续费'))
boc_pay_bankfee = self.boc[(cond1 & cond2)]
if boc_pay_bankfee['支出'].sum() == self.nc_boc.loc[nc_idx, '贷方']:
self.nc_boc.loc[nc_idx, '对账一致'] = 'yes'
self.boc.loc[boc_pay_bankfee.index, '对账一致'] = 'yes'
self.nc_boc.loc[nc_idx, '银行索引'] = ';'.join(map(str, boc_pay_bankfee.index.values))
self.boc.loc[boc_pay_bankfee.index, 'NC索引'] = nc_idx
# rule2:
for nc_idx in nc_pay_bankfee.index:
cond1 = (self.boc['支出'] == self.nc_boc.loc[nc_idx, '贷方']) # 借贷金额相同
cond2 = (self.boc['交易日期'] ==self.nc_boc.loc[nc_idx, '交易日期']) # 交易时间相同
boc_pay_bankfee = self.boc[(cond1 & cond2)]
for idx in boc_pay_bankfee.index:
payer_cond = ('中国银行' in boc_pay_bankfee.loc[idx, "付款人开户行名"])
benef_cond = (boc_pay_bankfee.loc[idx, "收款人开户行名"] is np.nan)
purpose_cond1 = ("收费" in boc_pay_bankfee.loc[idx, '用途'])
purpose_cond2 = ("ServiceCharge" in boc_pay_bankfee.loc[idx, '用途'])
purpose_cond3 = ("手续费" in boc_pay_bankfee.loc[idx, '用途'])
cond1 = (payer_cond&benef_cond&purpose_cond1)
cond2 = (payer_cond&benef_cond&purpose_cond2)
cond3 = (payer_cond&benef_cond&purpose_cond3)
if cond1 or cond2 or cond3:
self.nc_boc.loc[nc_idx, '对账一致'] = 'yes'
self.boc.loc[idx, '对账一致'] = 'yes'
self.nc_boc.loc[nc_idx, '银行索引'] = idx
self.boc.loc[idx, 'NC索引'] = nc_idx
# 资金系统rule1:
regex_pay_bankfee = re.compile(r'.*\d+手续费$')
is_pay_bankfee = self.nc_boc['摘要'].str.match(regex_pay_bankfee)
nc_pay_bankfee = self.nc_boc[is_pay_bankfee]
for nc_idx in nc_pay_bankfee.index:
cond1 = (self.boc['用途'].str.startswith("询证函|验资证明|开户证明|结算资信证明|工商验资E线通|收费项目"))
cond2 = (self.boc["对方户名"].isnull())
boc_pay_bankfee = self.boc[(cond1&cond2)]
boc_sum_pay_bankfee = boc_pay_bankfee.groupby(['交易日期'])['支出'].sum().reset_index().rename(columns={'支出':'支出和'})
for sum_idx in boc_sum_pay_bankfee.index:
if boc_sum_pay_bankfee.loc[sum_idx,'支出和']==nc_pay_bankfee.loc[nc_idx,'贷方']:
boc_idxs_cond = (boc_pay_bankfee['交易日期']==boc_sum_pay_bankfee.loc[sum_idx,'交易日期'])
boc_idxs = boc_pay_bankfee[boc_idxs_cond].index
self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
self.boc.loc[boc_idxs,'对账一致'] = 'yes'
self.nc_boc.loc[nc_idx,'银行索引'] = ';'.join(map(str,boc_idxs.values))
self.boc.loc[boc_idxs,'NC索引'] = nc_idx
# 资金系统rule2:
for nc_idx in nc_pay_bankfee.index:
cond1 = (self.boc['支出'] == self.nc_boc.loc[nc_idx, '贷方']) # 借贷金额相同
cond2 = (self.boc["对方户名"].isnull())
boc_pay_bankfee = self.boc[(cond1 & cond2)]
for idx in boc_pay_bankfee.index:
purpose_cond = (
boc_pay_bankfee.loc[idx, '用途'].startswith("询证函|验资证明|开户证明|结算资信证明|工商验资E线通|收费项目"))
if purpose_cond:
self.nc_boc.loc[nc_idx, '对账一致'] = 'yes'
self.boc.loc[idx, '对账一致'] = 'yes'
self.nc_boc.loc[nc_idx, '银行索引'] = idx
self.boc.loc[idx, 'NC索引'] = nc_idx
def pay_progressamount(self):
'''
支付合同进度款
eg:支付华优建筑设计院有限责任公司成都分公司眉山领地 蘭台府项目人防设计合同进度款<br>
rule1:<br>
> 逐笔比对
1. 借贷金额相同
2. 银行——收款人名称[ Payee's Name ]:华优建筑设计院有限责任公司成都分公司
<br>
eg: 支付乐山市银河建筑工程有限公司领地凯旋府土石方工程施工合同进度款<br>
rule2:
> 双边汇总,nc可能为多笔金额,银行也为多笔金额,且只有总数等同
1. 汇总nc贷方金额
2. 银行——收款人名称[ Payee's Name ]:乐山市银河建筑工程有限公司
3. 汇总银行金额
4. nc总额=银行总额
'''
# rule1:
regex_progress_payment = re.compile(r'支付.*进度款$')
is_progress_payment = self.nc_boc['摘要'].str.match(regex_progress_payment)
nc_progress_payment = self.nc_boc[is_progress_payment]
for nc_idx in nc_progress_payment.index:
cond1 = (self.boc['支出']==self.nc_boc.loc[nc_idx,'贷方']) #借贷金额相同
boc_progress_payment = self.boc[(cond1)]
for idx in boc_progress_payment.index:
otherside_cond = (boc_progress_payment.loc[idx,"对方户名"] in self.nc_boc.loc[nc_idx,'摘要'])
if otherside_cond: # 对方单位为 nc摘要中的名称
self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
self.boc.loc[idx,'对账一致'] = 'yes'
# self.nc_boc.loc[nc_idx,'银行索引'] = idx
# self.boc.loc[idx,'NC索引'] = nc_idx
self.nc_boc.loc[nc_idx,'银行索引'] = idx
self.boc.loc[idx,'NC索引'] = nc_idx
# rule2:
nc_sum_progress = nc_progress_payment.groupby('摘要').agg({'贷方':'sum'}).reset_index() #注意摘要不同,分组求和
nc_sum_progress.rename(columns={'贷方':'贷方和'},inplace=True)
for sum_idx in nc_sum_progress.index:
boc_idxs = []
for idx in self.boc.index:
if str(self.boc.loc[idx,"对方户名"]) in nc_sum_progress.loc[sum_idx,'摘要']:
boc_idxs.append(idx)
# print(boc.loc[boc_idxs,'交易金额[ Trade Amount ]'].sum())
# print(nc_sum_progress.loc[nc_idx,'贷方和'])
# 5020382.89
# 5020382.890000001
boc_sum_progress = self.boc.loc[boc_idxs]['支出'].sum()
if np.around(boc_sum_progress,2)==np.around(nc_sum_progress.loc[sum_idx,'贷方和'],2):
idx_cond1 = (nc_progress_payment['摘要']==nc_sum_progress.loc[sum_idx,'摘要'])
nc_idxs = nc_progress_payment[(idx_cond1)].index
self.nc_boc.loc[nc_idxs,'对账一致'] = 'yes'
self.boc.loc[boc_idxs,'对账一致'] = 'yes'
self.nc_boc.loc[nc_idxs,'银行索引'] = ';'.join(map(str,boc_idxs))
self.boc.loc[boc_idxs,'NC索引'] = ';'.join(map(str,nc_idxs.values))
def capital_pool(self):
'''
资金池归集——现有规则在金额相同时可能和付集团往来款匹配相交叉
eg: 资金池归集 <br>
rule:
1. 借贷金额相同
2. 银行——收款人名称[ Payee's Name ]:领地集团股份有限公司
2. 银行——用途[ Purpose ]:资金池归集+CC00000DKO
> 没有更好的规则,依赖金额的差异性;与付集团往来款基本相同,但集团往来款有时间字段进行约束。
'''
is_capital_pool = (self.nc_boc['摘要'].str.strip()=='资金池归集')
nc_capital_pool = self.nc_boc[is_capital_pool]
for nc_idx in nc_capital_pool.index:
cond1 = (self.boc['支出']==self.nc_boc.loc[nc_idx,'贷方']) #借贷金额相同
boc_capital_pool = self.boc[(cond1)]
for idx in boc_capital_pool.index:
purpose_cond = ('资金池归集' in boc_capital_pool.loc[idx,"用途_原"])
receiver_cond = (boc_capital_pool.loc[idx,"对方户名"].startswith('领地集团股份有限公司'))
if purpose_cond and receiver_cond:
self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
self.boc.loc[idx,'对账一致'] = 'yes'
self.nc_boc.loc[nc_idx,'银行索引'] = idx
self.boc.loc[idx,'NC索引'] = nc_idx
def pay_group(self):
'''
付集团往来款
eg: 2019-08-12付集团往来款<br>
rule:
1. 借贷金额相同
2. 交易时间相同
3. 银行——收款人名称[ Payee's Name ]:领地集团股份有限公司 【含】
'''
regex_pay_group = re.compile(r'.*付集团往来款')
is_pay_group = self.nc_boc['摘要'].str.match(regex_pay_group)
nc_pay_group = self.nc_boc[is_pay_group]
for nc_idx in nc_pay_group.index:
cond1 = (self.boc['支出']==self.nc_boc.loc[nc_idx,'贷方']) #借贷金额相同
cond2 = (self.boc['交易日期']==self.nc_boc.loc[nc_idx,'交易日期']) #交易时间相同
boc_pay_group = self.boc[(cond1 & cond2)]
for idx in boc_pay_group.index:
if boc_pay_group.loc[idx,"对方户名"].startswith('领地集团股份有限公司'):
self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
self.boc.loc[idx,'对账一致'] = 'yes'
self.nc_boc.loc[nc_idx,'银行索引'] = idx
self.boc.loc[idx,'NC索引'] = nc_idx
def pay_firmamount(self):
'''
eg:
2019-09-07支付领地集团股份有限公司往来款
rule:
1. 借贷金额相同
2. 交易时间相同
3. 银行——对方单位:领地集团股份有限公司
'''
regex_pay_firmamount = re.compile(r'.*付.*往来款.*')
is_pay_firmamount = (self.nc_boc['摘要'].str.match(regex_pay_firmamount))
nc_pay_firmamount = self.nc_boc[is_pay_firmamount]
for nc_idx in nc_pay_firmamount.index:
cond1 = (self.boc['支出']==self.nc_boc.loc[nc_idx,'贷方']) #借贷金额相同
cond2 = (self.boc['交易日期'] == self.nc_boc.loc[nc_idx,'交易日期'])
boc_pay_firmamount = self.boc[(cond1&cond2)]
for idx in boc_pay_firmamount.index:
otherside_cond = (str(boc_pay_firmamount.loc[idx,"对方户名"]) in self.nc_boc.loc[nc_idx,'摘要'])
if otherside_cond:
self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
self.boc.loc[idx,'对账一致'] = 'yes'
self.nc_boc.loc[nc_idx,'银行索引'] = idx
self.boc.loc[idx,'NC索引'] = nc_idx
def bank_transfer(self):
'''
跨行转账
eg: 8.26中行7014到工行1862<br>
8.8中行6569到农行0752监管资金<br>
rule:<br>
1. 借贷金额相同
2. 交易时间相同
3. 银行——摘要:OBSS022843398687GIRO000000000000【没法用】
4. 银行——付款人开户行名[ Payer account bank ]:中国银行眉山新区支行 【中国银行】<br>
银行——收款人开户行名[ Beneficiary account bank ]:中国工商银行眉山市分行业务处理中心 【工商银行】
'''
regex_bank_transfer = re.compile(r'.*行\d{4}到.*行\d{4}$|.*行\d{4}到.*行\d{4}监管资金$')
is_bank_transfer = self.nc_boc['摘要'].str.match(regex_bank_transfer)
nc_bank_transfer = self.nc_boc[is_bank_transfer]
bank_name = {
'中行':'中国银行',
'工行':'工商银行',
'农行':'农业银行',
'建行':'建设银行'
}
for nc_idx in nc_bank_transfer.index:
cond1 = (self.boc['支出']==self.nc_boc.loc[nc_idx,'贷方']) #借贷金额相同
cond2 = (self.boc['交易日期']== self.nc_boc.loc[nc_idx,'交易日期']) #交易时间相同
boc_bank_transfer = self.boc[(cond1 & cond2)]
bank_regex = re.compile(r'\d+\.\d+(.*)\d{4}到(.*)\d{4}')
from_to = bank_regex.search(self.nc_boc.loc[nc_idx,'摘要']).groups()
from_bank = from_to[0]
to_bank = from_to[1]
for idx in boc_bank_transfer.index:
from_cond = bank_name.get(from_bank) in boc_bank_transfer.loc[idx,'付款人开户行名']
to_cond = bank_name.get(to_bank) in boc_bank_transfer.loc[idx,'收款人开户行名']
if from_cond and to_cond: # 跨行起始一致
self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
self.boc.loc[idx,'对账一致'] = 'yes'
self.nc_boc.loc[nc_idx,'银行索引'] = idx
self.boc.loc[idx,'NC索引'] = nc_idx
for nc_idx in nc_bank_transfer.index:
cond1 = (self.boc['收入']==self.nc_boc.loc[nc_idx,'借方']) #借贷金额相同
cond2 = (self.boc['交易日期']== self.nc_boc.loc[nc_idx,'交易日期']) #交易时间相同
boc_bank_transfer = self.boc[(cond1 & cond2)]
bank_regex = re.compile(r'\d+\.\d+(.*)\d{4}到(.*)\d{4}')
from_to = bank_regex.search(self.nc_boc.loc[nc_idx,'摘要']).groups()
from_bank = from_to[0]
to_bank = from_to[1]
for idx in boc_bank_transfer.index:
from_cond = bank_name.get(from_bank) in boc_bank_transfer.loc[idx,'付款人开户行名']
to_cond = bank_name.get(to_bank) in boc_bank_transfer.loc[idx,'收款人开户行名']
if from_cond and to_cond: # 跨行起始一致
self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
self.boc.loc[idx,'对账一致'] = 'yes'
self.nc_boc.loc[nc_idx,'银行索引'] = idx
self.boc.loc[idx,'NC索引'] = nc_idx
    def inner_transfer(self):
        '''
        Reconcile internal transfers between the company's own accounts.

        Bank-ledger form, e.g. "内部转账(10482~9037)":
            1. amounts agree
            2. the bank counterparty account number ends with the second
               account number parsed from the NC summary
            3. the bank purpose text ("用途" or "用途_原") mentions 往来款
        Treasury-system form, e.g. "2019-09-02内部转款":
            1. amounts agree
            2. same transaction date
            3. no further usable constraint is available
        Both directions (outgoing and incoming) are checked for each form.
        Matched rows get '对账一致' = 'yes' plus cross-reference indexes.
        '''
        regex_inner_transfer = re.compile(r'.*内部转[账帐款].*')
        is_inner_transfer = self.nc_boc['摘要'].str.match(regex_inner_transfer)
        nc_inner_transfer = self.nc_boc[is_inner_transfer]
        # rule1: outgoing direction, matched via the account number pair
        for nc_idx in nc_inner_transfer.index:
            cond1 = (self.boc['支出']==self.nc_boc.loc[nc_idx,'贷方']) # amounts agree
            boc_inner_transfer = self.boc[(cond1)]
            for idx in boc_inner_transfer.index:
                bank_number = re.findall(r'(\d+)[~-——](\d+)',self.nc_boc.loc[nc_idx,'摘要']) #[('10482', '9037')]
                try:
                    otherside_cond = (str(boc_inner_transfer.loc[idx,"对方账号"]).endswith(bank_number[0][1]))
                    substract_cond1 = ("往来款" in boc_inner_transfer.loc[idx,'用途'])
                    substract_cond2 = ("往来款" in boc_inner_transfer.loc[idx,'用途_原'])
                    if (otherside_cond and substract_cond1) or (otherside_cond and substract_cond2):
                        self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
                        self.boc.loc[idx,'对账一致'] = 'yes'
                        self.nc_boc.loc[nc_idx,'银行索引'] = idx
                        self.boc.loc[idx,'NC索引'] = nc_idx
                except IndexError:
                    # summary carried no "(nnn~nnn)" account pair
                    pass
        # rule1_: incoming direction, same account-number matching
        for nc_idx in nc_inner_transfer.index:
            cond1 = (self.boc['收入']==self.nc_boc.loc[nc_idx,'借方']) # amounts agree
            boc_inner_transfer = self.boc[(cond1)]
            for idx in boc_inner_transfer.index:
                bank_number = re.findall(r'(\d+)[~-——](\d+)',self.nc_boc.loc[nc_idx,'摘要']) #[('10482', '9037')]
                try:
                    otherside_cond = (str(boc_inner_transfer.loc[idx,"对方账号"]).endswith(bank_number[0][1]))
                    substract_cond1 = ("往来款" in boc_inner_transfer.loc[idx,'用途'])
                    substract_cond2 = ("往来款" in boc_inner_transfer.loc[idx,'用途_原'])
                    if (otherside_cond and substract_cond1) or (otherside_cond and substract_cond2):
                        self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
                        self.boc.loc[idx,'对账一致'] = 'yes'
                        self.nc_boc.loc[nc_idx,'银行索引'] = idx
                        self.boc.loc[idx,'NC索引'] = nc_idx
                except IndexError:
                    # summary carried no "(nnn~nnn)" account pair
                    pass
        # # Treasury-system aggregate case: conflicts with the bank-fee rule !!!
        # nc_inner_transfer_cond = (nc_inner_transfer['贷方'].notnull() & nc_inner_transfer['贷方']!=0. & nc_inner_transfer['贷方'].astype(str).str.strip()!='')
        # for nc_idx in nc_inner_transfer[nc_inner_transfer_cond].index:
        #     boc_sum_cond = (self.boc['支出'].notnull() & self.boc['支出']!=0. & self.boc['支出'].str.strip()!='')
        #     boc_inner_transfer = self.boc[boc_sum_cond]
        #     boc_sum_inner_transfer = boc_inner_transfer.groupby(
        #         ['交易日期', '对方户名'])['支出'].sum().reset_index().rename(columns={'支出': '支出和'})
        #     for sum_idx in boc_sum_inner_transfer.index:
        #         if boc_sum_inner_transfer.loc[sum_idx,'支出和']==nc_inner_transfer.loc[nc_idx,'贷方']:
        #             boc_idxs_cond1 = (boc_inner_transfer['交易日期']== boc_sum_inner_transfer.loc[sum_idx,'交易日期'])
        #             boc_idxs = boc_inner_transfer[(boc_idxs_cond1)].index
        #             self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
        #             self.boc.loc[boc_idxs,'对账一致'] = 'yes'
        #             self.nc_boc.loc[nc_idx,'银行索引'] = ';'.join(map(str,boc_idxs.values))
        #             self.boc.loc[boc_idxs,'NC索引'] = nc_idx
        # # Treasury-system aggregate case, incoming direction:
        # nc_inner_transfer_cond = (nc_inner_transfer['借方'].notnull(
        # ) & nc_inner_transfer['借方'] != 0. & nc_inner_transfer['借方'].astype(str).str.strip() != '')
        # for nc_idx in nc_inner_transfer[nc_inner_transfer_cond].index:
        #     boc_sum_cond = (self.boc['收入'].notnull() & self.boc['收入'] != 0. & self.boc['收入'].str.strip()!='')
        #     boc_inner_transfer = self.boc[boc_sum_cond]
        #     boc_sum_inner_transfer = boc_inner_transfer.groupby(
        #         ['交易日期', '对方户名'])['收入'].sum().reset_index().rename(columns={'收入': '收入和'})
        #     for sum_idx in boc_sum_inner_transfer.index:
        #         if boc_sum_inner_transfer.loc[sum_idx,'收入和']==nc_inner_transfer.loc[nc_idx,'借方']:
        #             boc_idxs_cond1 = (boc_inner_transfer['交易日期']== boc_sum_inner_transfer.loc[sum_idx,'交易日期'])
        #             boc_idxs = boc_inner_transfer[(boc_idxs_cond1)].index
        #             self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
        #             self.boc.loc[boc_idxs,'对账一致'] = 'yes'
        #             self.nc_boc.loc[nc_idx,'银行索引'] = ';'.join(map(str,boc_idxs.values))
        #             self.boc.loc[boc_idxs,'NC索引'] = nc_idx
        # treasury-system rule1: outgoing, amount + date only
        for nc_idx in nc_inner_transfer.index:
            cond1 = (self.boc['支出']==self.nc_boc.loc[nc_idx,'贷方']) # amounts agree
            cond2 = (self.boc['交易日期']==self.nc_boc.loc[nc_idx,'交易日期']) # dates agree
            boc_inner_transfer = self.boc[(cond1&cond2)]
            for idx in boc_inner_transfer.index:
                # NOTE(review): this re-checks the date already enforced by
                # cond2, so it is always True here -- kept as-is.
                time_cond = (boc_inner_transfer.loc[idx,'交易日期']==self.nc_boc.loc[nc_idx,'交易日期'])
                if time_cond:
                    self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
                    self.boc.loc[idx,'对账一致'] = 'yes'
                    self.nc_boc.loc[nc_idx,'银行索引'] = idx
                    self.boc.loc[idx,'NC索引'] = nc_idx
        # treasury-system rule2: incoming, amount + date only
        for nc_idx in nc_inner_transfer.index:
            cond1 = (self.boc['收入']==self.nc_boc.loc[nc_idx,'借方']) # amounts agree
            cond2 = (self.boc['交易日期']==self.nc_boc.loc[nc_idx,'交易日期']) # dates agree
            boc_inner_transfer = self.boc[(cond1&cond2)]
            for idx in boc_inner_transfer.index:
                # NOTE(review): redundant with cond2, always True -- kept as-is.
                time_cond = (boc_inner_transfer.loc[idx,'交易日期']==self.nc_boc.loc[nc_idx,'交易日期'])
                if time_cond:
                    self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
                    self.boc.loc[idx,'对账一致'] = 'yes'
                    self.nc_boc.loc[nc_idx,'银行索引'] = idx
                    self.boc.loc[idx,'NC索引'] = nc_idx
def deal_laowang(self):
'''
处理王镜淇
eg: 8.16收到王镜淇款项/2019-08-20付王镜淇往来款<br>
rule:
1. 借贷金额相同
2. 交易时间相同
3. 银行——付款人名称[ Payer's Name ]:王镜淇<br>
银行——收款人名称[ Payee's Name ]:王镜淇
'''
is_laowang = self.nc_boc['摘要'].str.contains('王镜淇')
nc_laowang = self.nc_boc[is_laowang]
for nc_idx in nc_laowang.index:
cond1 = (self.boc['收入']==self.nc_boc.loc[nc_idx,'借方']) #借贷金额相同
cond2 = (self.boc['交易日期']==self.nc_boc.loc[nc_idx,'交易日期']) #交易时间相同
boc_laowang = self.boc[(cond1 & cond2)]
for idx in boc_laowang.index:
if boc_laowang.loc[idx,"对方户名"] in self.nc_boc.loc[nc_idx,'摘要']:
self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
self.boc.loc[idx,'对账一致'] = 'yes'
self.nc_boc.loc[nc_idx,'银行索引'] = idx
self.boc.loc[idx,'NC索引'] = nc_idx
for nc_idx in nc_laowang.index:
cond1 = (self.boc['支出']==self.nc_boc.loc[nc_idx,'贷方']) #借贷金额相同
cond2 = (self.boc['交易日期']==self.nc_boc.loc[nc_idx,'交易日期']) #交易时间相同
boc_laowang = self.boc[(cond1 & cond2)]
for idx in boc_laowang.index:
if boc_laowang.loc[idx,"对方户名"] in self.nc_boc.loc[nc_idx,'摘要']:
self.nc_boc.loc[nc_idx,'对账一致'] = 'yes'
self.boc.loc[idx,'对账一致'] = 'yes'
self.nc_boc.loc[nc_idx,'银行索引'] = idx
self.boc.loc[idx,'NC索引'] = nc_idx
    def export_excel(self):
        """Print a match-rate summary to stdout and write both annotated
        DataFrames (NC side and bank side) into one styled xlsx workbook,
        highlighting matched rows in yellow."""
        nc_rows_counts = self.nc_boc['对账一致'].value_counts(dropna=False)
        boc_rows_counts = self.boc['对账一致'].value_counts(dropna=False)
        # 'yes' is absent from value_counts when nothing matched
        try:
            nc_yes_rows = nc_rows_counts['yes']
        except KeyError:
            nc_yes_rows = 0
        nc_notmatch_rows = nc_rows_counts.sum()-nc_yes_rows
        try:
            boc_yes_rows = boc_rows_counts['yes']
        except KeyError:
            boc_yes_rows = 0
        boc_notmatch_rows = boc_rows_counts.sum()-boc_yes_rows
        # ASCII summary table
        print('\n')
        print("+--------------------------------------------------+")
        print("¦                     RESULTS                      ¦")
        print("+--------------------------------------------------+")
        print("¦   EXCEL   ¦      NC_BOC      ¦        BOC        ¦")
        print("+--------------------------------------------------+")
        print("¦   TOTAL   ¦{0:^18}¦{1:^20}¦".format(nc_rows_counts.sum(),boc_rows_counts.sum()))
        print("+--------------------------------------------------+")
        print("¦   MATCH   ¦{0:^18}¦{1:^20}¦".format(nc_yes_rows,boc_yes_rows))
        print("+--------------------------------------------------+")
        print("¦  NOTMATCH ¦{0:^18}¦{1:^20}¦".format(nc_notmatch_rows,boc_notmatch_rows))
        print("+--------------------------------------------------+")
        print('\n')
        # keep only the YYYY-MM-DD part of the timestamps for the export
        self.nc_boc['交易日期'] = self.nc_boc['交易日期'].astype(str).str.slice(0,10)
        self.boc['交易日期'] = self.boc['交易日期'].astype(str).str.slice(0,10)
        # NOTE(review): Windows-style path separator -- confirm target OS
        save_file = self.save_path + '\\' + self.nc_file_name + '+' + self.boc_file_name + '.xlsx'
        print("结果保存至:\n\t%s\n" %(save_file))
        # self.nc_boc.to_excel(self.save_path + '/nc_boc.xlsx')
        # self.boc.to_excel(self.save_path + '/boc.xlsx')
        writer = pd.ExcelWriter(save_file,engine='xlsxwriter')
        # data starts at B2; headers/index are written manually below so they
        # can carry their own format
        self.nc_boc.to_excel(writer,sheet_name=self.nc_file_name,startrow=1,startcol=1,header=False,index=False)
        self.boc.to_excel(writer,sheet_name=self.boc_file_name,startrow=1,startcol=1,header=False,index=False)
        workbook = writer.book
        nc_sheet = writer.sheets[self.nc_file_name]
        boc_sheet = writer.sheets[self.boc_file_name]
        header_format = workbook.add_format({
            "bold":True,
            "bg_color":'#67d8ef',
            'font_size':15,
            'font_name':"微软雅黑",
            "align":'center',
            'border':2,
        })
        cell_format = workbook.add_format({
            "font_size":12,
            "font_name":"微软雅黑",
            "border":1,
            "border_color":'#67d8ef',
            "align":"left",
        })
        # yellow background marks reconciled rows
        yes_format = workbook.add_format({
            "bg_color":"#ffff00",
            "font_size":12,
            "font_name":"微软雅黑",
            "border":1,
            "border_color":'#67d8ef',
            "align":"left"
        })
        # nc sheet
        # row format (a few spare rows beyond the data)
        nc_rows,nc_cols = self.nc_boc.shape
        for i in range(nc_rows+5):
            nc_sheet.set_row(i,22,cell_format)
        # +1 converts 0-based frame index to the sheet row below the header
        yes_index = self.nc_boc[self.nc_boc['对账一致']=='yes'].index+1
        for i in yes_index:
            nc_sheet.set_row(i,22,yes_format)
        # col format
        nc_sheet.set_column(0,nc_cols+5,22)
        nc_sheet.write_row('B1',self.nc_boc.columns,header_format)
        nc_sheet.write_column('A2',self.nc_boc.index,header_format)
        nc_sheet.freeze_panes(1,1)
        nc_sheet.set_tab_color('#FF9900')
        # boc sheet
        # row format
        boc_rows,boc_cols = self.boc.shape
        for i in range(boc_rows+5):
            boc_sheet.set_row(i,22,cell_format)
        yes_index = self.boc[self.boc['对账一致']=='yes'].index+1
        for i in yes_index:
            boc_sheet.set_row(i,22,yes_format)
        # col format
        boc_sheet.set_column(0,boc_cols+5,22)
        boc_sheet.write_row('B1',self.boc.columns,header_format)
        boc_sheet.write_column('A2',self.boc.index,header_format)
        boc_sheet.freeze_panes(1,1)
        boc_sheet.set_tab_color('#FF9900')
        # NOTE(review): ExcelWriter.save() was removed in pandas 2.x
        # (writer.close() is the modern call) -- confirm the pinned version.
        writer.save()
def doall(self):
self.rec_mortgage()
self.pos_to_bank()
self.rec_pos()
self.rec_loans()
self.rec_appointment_building()
self.rec_pfund()
self.rec_fee()
self.rec_firmamount()
self.prepay_amount()
self.pay_bankfee()
self.pay_progressamount()
self.pay_group()
self.pay_firmamount()
self.capital_pool()
self.bank_transfer()
self.inner_transfer()
self.deal_laowang()
self.export_excel()
    def __call__(self):
        """Allow the instance to be invoked directly; delegates to doall()."""
        return self.doall()
|
import requests
import json
from django.db import models
from sources.models import StockExchange
class Stock(models.Model):
    """A stock quote snapshot, refreshed on demand from the Tradier API."""
    company = models.CharField(max_length=500, null=True)
    symbol = models.CharField(max_length=50)
    exchange = models.ForeignKey(StockExchange, on_delete=models.CASCADE, unique=False)
    bid = models.FloatField(default=None, null=True)
    ask = models.FloatField(default=None, null=True)
    last = models.FloatField(default=None, null=True)
    volume = models.FloatField(default=None, null=True)
    high = models.FloatField(default=None, null=True)
    low = models.FloatField(default=None, null=True)
    open_price = models.FloatField(default=None, null=True)
    close_price = models.FloatField(default=None, null=True)

    def __str__(self):
        return self.symbol

    def update_data(self):
        """Fetch the latest quote for ``self.symbol`` from Tradier and save it.

        Raises ``requests.HTTPError`` on a non-2xx response and
        ``requests.Timeout`` if the API stalls.
        """
        # SECURITY: hard-coded API key -- move to Django settings or an
        # environment variable before shipping real credentials.
        TRADIER_API_KEY = 'XCp8C02gIfnzIW99aTTU4jnPQGVJ'
        session = requests.Session()
        session.headers.update({
            'Authorization': 'Bearer ' + TRADIER_API_KEY,
            'Accept': 'application/json',
        })
        url = 'https://api.tradier.com/v1/markets/quotes'
        # timeout prevents a stalled connection from hanging the caller;
        # raise_for_status fails loudly instead of parsing an error body
        response = session.get(url, params={"symbols": self.symbol}, timeout=10)
        response.raise_for_status()
        quote = response.json()["quotes"]["quote"]
        self.bid = quote["bid"]
        self.ask = quote["ask"]
        self.last = quote["last"]
        self.volume = quote["volume"]
        self.high = quote["high"]
        self.low = quote["low"]
        self.open_price = quote["open"]
        self.close_price = quote["close"]
        self.save()
|
from urllib.parse import urlparse
from django.http import HttpResponseRedirect
from django.shortcuts import render, redirect
from django.contrib.auth.models import User
from django.contrib import auth, messages
from django.views.generic.base import View
from accounts import *
from .forms import SignUpForm
def signupbefore(request):
    """Render the pre-signup landing page.

    The original returned None (an HTTP 500 in Django) for any non-GET
    request; render the page for every method instead.
    """
    return render(request, 'accounts/signup_before.html')
def signup(request):
    """Handle the signup form.

    GET renders an empty form; POST validates it, creates the user with a
    properly hashed password, and shows the completion page.  On validation
    failure the user is redirected back to the referring page with a warning
    (or re-shown the form when no Referer header is present).
    """
    if request.method == 'POST':
        signup_form = SignUpForm(request.POST)
        if signup_form.is_valid():
            # hash the password instead of storing the raw POST value
            user_instance = signup_form.save(commit=False)
            user_instance.set_password(signup_form.cleaned_data['password'])
            user_instance.save()
            return render(request, 'accounts/signup_complete.html',
                          {'username': user_instance.username})
        # NOTE(review): this message assumes the only possible failure is a
        # password mismatch -- confirm against SignUpForm's validators.
        messages.warning(request, '비밀번호 입력이 일치하지 않습니다')
        referer_url = request.META.get('HTTP_REFERER')
        if referer_url:
            return HttpResponseRedirect(urlparse(referer_url).path)
        # No Referer header (direct POST): re-render the bound form instead
        # of crashing in urlparse(None) like the original did.
        return render(request, 'accounts/signup.html', {'form': signup_form.as_p()})
    # GET: hand the client a fresh, unbound form
    signup_form = SignUpForm()
    return render(request, 'accounts/signup.html', {'form': signup_form.as_p()})
import django_filters
from .models import *
class ticketFilter(django_filters.FilterSet):
    """FilterSet exposing every field of Ticket as a query filter."""
    class Meta:
        model = Ticket
        # '__all__' turns each model field into a filter parameter.
        # (A stray trailing '|' artifact that broke the syntax was removed.)
        fields = '__all__'
import numpy as np
import pandas as pd
import re
import math
import multiprocessing
from abc import ABC, abstractmethod
from sklearn import metrics
from copy import deepcopy
from bokeh.layouts import gridplot, layout
from bokeh import events
from bokeh.plotting import figure, output_notebook, show
from bokeh.models import ColumnDataSource, Circle, HoverTool, TapTool, LabelSet, Rect, LinearColorMapper, MultiLine, Patch, Patches, CustomJS, Text, Title
from itertools import product
from sklearn.model_selection import ParameterGrid
from sklearn import preprocessing
from ..utils import color_scale, dict_perc
from bokeh.models.widgets import DataTable, Div, TableColumn
from bokeh.models.annotations import Title
from bokeh.plotting import ColumnDataSource, figure, output_notebook, show
from scipy import interp
from sklearn import metrics
from sklearn.utils import resample
from ..bootstrap import Perc, BC, BCA
from ..plot import scatter, scatterCI, boxplot, distribution, permutation_test, roc_calculate, roc_plot, roc_calculate_boot, roc_plot_boot
from ..utils import binary_metrics
from itertools import combinations
class BaseCrossVal(ABC):
"""Base class for crossval: kfold."""
@abstractmethod
def __init__(self, model, X, Y, param_dict, folds=10, n_mc=1, n_boot=0, n_cores=-1, ci=95):
# Store basic inputs
self.model = model
self.X = X
self.Y = Y
self.num_param = len(param_dict)
self.folds = folds
self.n_boot = n_boot
self.n_mc = n_mc
# Note; if self.mc is 0, change to 1
if self.n_mc == 0:
self.n_mc = 1
self.ci = ci
# Save param_dict
# Make sure each parameter is in a list
for key, value in param_dict.items():
if not isinstance(value, (list, tuple, np.ndarray)):
param_dict[key] = [value]
self.param_dict = param_dict
self.param_list = list(ParameterGrid(param_dict))
# Create a second dict, with parameters with more than 1 variable e.g. n_comp = [1, 2]
self.param_dict2 = {}
for key, value in param_dict.items():
if len(value) > 1:
self.param_dict2 = {**self.param_dict2, **{key: value}}
self.param_list2 = list(ParameterGrid(self.param_dict2))
# Calculate x_scores_ if it exists
if "model.x_scores_" in self.model.bootlist:
self.x_scores_calc = True
# if n_cores = -1, set n_cores to max_cores
max_num_cores = multiprocessing.cpu_count()
self.n_cores = n_cores
if self.n_cores > max_num_cores:
self.n_cores = -1
print("Number of cores set too high. It will be set to the max number of cores in the system.", flush=True)
if self.n_cores == -1:
self.n_cores = max_num_cores
print("Number of cores set to: {}".format(max_num_cores))
    @abstractmethod
    def calc_ypred(self):
        """Calculate predicted y for the full model and for each CV fold."""
        pass
    @abstractmethod
    def calc_ypred_epoch(self):
        """Calculate full and CV predicted y at each epoch interval during
        training (used when 'epochs' is the only tuned parameter)."""
        pass
    @abstractmethod
    def calc_stats(self):
        """Calculate binary classification statistics from the full and CV
        predictions."""
        pass
def _format_table(self, stats_list):
"""Make stats pretty (pandas table -> proper names in columns)."""
table = pd.DataFrame(stats_list).T
param_list_string = []
for i in range(len(self.param_list)):
param_list_string.append(str(self.param_list[i]))
table.columns = param_list_string
return table
def run(self):
"""Runs all functions prior to plot."""
# Check that param_dict is not for epochs
# Epoch is a special case
print("Running ...")
check_epoch = []
for i in self.param_dict2.keys():
check_epoch.append(i)
if check_epoch == ["epochs"]:
# Get epoch max
epoch_list = []
for i in self.param_list2:
for k, v in i.items():
epoch_list.append(v)
# Print and Calculate
self.calc_ypred_epoch()
print("returning stats at 'x' epoch interval during training until epoch={}.".format(epoch_list[-1]))
else:
self.calc_ypred()
self.calc_stats()
print("Done!")
    def plot_projections(self, param=None, label=None, size=12, scatter2=False):
        """Show a grid of latent-variable projections: score scatterplots
        (lower triangle), per-LV distributions (diagonal) and ROC curves for
        rotated score pairs (upper triangle), comparing full-model scores
        against the median of the CV scores.

        NOTE(review): `param` is accepted but never used in this body.
        """
        # last stored full-model scores and the median across CV repeats
        x_scores_full = self.x_scores_full[::-1][0][0]
        x_scores_cv = np.median(self.x_scores_cv[::-1][0], axis=0)
        scatterplot = scatter2
        num_x_scores = len(x_scores_full.T)
        sigmoid = False
        # If there is only 1 x_score, Need to plot x_score vs. peak (as opposided to x_score[i] vs. x_score[j])
        if num_x_scores == 1:
            # NOTE(review): this branch leaves `grid` undefined, so the
            # gridplot() call below raises NameError -- unimplemented path.
            pass
        else:
            comb_x_scores = list(combinations(range(num_x_scores), 2))
            # Width/height of each scoreplot shrink as the grid grows
            width_height = int(950 / num_x_scores)
            circle_size_scoreplot = size / num_x_scores
            label_font = str(13 - num_x_scores) + "pt"
            # Create empty grid
            grid = np.full((num_x_scores, num_x_scores), None)
            # Lower triangle: score scatterplots for each LV pair
            for i in range(len(comb_x_scores)):
                # Make a copy (as it overwrites the input label/group)
                label_copy = deepcopy(label)
                group_copy = self.Y.copy()
                # Scatterplot
                x, y = comb_x_scores[i]
                # NOTE(review): the "10" below is a hard-coded explained
                # variance placeholder in the axis label -- confirm.
                xlabel = "LV {} ({:0.1f}%)".format(x + 1, 10)
                ylabel = "LV {} ({:0.1f}%)".format(y + 1, 10)
                gradient = 1
                # symmetric axis range padded by 5%
                max_range = max(np.max(np.abs(x_scores_full[:, x])), np.max(np.abs(x_scores_cv[:, y])))
                new_range_min = -max_range - 0.05 * max_range
                new_range_max = max_range + 0.05 * max_range
                new_range = (new_range_min, new_range_max)
                grid[y, x] = scatter(x_scores_full[:, x].tolist(), x_scores_full[:, y].tolist(), label=label_copy, group=group_copy, title="", xlabel=xlabel, ylabel=ylabel, width=width_height, height=width_height, legend=False, size=circle_size_scoreplot, label_font_size=label_font, hover_xy=False, xrange=new_range, yrange=new_range, gradient=gradient, ci95=True, scatterplot=scatterplot, extraci95_x=x_scores_cv[:, x].tolist(), extraci95_y=x_scores_cv[:, y].tolist(), extraci95=True)
            # Diagonal: distribution of each LV (full vs CV pooled; the +2
            # shifts CV group codes so both appear as separate curves)
            group_dist = np.concatenate((self.Y, (self.Y + 2)))
            for i in range(num_x_scores):
                score_dist = np.concatenate((x_scores_full[:, i], x_scores_cv[:, i]))
                xlabel = "LV {} ({:0.1f}%)".format(i + 1, 10)
                grid[i, i] = distribution(score_dist, group=group_dist, kde=True, title="", xlabel=xlabel, ylabel="density", width=width_height, height=width_height, label_font_size=label_font, sigmoid=sigmoid)
            # Upper triangle: ROC curves on a 45-degree rotation of each pair
            for i in range(len(comb_x_scores)):
                x, y = comb_x_scores[i]
                # Get the optimal combination of x_scores based on rotation of y_loadings_
                # NOTE(review): group_copy here is whatever the last scatter
                # iteration left behind -- confirm this reuse is intended.
                theta = math.atan(1)
                x_rotate = x_scores_full[:, x] * math.cos(theta) + x_scores_full[:, y] * math.sin(theta)
                x_rotate_boot = x_scores_cv[:, x] * math.cos(theta) + x_scores_cv[:, y] * math.sin(theta)
                # ROC Plot with x_rotate
                fpr, tpr, tpr_ci = roc_calculate(group_copy, x_rotate, bootnum=100)
                fpr_boot, tpr_boot, tpr_ci_boot = roc_calculate(group_copy, x_rotate_boot, bootnum=100)
                grid[x, y] = roc_plot(fpr, tpr, tpr_ci, width=width_height, height=width_height, xlabel="1-Specificity (LV{}/LV{})".format(x + 1, y + 1), ylabel="Sensitivity (LV{}/LV{})".format(x + 1, y + 1), legend=False, label_font_size=label_font, roc2=True, fpr2=fpr_boot, tpr2=tpr_boot, tpr_ci2=tpr_ci_boot)
        # Bokeh grid
        fig = gridplot(grid.tolist())
        output_notebook()
        show(fig)
def plot(self, metric="r2q2", scale=1, color_scaling="tanh", rotate_xlabel=True, model="kfold", legend="bottom_right", color_beta=None, ci=95, diff1_heat=True):
    """Create a full/cv plot using based on metric selected.

    Dispatches to _plot_param1 or _plot_param2 depending on how many
    hyperparameters were searched, then renders the figure in the notebook.

    Parameters
    ----------
    metric : string, (default "r2q2")
        metric has to be either "r2q2", "auc", "acc", "f1score", "prec", "sens", or "spec".
    color_beta : list of 3 numbers or None, (default None)
        Heatmap colour-scaling betas forwarded to _plot_param2; None means
        [10, 10, 10].  (A None sentinel is used instead of a mutable list
        default, which would be shared across calls.)

    Raises
    ------
    ValueError
        If the parameter search used anything other than 1 or 2 parameters.
    """
    if color_beta is None:
        color_beta = [10, 10, 10]
    # Check model is parametric if using 'r2q2'
    if metric == "r2q2":
        if self.model.parametric is False:
            print("metric changed from 'r2q2' to 'auc' as the model is non-parametric.")
            metric = "auc"
    # Plot based on the number of parameters
    if len(self.param_dict2) == 1:
        fig = self._plot_param1(metric=metric, scale=scale, rotate_xlabel=rotate_xlabel, model=model, legend=legend, ci=ci)
    elif len(self.param_dict2) == 2:
        fig = self._plot_param2(metric=metric, scale=scale, color_scaling=color_scaling, model=model, legend=legend, color_beta=color_beta, ci=ci, diff1_heat=diff1_heat)
    else:
        raise ValueError("plot function only works for 1 or 2 parameters, there are {}.".format(len(self.param_dict2)))
    # Show plot
    output_notebook()
    show(fig)
def _plot_param1(self, metric="r2q2", scale=1, rotate_xlabel=True, model="kfold", title_align="center", legend="bottom_right", ci=95):
    """Used for plot function if the number of parameters is 1.

    Builds two linked Bokeh figures — (1) |full-cv| vs. cv scatter and
    (2) full/cv metric curves over the parameter values — and returns the
    combined gridplot (does not call show()).
    """
    # Build the ci table (self.table_std) from the monte-carlo loops.
    if self.n_mc > 1:
        std_list = []
        for i in range(len(self.param_list)):
            std_full_i = dict_perc(self.full_loop[i], ci=ci)
            std_cv_i = dict_perc(self.cv_loop[i], ci=ci)
            std_full_i = {k + "full": v for k, v in std_full_i.items()}
            std_cv_i = {k + "cv": v for k, v in std_cv_i.items()}
            std_cv_i["R²"] = std_full_i.pop("R²full")
            std_cv_i["Q²"] = std_cv_i.pop("R²cv")
            std_combined = {**std_full_i, **std_cv_i}
            std_list.append(std_combined)
        self.table_std = self._format_table(std_list)  # Transpose, Add headers
    # Choose metric to plot (rows in self.table come in cv/full pairs).
    metric_list = np.array(["acc", "aic", "auc", "bic", "f1score", "prec", "r2q2", "sens", "spec", "sse"])
    metric_idx = np.where(metric_list == metric)[0][0]
    # get full, cv, and diff
    full = self.table.iloc[2 * metric_idx + 1]
    cv = self.table.iloc[2 * metric_idx]
    diff = abs(full - cv)
    full_text = self.table.iloc[2 * metric_idx + 1].name
    cv_text = self.table.iloc[2 * metric_idx].name
    if metric == "r2q2":
        diff_text = "| R²-Q² |"
        y_axis_text = "R² & Q²"
        full_legend = "R²"
        cv_legend = "Q²"
    else:
        diff_text = full_text[:-4] + "diff"
        y_axis_text = full_text[:-4]
        if model == "kfold":
            full_legend = "FULL"
            cv_legend = "CV"
        else:
            full_legend = "TRAIN"
            cv_legend = "TEST"
            full_text = full_text[:-4] + "train"
            cv_text = full_text[:-5] + "test"
    # round full, cv, and diff for hovertool
    full_hover = []
    cv_hover = []
    diff_hover = []
    for j in range(len(full)):
        full_hover.append("%.2f" % round(full[j], 2))
        cv_hover.append("%.2f" % round(cv[j], 2))
        diff_hover.append("%.2f" % round(diff[j], 2))
    # get key, values (as string) from param_dict (key -> title, values -> x axis values)
    # param_dict2 holds exactly one entry here, so the loop binds that entry.
    for k, v in self.param_dict2.items():
        key_title = k
        key_xaxis = k
        values = v
        # Format tick labels: scientific notation for very small/large values.
        values_string = []
        for i in values:
            if i == 0:
                values_string.append(str(i))
            elif 0.0001 > i:
                values_string.append("%0.2e" % i)
            elif 10000 < i:
                values_string.append("%0.2e" % i)
            else:
                values_string.append(str(i))
        # if parameter starts with n_ e.g. n_components change title to 'no. of components', xaxis to 'components'
        if key_title.startswith("n_"):
            key_xaxis = key_xaxis[2:]
            key_xaxis = key_xaxis.title()
            key_title = "no. of " + key_xaxis
        else:
            key_title = key_title.replace("_", " ")
            key_title = key_title.title()
            key_xaxis = key_title
    # store data in ColumnDataSource for Bokeh
    data = dict(full=full, cv=cv, diff=diff, full_hover=full_hover, cv_hover=cv_hover, diff_hover=diff_hover, values_string=values_string)
    source = ColumnDataSource(data=data)
    fig1_title = diff_text + " vs. " + cv_text
    # Plot width/height
    width = int(485 * scale)
    height = int(405 * scale)
    # Figure 1 (DIFFERENCE (R2 - Q2) vs. Q2)
    fig1 = figure(x_axis_label=cv_text, y_axis_label=diff_text, title=fig1_title, tools="tap,pan,wheel_zoom,box_zoom,reset,save,lasso_select,box_select", plot_width=width, plot_height=height, x_range=(min(cv) - 0.03, max(cv) + 0.03), y_range=(min(diff) - 0.03, max(diff) + 0.03))
    # Figure 1: Add a line
    fig1_line = fig1.line(cv, diff, line_width=2, line_color="black", line_alpha=0.25)
    # Figure 1: Add circles (interactive click)
    fig1_circ = fig1.circle("cv", "diff", size=12, alpha=0.7, color="green", source=source)
    fig1_circ.selection_glyph = Circle(fill_color="green", line_width=2, line_color="black")
    fig1_circ.nonselection_glyph.fill_color = "green"
    fig1_circ.nonselection_glyph.fill_alpha = 0.4
    fig1_circ.nonselection_glyph.line_color = "white"
    # Figure 1: Add hovertool
    fig1.add_tools(HoverTool(renderers=[fig1_circ], tooltips=[(key_xaxis, "@values_string"), (full_text, "@full_hover"), (cv_text, "@cv_hover"), (diff_text, "@diff_hover")]))
    # Figure 1: Extra formating
    fig1.axis.major_label_text_font_size = "8pt"
    # NOTE: was `metric is "r2q2" or metric is "auc"` — identity comparison
    # against string literals is implementation-dependent; use equality.
    if metric in ("r2q2", "auc"):
        fig1.title.text_font_size = "12pt"
        fig1.xaxis.axis_label_text_font_size = "10pt"
        fig1.yaxis.axis_label_text_font_size = "10pt"
    else:
        fig1.title.text_font_size = "10pt"
        fig1.xaxis.axis_label_text_font_size = "9pt"
        fig1.yaxis.axis_label_text_font_size = "9pt"
    # Figure 2: full/cv
    fig2_title = y_axis_text + " over " + key_title
    fig2 = figure(x_axis_label=key_xaxis, y_axis_label=y_axis_text, title=fig2_title, plot_width=width, plot_height=height, x_range=pd.unique(values_string), tools="pan,wheel_zoom,box_zoom,reset,save,lasso_select,box_select")
    # Figure 2: Add Confidence Intervals if n_mc > 1
    if self.n_mc > 1:
        # get full, cv, and diff
        full_std = self.table_std.iloc[2 * metric_idx + 1]
        cv_std = self.table_std.iloc[2 * metric_idx]
        lower_ci_full = pd.Series(name=full_std.name, dtype="object")
        upper_ci_full = pd.Series(name=full_std.name, dtype="object")
        # .items() instead of .iteritems() (removed in pandas 2.0).
        for key, values in full_std.items():
            lower_ci_full[key] = values[0]
            upper_ci_full[key] = values[1]
        lower_ci_cv = pd.Series(name=cv_std.name, dtype="object")
        upper_ci_cv = pd.Series(name=cv_std.name, dtype="object")
        for key, values in cv_std.items():
            lower_ci_cv[key] = values[0]
            upper_ci_cv[key] = values[1]
        # Plot as a patch
        x_patch = np.hstack((values_string, values_string[::-1]))
        y_patch_r2 = np.hstack((lower_ci_full, upper_ci_full[::-1]))
        y_patch_q2 = np.hstack((lower_ci_cv, upper_ci_cv[::-1]))
        fig2.patch(x_patch, y_patch_q2, alpha=0.10, color="blue")
        # kfold monte-carlo does not have ci for R2
        if model != "kfold":
            fig2.patch(x_patch, y_patch_r2, alpha=0.10, color="red")
    # Figure 2: add full
    fig2_line_full = fig2.line(values_string, full, line_color="red", line_width=2)
    fig2_circ_full = fig2.circle("values_string", "full", line_color="red", fill_color="white", fill_alpha=1, size=8, source=source, legend=full_legend)
    fig2_circ_full.selection_glyph = Circle(line_color="red", fill_color="white", line_width=2)
    fig2_circ_full.nonselection_glyph.line_color = "red"
    fig2_circ_full.nonselection_glyph.fill_color = "white"
    fig2_circ_full.nonselection_glyph.line_alpha = 0.4
    # Figure 2: add cv
    fig2_line_cv = fig2.line(values_string, cv, line_color="blue", line_width=2)
    fig2_circ_cv = fig2.circle("values_string", "cv", line_color="blue", fill_color="white", fill_alpha=1, size=8, source=source, legend=cv_legend)
    fig2_circ_cv.selection_glyph = Circle(line_color="blue", fill_color="white", line_width=2)
    fig2_circ_cv.nonselection_glyph.line_color = "blue"
    fig2_circ_cv.nonselection_glyph.fill_color = "white"
    fig2_circ_cv.nonselection_glyph.line_alpha = 0.4
    # Add hovertool and taptool
    fig2.add_tools(HoverTool(renderers=[fig2_circ_full], tooltips=[(full_text, "@full_hover")], mode="vline"))
    fig2.add_tools(HoverTool(renderers=[fig2_circ_cv], tooltips=[(cv_text, "@cv_hover")], mode="vline"))
    fig2.add_tools(TapTool(renderers=[fig2_circ_full, fig2_circ_cv]))
    # Figure 2: Extra formating
    fig2.axis.major_label_text_font_size = "8pt"
    if metric in ("r2q2", "auc"):
        fig2.title.text_font_size = "12pt"
        fig2.xaxis.axis_label_text_font_size = "10pt"
        fig2.yaxis.axis_label_text_font_size = "10pt"
    else:
        fig2.title.text_font_size = "10pt"
        fig2.xaxis.axis_label_text_font_size = "9pt"
        fig2.yaxis.axis_label_text_font_size = "9pt"
    # Rotate
    if rotate_xlabel is True:
        fig2.xaxis.major_label_orientation = np.pi / 2
    # Figure 2: legend
    if legend == None or legend == False:
        fig2.legend.visible = False
    else:
        fig2.legend.location = legend
    # Center title
    if title_align == "center":
        fig1.title.align = "center"
        fig2.title.align = "center"
    # Create a grid and output figures
    grid = np.full((1, 2), None)
    grid[0, 0] = fig1
    grid[0, 1] = fig2
    fig = gridplot(grid.tolist(), merge_tools=True)
    return fig
def _plot_param2(self, metric="r2q2", xlabel=None, orientation=0, alternative=False, scale=1, heatmap_xaxis_rotate=90, color_scaling="tanh", line=False, model="kfold", title_align="center", legend="bottom_right", color_beta=None, ci=95, diff1_heat=True):
    """Used for plot function if the number of parameters is 2.

    Builds a 2x3 grid of linked Bokeh figures (full/cv/diff heatmaps on
    the top row; a |full-cv| vs. cv scatter and two per-parameter line
    plots on the bottom row) and returns the gridplot.
    """
    # legend always None
    legend = None
    # check color_beta (None avoids a shared mutable default argument)
    if color_beta is None:
        color_beta = [10, 10, 10]
    if type(color_beta) != list:
        raise ValueError("color_beta needs to be a list of 3 values e.g. [10, 10, 10]")
    if len(color_beta) != 3:
        raise ValueError("color_beta needs to be a list of 3 values e.g. [10, 10, 10]")
    # Get ci
    if self.n_mc > 1:
        std_list = []
        for i in range(len(self.param_list)):
            std_full_i = dict_perc(self.full_loop[i], ci=ci)
            std_cv_i = dict_perc(self.cv_loop[i], ci=ci)
            std_full_i = {k + "full": v for k, v in std_full_i.items()}
            std_cv_i = {k + "cv": v for k, v in std_cv_i.items()}
            std_cv_i["R²"] = std_full_i.pop("R²full")
            std_cv_i["Q²"] = std_cv_i.pop("R²cv")
            std_combined = {**std_full_i, **std_cv_i}
            std_list.append(std_combined)
        self.table_std = self._format_table(std_list)  # Transpose, Add headers
    metric_list = np.array(["acc", "aic", "auc", "bic", "f1score", "prec", "r2q2", "sens", "spec", "sse"])
    metric_idx = np.where(metric_list == metric)[0][0]
    # get full, cv, and diff
    full_score = self.table.iloc[2 * metric_idx + 1]
    cv_score = self.table.iloc[2 * metric_idx]
    diff_score = abs(full_score - cv_score)
    full_title = self.table.iloc[2 * metric_idx + 1].name
    cv_title = self.table.iloc[2 * metric_idx].name
    diff_title = full_title[:-4] + "diff"
    if diff1_heat == False:
        diff_heat_title = diff_title
        diff_heat_score = diff_score
    else:
        diff_heat_title = "1 - " + full_title[:-4] + "diff"
        diff_heat_score = 1 - diff_score
    y_axis_text = full_title[:-4]
    # NOTE: was `metric is "r2q2"` — identity comparison against a string
    # literal is implementation-dependent; use equality (here and below).
    if metric == "r2q2":
        full_title = 'R²'
        cv_title = 'Q²'
        diff_title = "| R² - Q² |"
        if diff1_heat == False:
            diff_heat_title = diff_title
        else:
            diff_heat_title = "1 - | R² - Q² |"
        y_axis_text = "R² & Q²"
    if model == "kfold":
        full_legend = "FULL"
        cv_legend = "CV"
    else:
        full_legend = "TRAIN"
        cv_legend = "TEST"
        full_title = full_title[:-4] + "train"
        cv_title = full_title[:-5] + "test"
        if metric == "r2q2":
            full_title = 'R²'
            cv_title = 'Q²'
    # round full, cv, and diff for hovertool
    full_hover = []
    cv_hover = []
    diff_hover = []
    for j in range(len(full_score)):
        full_hover.append("%.2f" % round(full_score[j], 2))
        cv_hover.append("%.2f" % round(cv_score[j], 2))
        diff_hover.append("%.2f" % round(diff_score[j], 2))
    # If n_mc > 1, unpack the (lower, upper) ci pairs into Series.
    if self.n_mc > 1:
        # get full, cv, and diff
        full_std = self.table_std.iloc[2 * metric_idx + 1]
        cv_std = self.table_std.iloc[2 * metric_idx]
        lower_ci_full = pd.Series(name=full_std.name, dtype="object")
        upper_ci_full = pd.Series(name=full_std.name, dtype="object")
        # .items() instead of .iteritems() (removed in pandas 2.0).
        for key, values in full_std.items():
            lower_ci_full[key] = values[0]
            upper_ci_full[key] = values[1]
        lower_ci_cv = pd.Series(name=cv_std.name, dtype="object")
        upper_ci_cv = pd.Series(name=cv_std.name, dtype="object")
        for key, values in cv_std.items():
            lower_ci_cv[key] = values[0]
            upper_ci_cv[key] = values[1]
    # Get key/values (values formatted as strings for categorical axes)
    param_keys = []
    param_values = []
    for key, value in sorted(self.param_dict2.items()):
        param_keys.append(key)
        values_string = []
        for i in value:
            if i == 0:
                values_string.append(str(i))
            elif 0.0001 > i:
                values_string.append("%0.2e" % i)
            elif 10000 < i:
                values_string.append("%0.2e" % i)
            else:
                values_string.append(str(i))
        param_values.append(values_string)
    # Human-readable titles/axis labels per parameter key.
    param_keys_title = []
    param_keys_axis = []
    for i in param_keys:
        if i.startswith("n_"):
            i_xaxis = i[2:]
            i_xaxis = i_xaxis.title()
            i_title = "no. of " + i_xaxis
        else:
            i_title = i.replace("_", " ")
            i_title = i_title.title()
            i_xaxis = i_title
        param_keys_title.append(i_title)
        param_keys_axis.append(i_xaxis)
    # Get key/value combinations
    comb = list(product(param_values[0], param_values[1]))
    key0_value = [val[0] for val in comb]
    key1_value = [val[1] for val in comb]
    key0_unique = param_values[0]
    key1_unique = param_values[1]
    # Heatmap alpha channels scaled from the scores.
    full_alpha = color_scale(full_score, method=color_scaling, beta=color_beta[0])
    cv_alpha = color_scale(cv_score, method=color_scaling, beta=color_beta[1])
    diff_alpha = color_scale(diff_heat_score, method=color_scaling, beta=color_beta[2])
    # Text for heatmaps
    full_text = []
    cv_text = []
    diff_text = []
    for i in range(len(key0_value)):
        full_text.append("%.2f" % round(full_score[i], 2))
        cv_text.append("%.2f" % round(cv_score[i], 2))
        diff_text.append("%.2f" % round(diff_score[i], 2))
    # Information for line plot
    line_key0_value = []
    for i in range(len(key0_value)):
        line_key0_value.append(key0_unique)
    line_key1_value = []
    for i in range(len(key1_value)):
        line_key1_value.append(key1_unique)
    # Extra for n_mc: x coordinates for the ci patches (out and back).
    if self.n_mc > 1:
        # Information for line plot
        monte_line_key0_value = []
        for i in range(len(key0_value)):
            monte_line_key0_value.append(list(np.hstack((key0_unique, key0_unique[::-1]))))
        monte_line_key1_value = []
        for i in range(len(key1_value)):
            monte_line_key1_value.append(list(np.hstack((key1_unique, key1_unique[::-1]))))
    # Per-combination score traces grouped by each parameter value.
    line0_full = []
    line0_cv = []
    for i in range(len(key0_value)):
        line0_full_i = []
        line0_cv_i = []
        for j in range(len(key0_value)):
            if key0_value[i] == key0_value[j]:
                line0_full_i.append(full_score[j])
                line0_cv_i.append(cv_score[j])
        line0_full.append(line0_full_i)
        line0_cv.append(line0_cv_i)
    line1_full = []
    line1_cv = []
    for i in range(len(key1_value)):
        line1_full_i = []
        line1_cv_i = []
        for j in range(len(key1_value)):
            if key1_value[i] == key1_value[j]:
                line1_full_i.append(full_score[j])
                line1_cv_i.append(cv_score[j])
        line1_full.append(line1_full_i)
        line1_cv.append(line1_cv_i)
    # Extra for n_mc: ci patch y coordinates for the key1-grouped traces.
    if self.n_mc > 1:
        monte_line1_full = []
        monte_line1_cv = []
        for i in range(len(key1_value)):
            line1_full_i_upper = []
            line1_full_i_lower = []
            line1_cv_i_upper = []
            line1_cv_i_lower = []
            for j in range(len(key1_value)):
                if key1_value[i] == key1_value[j]:
                    line1_full_i_upper.append(upper_ci_full[j])
                    line1_full_i_lower.append(lower_ci_full[j])
                    line1_cv_i_upper.append(upper_ci_cv[j])
                    line1_cv_i_lower.append(lower_ci_cv[j])
            monte_line1_full.append(list(np.hstack((line1_full_i_lower, line1_full_i_upper[::-1]))))
            monte_line1_cv.append(list(np.hstack((line1_cv_i_lower, line1_cv_i_upper[::-1]))))
    # Extra for n_mc: ci patch y coordinates for the key0-grouped traces.
    if self.n_mc > 1:
        monte_line0_full = []
        monte_line0_cv = []
        for i in range(len(key0_value)):
            line0_full_i_upper = []
            line0_full_i_lower = []
            line0_cv_i_upper = []
            line0_cv_i_lower = []
            for j in range(len(key0_value)):
                if key0_value[i] == key0_value[j]:
                    line0_full_i_upper.append(upper_ci_full[j])
                    line0_full_i_lower.append(lower_ci_full[j])
                    line0_cv_i_upper.append(upper_ci_cv[j])
                    line0_cv_i_lower.append(lower_ci_cv[j])
            monte_line0_full.append(list(np.hstack((line0_full_i_lower, line0_full_i_upper[::-1]))))
            monte_line0_cv.append(list(np.hstack((line0_cv_i_lower, line0_cv_i_upper[::-1]))))
    # Extra: label anchor points and end-of-trace y values for line labels.
    # (A never-used scatter colour/size computation was removed here.)
    key0_value_text = len(key1_value) * [key1_value[-1]]
    key1_value_text = len(key0_value) * [key0_value[-1]]
    line1_cv_text = []
    for i in line1_cv:
        line1_cv_text.append(i[-1])
    line0_cv_text = []
    for i in line0_cv:
        line0_cv_text.append(i[-1])
    line1_full_text = []
    for i in line1_full:
        line1_full_text.append(i[-1])
    line0_full_text = []
    for i in line0_full:
        line0_full_text.append(i[-1])
    # Abbreviate well-known parameter names for compact line labels.
    ptext_is = ["Learning Rate", "Nodes", "Neurons", "Momentum", "Decay", "Components", "Batch Size", "Gamma", "C", "Estimators", "Max Features", "Max Depth", "Min Samples Split", "Min Samples Leaf", "Max Leaf Nodes"]
    ptext_change = ["LR", "Node", "Neur", "Mom", "Dec", "Comp", "Bat", "Gam", "C", "Est", "Feat", "Dep", "SSpl", "SLea", "LNod"]
    ptext = []
    for i in param_keys_axis:
        val = "fill"
        for j in range(len(ptext_is)):
            if i == ptext_is[j]:
                val = ptext_change[j]
        if val == "fill":
            val = i[:3]
        ptext.append(val + " = ")
    line1_cv_score_text = []
    for i in key1_value:
        line1_cv_score_text.append(ptext[1] + i)
    line0_cv_score_text = []
    for i in key0_value:
        line0_cv_score_text.append(ptext[0] + i)
    diff_score_neg = 1 - diff_score
    # Store information in dictionary for bokeh
    # (note the deliberate key0/key1 swap, preserved from the original).
    data = dict(
        key0_value=key1_value,
        key1_value=key0_value,
        full_score=full_score,
        cv_score=cv_score,
        diff_score=diff_score,
        diff_heat_score=diff_heat_score,
        diff_score_neg=diff_score_neg,
        full_alpha=full_alpha,
        cv_alpha=cv_alpha,
        diff_alpha=diff_alpha,
        line_key0_value=line_key0_value,
        line_key1_value=line_key1_value,
        line0_full=line0_full,
        line0_cv=line0_cv,
        line1_full=line1_full,
        line1_cv=line1_cv,
        full_text=full_text,
        cv_text=cv_text,
        diff_text=diff_text,
        key0_value_text=key0_value_text,
        key1_value_text=key1_value_text,
        line0_cv_text=line0_cv_text,
        line1_cv_text=line1_cv_text,
        line1_cv_score_text=line1_cv_score_text,
        line1_full_text=line1_full_text,
        line0_full_text=line0_full_text,
        line0_cv_score_text=line0_cv_score_text,
    )
    if self.n_mc > 1:
        data["lower_ci_full"] = lower_ci_full
        data["upper_ci_full"] = upper_ci_full
        data["lower_ci_cv"] = lower_ci_cv
        # BUG FIX: was `data["upper_ci_cv"] = lower_ci_cv` (stored the
        # lower bound twice).
        data["upper_ci_cv"] = upper_ci_cv
        data["monte_line_key1_value"] = monte_line_key1_value
        data["monte_line_key0_value"] = monte_line_key0_value
        data["monte_line1_full"] = monte_line1_full
        data["monte_line1_cv"] = monte_line1_cv
        data["monte_line0_full"] = monte_line0_full
        data["monte_line0_cv"] = monte_line0_cv
    source = ColumnDataSource(data=data)
    # Heatmap FULL
    p1 = figure(title=full_title, tools="tap, save", x_range=key0_unique, y_range=key1_unique, x_axis_label=param_keys_axis[0], y_axis_label=param_keys_axis[1])
    p1_render = p1.rect("key1_value", "key0_value", 0.9, 0.9, color="red", alpha="full_alpha", line_color=None, source=source)
    p1_render.selection_glyph = Rect(fill_color="red", fill_alpha="full_alpha", line_width=int(3 * scale), line_color="black")
    p1_render.nonselection_glyph.fill_alpha = "full_alpha"
    p1_render.nonselection_glyph.fill_color = "red"
    p1_render.nonselection_glyph.line_color = "white"
    # Heatmap CV
    p2 = figure(title=cv_title, tools="tap, save", x_range=key0_unique, y_range=key1_unique, x_axis_label=param_keys_axis[0], y_axis_label=param_keys_axis[1])
    p2_render = p2.rect("key1_value", "key0_value", 0.9, 0.9, color="blue", alpha="cv_alpha", line_color=None, source=source)
    p2_render.selection_glyph = Rect(fill_color="blue", fill_alpha="cv_alpha", line_width=int(3 * scale), line_color="black")
    p2_render.nonselection_glyph.fill_alpha = "cv_alpha"
    p2_render.nonselection_glyph.fill_color = "blue"
    p2_render.nonselection_glyph.line_color = "white"
    # Heatmap Diff
    p3 = figure(title=diff_heat_title, tools="tap, save", x_range=key0_unique, y_range=key1_unique, x_axis_label=param_keys_axis[0], y_axis_label=param_keys_axis[1])
    p3_render = p3.rect("key1_value", "key0_value", 0.9, 0.9, color="green", alpha="diff_alpha", line_color=None, source=source)
    p3_render.selection_glyph = Rect(fill_color="green", fill_alpha="diff_alpha", line_width=int(3 * scale), line_color="black")
    p3_render.nonselection_glyph.fill_alpha = "diff_alpha"
    p3_render.nonselection_glyph.fill_color = "green"
    p3_render.nonselection_glyph.line_color = "white"
    # Shared formatting for the three heatmaps
    for p in (p1, p2, p3):
        p.plot_width = int(320 * scale)
        p.plot_height = int(257 * scale)
        p.grid.grid_line_color = None
        p.axis.axis_line_color = None
        p.axis.major_tick_line_color = None
        p.axis.major_label_text_font_size = str(8 * scale) + "pt"
        p.axis.major_label_standoff = 0
        p.xaxis.axis_label_text_font_size = str(12 * scale) + "pt"
        p.yaxis.axis_label_text_font_size = str(12 * scale) + "pt"
        p.title.text_font_size = str(14 * scale) + "pt"
        p.xaxis.major_label_orientation = math.radians(heatmap_xaxis_rotate)
    # (A disabled `if text is True:` overlay that drew score labels on the
    # heatmaps was removed as unreachable dead code.)
    p1.add_tools(HoverTool(renderers=[p1_render, p2_render, p3_render], tooltips=[(full_title, "@full_text")]))
    p2.add_tools(HoverTool(renderers=[p1_render, p2_render, p3_render], tooltips=[(cv_title, "@cv_text")]))
    p3.add_tools(HoverTool(renderers=[p1_render, p2_render, p3_render], tooltips=[(diff_title, "@diff_text")]))
    sc_title = diff_title + " vs. " + cv_title
    # Scatterplot
    p4 = figure(title=sc_title, x_axis_label=cv_title, y_axis_label=diff_title, tools="tap,pan,wheel_zoom,box_zoom,reset,save,lasso_select,box_select", x_range=(min(cv_score) - 0.03, max(cv_score) + 0.03), y_range=(min(diff_score) - 0.03, max(diff_score) + 0.03))
    p4_render = p4.circle("cv_score", "diff_score", size=8 * scale, alpha=0.6, color="green", source=source)
    p4_render.selection_glyph = Circle(fill_color="green", line_width=int(2 * scale), line_color="black")
    p4_render.nonselection_glyph.fill_color = "green"
    p4_render.nonselection_glyph.fill_alpha = 0.4
    p4_render.nonselection_glyph.line_color = "white"
    p4.add_tools(HoverTool(renderers=[p4_render], tooltips=[(full_title, "@full_text"), (cv_title, "@cv_text"), (diff_title, "@diff_text")]))
    p4.plot_width = int(320 * scale)
    p4.plot_height = int(257 * scale)
    p4.axis.major_label_text_font_size = str(8 * scale) + "pt"
    p4.xaxis.axis_label_text_font_size = str(12 * scale) + "pt"
    p4.yaxis.axis_label_text_font_size = str(12 * scale) + "pt"
    p4.title.text_font_size = str(14 * scale) + "pt"
    # Line plot 1: pad the categorical x range with blank categories so the
    # in-plot text labels have room on the right.
    l1_range_special = []
    if len(key0_unique) > 2:
        l1_range_special.append([" "])
    if len(key0_unique) > 5:
        l1_range_special.append([l1_range_special[-1][0] + " "])
    if len(key0_unique) > 8:
        l1_range_special.append([l1_range_special[-1][0] + " "])
    another_val = len(key0_unique) - 8
    if another_val > 0:
        for i in range(another_val):
            if i % 3 == 0:
                l1_range_special.append([l1_range_special[-1][0] + " "])
    l1_xrange = pd.unique(key0_unique)
    l1_xrange2 = np.append(l1_xrange, l1_range_special)
    l1_title = y_axis_text + " over " + param_keys_title[0]
    y_range_min = min(cv_score) - min(cv_score) * 0.1
    y_range_max = max(full_score) + max(full_score) * 0.05
    p5 = figure(title=l1_title, x_axis_label=param_keys_axis[0], y_axis_label=y_axis_text, plot_width=int(320 * scale), plot_height=int(257 * scale), x_range=l1_xrange2, tools="pan,wheel_zoom,box_zoom,reset,save,lasso_select,box_select", y_range=(y_range_min, y_range_max))
    # White quad masks the padded region beyond the last real category.
    p5.quad(top=[1000], bottom=[-1000], left=[l1_xrange[-1]], right=[1000], color="white")
    if self.n_mc > 1:
        p5_render_patch2 = p5.patches("monte_line_key0_value", "monte_line1_cv", alpha=0, color="blue", source=source)
        p5_render_patch2.selection_glyph = Patches(fill_alpha=0.2, fill_color="blue", line_color="white")
        p5_render_patch2.nonselection_glyph.fill_alpha = 0
        p5_render_patch2.nonselection_glyph.line_color = "white"
        # kfold monte-carlo does not have ci for R2
        if model != "kfold":
            p5_render_patch1 = p5.patches("monte_line_key0_value", "monte_line1_full", alpha=0, color="red", source=source)
            p5_render_patch1.selection_glyph = Patches(fill_alpha=0.2, fill_color="red", line_color="white")
            p5_render_patch1.nonselection_glyph.fill_alpha = 0
            p5_render_patch1.nonselection_glyph.line_color = "white"
    p5_render_1 = p5.multi_line("line_key0_value", "line1_full", line_color="red", line_width=2 * scale, source=source)
    p5_render_1.selection_glyph = MultiLine(line_color="red", line_alpha=0.8, line_width=2 * scale)
    p5_render_1.nonselection_glyph.line_color = "red"
    p5_render_1.nonselection_glyph.line_alpha = 0.05 / len(key1_unique)
    p5_render_2 = p5.circle("key1_value", "full_score", line_color="red", fill_color="white", size=8 * scale, source=source, legend=full_legend)
    p5_render_2.selection_glyph = Circle(line_color="red", fill_color="white")
    p5_render_2.nonselection_glyph.line_color = "red"
    p5_render_2.nonselection_glyph.fill_color = "white"
    p5_render_2.nonselection_glyph.line_alpha = 0.7 / len(key1_unique)
    p5_render_3 = p5.multi_line("line_key0_value", "line1_cv", line_color="blue", line_width=2 * scale, source=source)
    p5_render_3.selection_glyph = MultiLine(line_color="blue", line_alpha=0.8, line_width=2 * scale)
    p5_render_3.nonselection_glyph.line_color = "blue"
    p5_render_3.nonselection_glyph.line_alpha = 0.05 / len(key1_unique)
    p5_render_4 = p5.circle("key1_value", "cv_score", line_color="blue", fill_color="white", size=8 * scale, source=source, legend=cv_legend)
    p5_render_4.selection_glyph = Circle(line_color="blue", fill_color="white")
    p5_render_4.nonselection_glyph.line_color = "blue"
    p5_render_4.nonselection_glyph.fill_color = "white"
    p5_render_4.nonselection_glyph.line_alpha = 0.7 / len(key1_unique)
    # text labels shown only for selected traces
    text_here = 8 * scale
    text_line_font = str(text_here) + "pt"
    p5_render_5 = p5.text(x="key1_value_text", y="line1_cv_text", text="line1_cv_score_text", source=source, text_font_size=text_line_font, text_color="blue", x_offset=8 * scale, y_offset=6 * scale, text_alpha=0)
    p5_render_5.selection_glyph = Text(text_color="blue", text_alpha=1, text_font_size=text_line_font)
    p5_render_5.nonselection_glyph.text_alpha = 0
    p5_render_6 = p5.text(x="key1_value_text", y="line1_full_text", text="line1_cv_score_text", source=source, text_font_size=text_line_font, text_color="red", x_offset=8 * scale, y_offset=6 * scale, text_alpha=0)
    p5_render_6.selection_glyph = Text(text_color="red", text_alpha=1, text_font_size=text_line_font)
    p5_render_6.nonselection_glyph.text_alpha = 0
    p5.add_tools(HoverTool(renderers=[p5_render_2], tooltips=[(full_title, "@full_text")]))
    p5.add_tools(HoverTool(renderers=[p5_render_4], tooltips=[(cv_title, "@cv_text")]))
    p5.add_tools(TapTool(renderers=[p5_render_2, p5_render_4]))
    # Line plot 2: same x-range padding trick for the second parameter.
    l2_range_special = []
    if len(key1_unique) > 2:
        l2_range_special.append([" "])
    if len(key1_unique) > 5:
        l2_range_special.append([l2_range_special[-1][0] + " "])
    if len(key1_unique) > 8:
        l2_range_special.append([l2_range_special[-1][0] + " "])
    another_val = len(key1_unique) - 8
    if another_val > 0:
        for i in range(another_val):
            if i % 3 == 0:
                l2_range_special.append([l2_range_special[-1][0] + " "])
    l2_xrange = pd.unique(key1_unique)
    l2_xrange2 = np.append(l2_xrange, l2_range_special)
    l2_title = y_axis_text + " over " + param_keys_title[1]
    y_range_min = min(cv_score) - min(cv_score) * 0.1
    y_range_max = max(full_score) + max(full_score) * 0.05
    p6 = figure(title=l2_title, x_axis_label=param_keys_axis[1], y_axis_label=y_axis_text, plot_width=int(320 * scale), plot_height=int(257 * scale), x_range=l2_xrange2, tools="tap,pan,wheel_zoom,box_zoom,reset,save,lasso_select,box_select", y_range=(y_range_min, y_range_max))
    p6.quad(top=[1000], bottom=[-1000], left=[l2_xrange[-1]], right=[1000], color="white")
    if self.n_mc > 1:
        p6_render_patch2 = p6.patches("monte_line_key1_value", "monte_line0_cv", alpha=0, color="blue", source=source)
        p6_render_patch2.selection_glyph = Patches(fill_alpha=0.1, fill_color="blue", line_color="white")
        p6_render_patch2.nonselection_glyph.fill_alpha = 0
        p6_render_patch2.nonselection_glyph.line_color = "white"
        # kfold monte-carlo does not have ci for R2
        if model != "kfold":
            p6_render_patch1 = p6.patches("monte_line_key1_value", "monte_line0_full", alpha=0, color="red", source=source)
            p6_render_patch1.selection_glyph = Patches(fill_alpha=0.1, fill_color="red", line_color="white")
            p6_render_patch1.nonselection_glyph.fill_alpha = 0
            p6_render_patch1.nonselection_glyph.line_color = "white"
    p6_render_1 = p6.multi_line("line_key1_value", "line0_full", line_color="red", line_width=2 * scale, source=source, legend=full_legend)
    p6_render_1.selection_glyph = MultiLine(line_color="red", line_alpha=0.8, line_width=2 * scale)
    p6_render_1.nonselection_glyph.line_color = "red"
    p6_render_1.nonselection_glyph.line_alpha = 0.05 / len(key0_unique)
    p6_render_2 = p6.circle("key0_value", "full_score", line_color="red", fill_color="white", size=8 * scale, source=source)
    p6_render_2.selection_glyph = Circle(line_color="red", fill_color="white")
    p6_render_2.nonselection_glyph.line_color = "red"
    p6_render_2.nonselection_glyph.fill_color = "white"
    p6_render_2.nonselection_glyph.line_alpha = 0.7 / len(key0_unique)
    p6_render_3 = p6.multi_line("line_key1_value", "line0_cv", line_color="blue", line_width=2 * scale, source=source, legend=cv_legend)
    p6_render_3.selection_glyph = MultiLine(line_color="blue", line_alpha=0.8, line_width=2 * scale)
    p6_render_3.nonselection_glyph.line_color = "blue"
    p6_render_3.nonselection_glyph.line_alpha = 0.05 / len(key0_unique)
    p6_render_4 = p6.circle("key0_value", "cv_score", line_color="blue", fill_color="white", size=8 * scale, source=source)
    p6_render_4.selection_glyph = Circle(line_color="blue", fill_color="white")
    p6_render_4.nonselection_glyph.line_color = "blue"
    p6_render_4.nonselection_glyph.fill_color = "white"
    p6_render_4.nonselection_glyph.line_alpha = 0.7 / len(key0_unique)
    # Text
    text_here = 8 * scale
    text_line_font = str(text_here) + "pt"
    p6_render_5 = p6.text(x="key0_value_text", y="line0_cv_text", text="line0_cv_score_text", source=source, text_font_size=text_line_font, text_color="blue", x_offset=8 * scale, y_offset=6 * scale, text_alpha=0)
    p6_render_5.selection_glyph = Text(text_color="blue", text_alpha=1, text_font_size=text_line_font)
    p6_render_5.nonselection_glyph.text_alpha = 0
    p6_render_6 = p6.text(x="key0_value_text", y="line0_full_text", text="line0_cv_score_text", source=source, text_font_size=text_line_font, text_color="red", x_offset=8 * scale, y_offset=6 * scale, text_alpha=0)
    p6_render_6.selection_glyph = Text(text_color="red", text_alpha=1, text_font_size=text_line_font)
    p6_render_6.nonselection_glyph.text_alpha = 0
    # Tooltips use the metric titles (were hard-coded "AUC_full"/"AUC_CV",
    # inconsistent with p5 and wrong for non-AUC metrics).
    p6.add_tools(HoverTool(renderers=[p6_render_2], tooltips=[(full_title, "@full_text")]))
    p6.add_tools(HoverTool(renderers=[p6_render_4], tooltips=[(cv_title, "@cv_text")]))
    p6.add_tools(TapTool(renderers=[p6_render_2, p6_render_4]))
    # Figure: legend (legend is forced to None above, so it stays hidden)
    if legend == None or legend == False:
        p5.legend.visible = False
        p6.legend.visible = False
    else:
        p5.legend.location = legend
        p6.legend.location = legend
    # Center title
    if title_align == "center":
        for p in (p1, p2, p3, p4, p5, p6):
            p.title.align = "center"
    fig = gridplot([[p1, p2, p3], [p4, p5, p6]], merge_tools=True, toolbar_location="left", toolbar_options=dict(logo=None))
    p5.xaxis.major_label_orientation = math.radians(heatmap_xaxis_rotate)
    p6.xaxis.major_label_orientation = math.radians(heatmap_xaxis_rotate)
    # Final (post-grid) font sizing applied uniformly.
    for p in (p1, p2, p3, p4, p5, p6):
        p.title.text_font_size = str(12 * scale) + "pt"
        p.xaxis.axis_label_text_font_size = str(10 * scale) + "pt"
        p.yaxis.axis_label_text_font_size = str(10 * scale) + "pt"
    return fig
# ---- boundary between concatenated source files (stray "|" artifact removed) ----
import random
# --- Simulation parameters (arbitrary grid units / time steps) ---
npart = 2          # number of particles
lim_caja = 10      # box limit (not used below -- kept for reference)
const_F = 10       # force constant (not used below)
cant_pasos = 200   # number of simulation steps
dt = 2             # time step
xmax = 15          # box bounds used for wall bounces
ymax = 15
xmin = 0
ymin = 0
vels = [[3, 2], [2, 2]]  # per-particle [vx, vy] starting velocities
vy = [2, 2]              # (not used below)
# Function that returns a list of [x, y] initial-position lists.
def pos_ini(npart):
    """Return `npart` random integer [x, y] starting positions.

    Coordinates are drawn uniformly from 0..16; a draw is accepted only if
    neither its x nor its y is already in use, so every particle sits on
    its own row and column.

    BUG FIX: the original advanced the loop counter even when a draw was
    rejected, so it could return fewer than `npart` positions.

    Raises:
        ValueError: if `npart` exceeds the 17 distinct values available.
    """
    if npart > 17:
        raise ValueError("at most 17 particles fit with distinct x and y in 0..16")
    used_x = []
    used_y = []
    coord_ini = []
    # Retry until exactly `npart` draws have been accepted.
    while len(coord_ini) < npart:
        xs = random.randint(0, 16)
        ys = random.randint(0, 16)
        if xs not in used_x and ys not in used_y:
            used_x.append(xs)
            used_y.append(ys)
            coord_ini.append([xs, ys])
    return coord_ini
# Build the initial random positions and echo the starting state.
pos = pos_ini(npart)
print("pos ini {}".format(pos))
print("vels ini {}".format(vels))
def pos_vel_nuevas(cant_pasos):
    """Advance every particle in the global `pos` list for `cant_pasos` steps.

    Each step moves a particle by velocity * dt and bounces it (sign flip
    plus one extra step) when a coordinate leaves [0, xmax] / [0, ymax].
    Uses the module-level globals `pos`, `dt`, `xmax` and `ymax`.

    NOTE(review): every particle is advanced with j[0]; j[1] (the second
    particle's velocity) is never used, so both particles share one
    velocity -- confirm this is intended.
    """
    j = [[3, 2], [2, 2]]
    n = 0
    count = 0
    while n < cant_pasos:
        n += 1
        for i in pos:
            count += 1
            print("\nparticula {}".format(count))
            x = i[0]
            y = i[1]
            print("x {}".format(x))
            print("y {}".format(y))
            print(j[0][0])
            # advance the x coordinate
            i[0] += j[0][0] * dt
            ifx = i[0]
            # advance the y coordinate
            i[1] += j[0][1] * dt
            ify = i[1]
            print("xf{} {}".format(count, ifx))
            print("yf{} {}".format(count, ify))
            # Wall check for the x coordinate
            if ifx >= xmax or ifx < 0 :
                if ifx > xmax:
                    print("se pasó x por arriba")
                    j[0][0] = (j[0][0]) * -1
                    i[0] += j[0][0] * dt
                    print(i[0])
                elif ifx < 0:
                    print("se pasó x por abajo")
                    j[0][0] = (j[0][0]) * -1
                    i[0] += j[0][0] * dt
                    print(i[0])
            # Wall check for the y coordinate
            if ify >= ymax or ify < 0:
                if ify > ymax:
                    print("se pasó y por abajo")
                    j[0][1] = (j[0][1]) * -1
                    i[1] += j[0][1] * dt
                    print(i[1])
                elif ify < 0:
                    print("se pasó y por abajo")
                    j[0][1] = (j[0][1]) * -1
                    i[1] += j[0][1] * dt
                    print(i[1])
            print("i {}".format(i))
    # for j in vels:
    #
    # # estudiamos nuestra pos en x
    # i[0] += j[0] * dt
    # x = i[0]
    # # estudiamos nuestra pos en y
    # i[1] += j[1] * dt
    # y = i[1]
    # print("x {}".format(x))
    # print("y {}".format(y))
    # if i[0] > xmax:
    # j[0] = -(j[0])
    # i[0] += j[0] * dt
    # elif i[1] > ymax:
    # j[1] = -(j[1])
    # i[1] += j[1] * dt
    # print(count)
    # elif i[0] < 0:
    # print("epa")
    # print(j[0])
    # j[0] = (j[0])*-1
    # print(j[0])
    # i[0] += j[0] * dt
    # elif i[1] < 0:
    # j[1] = (j[1])*-1
    # i[1] += j[1] * dt
    return pos
# Run the simulation and print the final particle positions.
print (pos_vel_nuevas(cant_pasos))
def velocidades_choque(xy):
    """Prototype wall-collision handler (appears unfinished; never called).

    NOTE(review): `[i][0]` wraps `i` in a one-element list and takes it
    straight back out, so the inner loop iterates over `i` itself --
    presumably `i` (the [x, y] pair) or `i[0]` was intended; confirm.
    `Vx` is flipped on a bounce but never returned or stored, and the
    reflected coordinate is only printed.
    """
    Vx = 2
    for i in xy:
        for X in [i][0]:
            if X > xmax:
                Vx = -(Vx)
                # Reflect the coordinate back inside the box.
                X = X - 2 * (X - xmax)
                print (X)
# print velocidades_choque(pos)
# Funcion que se fija si particula choco
# con los bordes
# def choque(x, xmax, ymax ):
# for i in x:
# if x[i] > xmax:
# x[i]
|
class Solution:
    # @param A : string
    # @param B : list of strings
    # @return an integer
    def wordBreak(self, A, B):
        """Return 1 if A can be segmented into words from B, else 0.

        Dynamic programming over prefix positions: reachable[i] is True
        when A[:i] splits cleanly into dictionary words.  Candidate word
        lengths are bounded by the shortest/longest word in B so
        impossible substrings are never tested.

        BUG FIX: with an empty dictionary the original fed float("inf")
        into range(), raising TypeError; that case is now guarded.
        """
        # With no dictionary, only the empty string can be segmented.
        if not B:
            return int(not A)
        words = set(B)  # O(1) membership tests instead of a dict of 1s
        lengths = [len(w) for w in B]
        min_word_len = min(lengths)
        max_word_len = max(lengths)
        # reachable[i] <=> A[:i] can be split into dictionary words
        reachable = [False] * (len(A) + 1)
        reachable[0] = True
        for i in range(min_word_len, len(A) + 1):
            # j = start of the last word; its length i - j must lie in
            # [min_word_len, max_word_len].
            for j in range(max(0, i - max_word_len), i - min_word_len + 1):
                if reachable[j] and A[j:i] in words:
                    reachable[i] = True
                    break
        return int(reachable[-1])
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 27 15:52:09 2017
@author: dgratz
"""
from glob import glob
import numpy as np
import matplotlib.pyplot as plt
import re
# def graphDSS(foldername):
# dssFiles = glob(foldername+'/*_dss0.tsv')
# for dssFile in dssFiles:
# data = np.genfromtxt(dssFile, delimiter='\t',skip_header=1)
# for i in range(data.shape[1]):
# plt.bar(i,data[0,i])
def graphDT(foldername):
    """Scatter the vOld/cl column of every *_dt0.tsv file in `foldername`."""
    for path in glob(foldername + '/*_dt0.tsv'):
        # The header row mixes tab separators with 'cellX_Y/' prefixes,
        # so split on both and drop the empty tokens.
        with open(path, 'r') as handle:
            header = [tok for tok in re.split('\t|cell._./', handle.readline().strip()) if tok]
        cl_col = header.index('vOld/cl')
        peak_col = header.index('vOld/peak')  # located but not plotted (as before)
        table = np.genfromtxt(path, delimiter='\t', skip_header=1)
        print(table.shape[0])
        plt.scatter(range(table.shape[0]), table[:, cl_col])
def graphDVARS(foldername):
    """Plot voltage (vOld) against time (t) for every *_dvars.tsv file."""
    for path in glob(foldername + '/*_dvars.tsv'):
        with open(path, 'r') as handle:
            header = handle.readline().strip().split('\t')
        v_col = header.index('vOld')
        t_col = header.index('t')
        table = np.genfromtxt(path, delimiter='\t', skip_header=1)
        plt.plot(table[:, t_col], table[:, v_col])
def graphAll(foldername):
    """Show the voltage traces for `foldername`, time axis clipped to 0-100."""
    plt.figure(0)
    graphDVARS(foldername)
    plt.xlim([0, 100])
    plt.show()
|
from dataclasses import dataclass, field
from typing import Dict, List, Optional
@dataclass
class CostSnapshot:
    """A single buy/sell price observation for a commodity at one station.

    The first seven fields arrive with every snapshot; the optional tail
    fields are extra station metadata that may be missing from some feeds.
    """
    system_name: str
    station_name: str
    timestamp: str
    buy_price: int
    stock: int
    sell_price: int
    demand: int
    # Optional station metadata (may be absent depending on the source event).
    market_id: Optional[int] = None
    star_pos: Optional[List[float]] = None
    station_type: Optional[str] = None
    system_address: Optional[int] = None
    dist_from_star_ls: Optional[float] = None
    station_allegiance: Optional[str] = None
@dataclass
class Commodity:
    """A tradeable commodity with its best buy and best sale snapshots."""
    name: str
    best_buys: List[CostSnapshot]
    best_sales: List[CostSnapshot]
@dataclass
class StockSummary:
    """Top-level container holding every commodity tracked so far."""
    commodities: List[Commodity] = field(default_factory=list)
@dataclass
class Station:
    """Static description of a docking station and its host system."""
    market_id: int
    star_pos: List[float]
    station_name: str
    station_type: str
    system_address: int
    system_name: str
    timestamp: str
    # Optional extras not present in every source event.
    dist_from_star_ls: Optional[float] = None
    station_allegiance: Optional[str] = None
@dataclass
class DockSummary:
    """Known stations indexed by a string key.

    NOTE(review): the key's meaning (station name vs. market id) is not
    evident from this file -- confirm against the writer of this dict.
    """
    stations: Dict[str, Station] = field(default_factory=dict)
|
from eshop_products.models import Product
from django.shortcuts import render, Http404, redirect
from django.views.generic import ListView, DetailView
from .forms import UserFavouriteProductForm
from .models import UserFavouriteProducts
from django.contrib.auth.decorators import login_required
# Create your views here.
class FavouriteProducts(ListView):
    """Paginated list view of the signed-in user's favourite products."""
    template_name = 'products/product_list.html'
    paginate_by = 6

    def get_queryset(self):
        current_user_id = self.request.user.id
        # First favourite record for this user, if any.
        favourite = UserFavouriteProducts.objects.filter(owner_id=current_user_id).first()
        if favourite is None:
            return Product.objects.filter(active=True, userfavouriteproducts__owner_id=current_user_id)
        return Product.objects.filter(userfavouriteproducts__owner_id=current_user_id).distinct()
@login_required(login_url='/login')
def add_favourite_product(request):
    """Validate the favourite-product form and attach the product to the user.

    Raises Http404 when the submitted product id matches nothing; always
    redirects back to the product listing.
    """
    form = UserFavouriteProductForm(request.POST or None)
    if form.is_valid():
        product_id = form.cleaned_data.get('productId')
        product = Product.objects.filter(id=product_id).first()
        if product is None:
            raise Http404('محصولی با این مشخصات یافت نشد')
        UserFavouriteProducts.objects.create(owner_id=request.user.id, product_id=product.id)
    return redirect('/products')
|
from __future__ import absolute_import
from celery import shared_task
from time import sleep
from .send_mail import send_mail
@shared_task
def test():
    """Smoke-test Celery task: block for 10 seconds, then return 'Hello'."""
    sleep(10)
    return 'Hello'
@shared_task
def run_sigma(id): #id is used to fetch config and data
    """Run the (stubbed) sigma job for `id` and e-mail the result.

    NOTE(review): the sigma call is still a TODO -- the sleep simulates
    work and the recipient address is a placeholder; confirm before use.
    """
    #TODO: Call sigma and fetch data
    sleep(5)
    send_mail('YOUR USER NAME@YOUR EMAIL DOMAIN', id) #will fetch email from db
|
import json
from collections import Counter
class Elorating:
    """Holds a pair of Elo ratings and exposes the expected-score math.

    The rating-update code that once lived in setResult is gone; the
    method now only reports player A's expected score.
    """

    ELO_RESULT_WIN = 1
    ELO_RESULT_LOSS = -1
    ELO_RESULT_TIE = 0
    ELO_RATING_DEFAULT = 1500
    ratingA = 0
    ratingB = 0

    def __init__(self, ratingA=ELO_RATING_DEFAULT, ratingB=ELO_RATING_DEFAULT):
        self.ratingA = ratingA
        self.ratingB = ratingB

    def setResult(self, result):
        """Return A's expected score; `result` is currently unused."""
        expected_a = self.computeScore(self.ratingB, self.ratingA)
        self.computeScore(self.ratingA, self.ratingB)  # B's expectation, unused
        return expected_a

    def computeK(self, rating):
        """K-factor: stronger players move in smaller steps."""
        for threshold, k in ((2400, 16), (2100, 24)):
            if rating >= threshold:
                return k
        return 36

    def computeScore(self, rating1, rating2):
        """Expected score of the rating2 player against rating1 (logistic curve)."""
        exponent = (rating1 - rating2) / 400
        return 1 / (1 + 10 ** exponent)
def rateCalcu(rateA, rateB):
    """Distance-like score in [0, 1): 0 for equal ratings, approaching 1 when far apart."""
    expected = Elorating(rateA, rateB).setResult(-1)
    return abs(2 * expected - 1)
def userCalcuList(listA, listB):
    """Per-case-type rating distances between two users, plus their total.

    Returns a dict mapping each shared case_type to its distance, with the
    running total stored under the extra key 'delta'.
    """
    result = {}
    total = 0
    for case_a in listA:
        for case_b in listB:
            if case_a["case_type"] == case_b["case_type"]:
                distance = rateCalcu(case_a["Score"], case_b["Score"])
                total += distance
                result[case_b["case_type"]] = distance
    result["delta"] = total
    return result
def userCalcu(listA, listB):
    """Total rating distance over the case types two users have in common."""
    return sum(
        rateCalcu(case_a["Score"], case_b["Score"])
        for case_a in listA
        for case_b in listB
        if case_a["case_type"] == case_b["case_type"]
    )
def caseTypeRecFunc():
    """Build per-user top-5 case-type recommendations and write them to disk.

    Reads ./realData/UserAbilityTypeLevel.json, scores every user's cases
    with a fixed 0.4/0.4/0.2 weighting of algorithm/debug/first-construction
    scores, compares every pair of users via userCalcu/userCalcuList, keeps
    the five highest-scoring peers per user, and writes the result to
    ./realData/caseTypeRec.json.

    BUG FIX: the output file was opened but never closed; both files now
    use `with` so they are flushed and closed even on error.
    """
    with open('./realData/UserAbilityTypeLevel.json', 'r', encoding='utf8') as fp:
        json_data = json.load(fp)
    # Per-user list of {case_type, Score} with the fixed weighting.
    info_dict = {}
    for key in json_data:
        userList = []
        for user_case in json_data[key]:
            userList.append({
                "case_type": user_case["case_type"],
                "Score": 0.4 * user_case["algorthmScore"]
                         + 0.4 * user_case["debugScore"]
                         + 0.2 * user_case["firstConsScore"],
            })
        info_dict[key] = userList
    result_dict = {}
    for keyA in info_dict:
        temp_dict = {}
        list_dict = {}
        for keyB in info_dict:
            if keyA != keyB:
                temp_dict[keyB] = userCalcu(info_dict[keyA], info_dict[keyB])
                list_dict[keyB] = userCalcuList(info_dict[keyA], info_dict[keyB])
        # Keep the five peers with the highest aggregate score.
        top_peers = sorted(temp_dict, key=temp_dict.__getitem__, reverse=True)[:5]
        result_dict[keyA] = {peer: list_dict[peer] for peer in top_peers}
    info_json = json.dumps(result_dict, sort_keys=False, indent=4,
                           separators=(',', ': '), ensure_ascii=False)
    print(type(info_json))
    with open('./realData/caseTypeRec.json', 'w', encoding='utf8') as f:
        f.write(info_json)
def userAbilityCalcu(dictA, dictB):
    """Total similarity (1 - distance per ability) over abilities both users share."""
    return sum(
        1 - rateCalcu(dictA[ability] * 1000, dictB[ability] * 1000)
        for ability in dictA
        if ability in dictB
    )
def userAbilityCalcuList(dictA, dictB):
    """Per-ability similarities between two users, plus their total.

    Returns a dict mapping each shared ability key to its similarity,
    with the running total stored under the extra key 'delta'.
    """
    result = {}
    total = 0
    for ability in dictA:
        if ability in dictB:
            similarity = 1 - rateCalcu(dictA[ability] * 1000, dictB[ability] * 1000)
            total += similarity
            result[ability] = similarity
    result["delta"] = total
    return result
def abilityRecFunc():
    """Build per-user top-5 ability-similarity recommendations and write them.

    Reads ./realData/UserAbilityLevel.json, compares every pair of users via
    userAbilityCalcu/userAbilityCalcuList, keeps each user's five most
    similar peers, and writes the result to ./realData/abilityRec.json.

    BUG FIX: the output file was opened but never closed; both files now
    use `with` so they are flushed and closed even on error.
    """
    with open('./realData/UserAbilityLevel.json', 'r', encoding='utf8') as fp:
        json_data = json.load(fp)
    info_dict = {}
    for keyA in json_data:
        temp_dict = {}
        list_dict = {}
        for keyB in json_data:
            if keyA != keyB:
                temp_dict[keyB] = userAbilityCalcu(json_data[keyA], json_data[keyB])
                list_dict[keyB] = userAbilityCalcuList(json_data[keyA], json_data[keyB])
        # Five most similar peers by total similarity.
        top_peers = sorted(temp_dict, key=temp_dict.__getitem__, reverse=True)[:5]
        info_dict[keyA] = {peer: list_dict[peer] for peer in top_peers}
    info_json = json.dumps(info_dict, sort_keys=False, indent=4,
                           separators=(',', ': '), ensure_ascii=False)
    print(type(info_json))
    with open('./realData/abilityRec.json', 'w', encoding='utf8') as f:
        f.write(info_json)
# caseTypeRecFunc()
abilityRecFunc() |
import setuptools
#scripts=['change_set_subfield'] ,
# Packaging metadata for iSeqs2; every entry in `scripts` is installed
# as a standalone executable on the user's PATH.
setuptools.setup(
    name='iSeqs2',
    version='0.0.3',
    author="Alessandro Coppe",
    author_email="",
    description="Another bunch of scripts I use in my Next Generation Sequencing Bioinformatics Analyses",
    url="https://github.com/alexcoppe/iSeqs2",
    packages=["iseqs2"],
    scripts=["scripts/build_leukemia_genes_list/build_leukemia_genes_list.py",
        "scripts/filter_by_normal_expression/filter_by_normal_expression.py",
        "scripts/filter_strelka2/filter_strelka2.py",
        "scripts/remove_snps_from_cosmic/remove_snps_from_cosmic.py",
        "scripts/filter_vcf_by_gene_names/filter_vcf_by_gene_names.py",
        "scripts/filter_varscan2_vcfs/filter_varscan2_vcfs.py",
        "scripts/remove_germline_variants_from_varscan/remove_germline_variants_from_varscan.py",
        "scripts/nucleotides_frequency_from_bam_positions/nucleotides_frequency_from_bam_positions.py",
        "scripts/join_tables_from_snpsift_extractFields/join_tables_from_snpsift_extractFields.py"],
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
)
|
import numpy as np
import time
import sys
from itertools import permutations
def main():
    """Solve A·x = b with Jacobi iteration for every CSV file on the command line.

    Each CSV is an augmented matrix: coefficient columns followed by the
    right-hand-side column (see read_csv_file).
    """
    args = sys.argv[1:]
    N = 100 #number of iterations
    residual_converg = 1e-8  # stop once the residual norm falls below this
    for arg in args:
        print("\n" + arg)
        a,b = read_csv_file(arg)
        jacobi(a, b, N, residual_converg)
def read_csv_file(file_name):
    """Load an augmented-matrix CSV.

    All columns but the last form the coefficient matrix A; the last
    column is the right-hand side b.  Returns (A, b) as numpy arrays.
    """
    raw = np.genfromtxt(file_name, delimiter=',')
    last_col = raw.shape[1] - 1
    coefficients = np.delete(raw, last_col, 1)
    rhs = raw[:, last_col]
    return coefficients, rhs
def jacobi(A, b, N, rc):
    """Jacobi-iterate toward the solution of A x = b.

    Args:
        A: square coefficient matrix (diagonal entries must be non-zero).
        b: right-hand-side vector.
        N: maximum number of iterations.
        rc: residual threshold; iteration stops once ||A x - b|| <= rc.

    Returns:
        The final iterate.  IMPROVEMENT: the original printed timing but
        returned None, so callers could never use the computed solution;
        existing callers that ignore the return value are unaffected.
    """
    ig = [0] * len(A)  # initial guess: the zero vector
    residual = np.linalg.norm(np.matmul(A, ig) - b)
    # Split A into its diagonal D and remainder R: x_new = (b - R x) / D.
    D = np.diag(A)
    R = A - np.diagflat(D)
    i = 0
    start_time = time.time_ns()
    while (i < N and residual > rc):
        ig = (b - np.dot(R, ig)) / D
        residual = np.linalg.norm(np.matmul(A, ig) - b)
        i = i + 1
    print("EXECUTION TIME:" + " %s nano seconds " % (time.time_ns() - start_time))
    return ig
# Script entry point.
if __name__ == '__main__':
    main()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__version__ = '1.0.1'
import ujson
import json
import datetime
from datetime import timedelta
from sanic import Blueprint
from sanic import response
from sanic.request import Request
from sanic_jwt import inject_user, scoped
from sanic.log import logger
from web_backend.nvlserver.helper.request_wrapper import populate_response_format
from web_backend.nvlserver.helper.process_request_args import proc_arg_to_int
from web_backend.nvlserver.module.rent.service import(
get_rent_alram_list,update_rent_element_alarm
)
from .service import (
get_traceable_object_list, get_traceable_object_dropdown_list,
get_traceable_object_list_count, create_traceable_object_element,add_vehicle_timer_time_slots,
get_traceable_object_element, update_traceable_object_element, delete_traceable_object_element,get_vehicle_wise_rent_statics,
# TRACEABLE OBJECT TYPE IMPORTS
get_traceable_object_type_list, get_traceable_object_type_dropdown_list,
get_traceable_object_type_list_count, create_traceable_object_type_element,
get_traceable_object_type_element, update_traceable_object_type_element, delete_traceable_object_type_element,
get_traceable_object_brand_dropdown_list, get_traceable_object_model_dropdown_list, update_traceable_object_alram_time_element, add_vehicle_wise_rent_statics,get_vehicle_wise_rent_statics_count, update_traceable_object_alram_status
)
from web_backend.nvlserver.module.hw_command.service import (
create_user_hw_command_element, get_user_hw_command_state_by_traceable_object_id
)
from web_backend.nvlserver.module.hw_action.service import get_hw_action_element
from web_backend.nvlserver.module.hw_module.service import get_hw_module_element,get_hw_module_element_by_traceable_object_id
api_traceable_object_blueprint = Blueprint('api_traceable_object', url_prefix='/api/traceable_object')
@api_traceable_object_blueprint.route('/', methods=['GET'])
@inject_user()
@scoped(['traceable_object:read'], require_all=True, require_all_actions=True)
async def api_traceable_object_list_get(request: Request, user):
    """
    Paginated list of traceable objects.

    Admin accounts see every object; other accounts only their own.
    Query args: size (page size), page (1-based), name (optional filter).

    :param request: incoming Sanic request
    :param user: injected JWT user payload
    :return: raw JSON response {'success', 'message', 'data'}
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    size = proc_arg_to_int(request.args.get('size', '1'), 1)
    page = proc_arg_to_int(request.args.get('page', '1'), 1)
    name = request.args.get('name', None)
    offset = (page - 1) * size  # rows to skip for the requested page
    if request.method == 'GET':
        try:
            if user:
                if user.get('user_id', None):
                    # Admins are not scoped to a particular user_id.
                    if user.get('account_type_name') == 'admin':
                        traceable_object_list = await get_traceable_object_list(
                            request, user_id=None, name=name, limit=size, offset=offset)
                        traceable_object_count = await get_traceable_object_list_count(
                            request, user_id=None, name=name)
                    else:
                        traceable_object_list = await get_traceable_object_list(
                            request, user_id=user.get('user_id'), name=name, limit=size, offset=offset)
                        traceable_object_count = await get_traceable_object_list_count(
                            request, user_id=user.get('user_id'), name=name)
                    if traceable_object_list:
                        ret_val['success'] = True
                        ret_val['message'] = 'server.query_success'
                        res_data_formatted = await populate_response_format(
                            traceable_object_list, traceable_object_count, size=size, page=page)
                        ret_val['data'] = res_data_formatted
                        status = 200
                    else:
                        # Empty result is still a successful query.
                        ret_val['success'] = True
                        ret_val['message'] = 'server.query_success'
                        ret_val['data'] = {}
                        status = 200
                else:
                    status = 400
                    ret_val['message'] = 'server.bad_request'
            else:
                status = 401
                ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error('Function api_traceable_object_get -> GET erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
@api_traceable_object_blueprint.route('/vehicle_wise_rent_statics', methods=['GET'])
@inject_user()
@scoped(['traceable_object:read'], require_all=True, require_all_actions=True)
async def api_traceable_object_vehicle_rent_statics_get(request: Request, user):
    """
    Paginated vehicle-wise rent statistics.

    BUG FIX: this handler was defined with the same name as the '/' list
    handler (api_traceable_object_list_get); the second definition silently
    rebound the first at module level.  It now has a unique name.

    :param request: incoming Sanic request
    :param user: injected JWT user payload
    :return: raw JSON response {'success', 'message', 'data'}
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    size = proc_arg_to_int(request.args.get('size', '1'), 1)
    page = proc_arg_to_int(request.args.get('page', '1'), 1)
    name = request.args.get('name', None)
    offset = (page - 1) * size  # rows to skip for the requested page
    if request.method == 'GET':
        try:
            if user:
                if user.get('user_id', None):
                    # Admins see statistics across all users.
                    if user.get('account_type_name') == 'admin':
                        rent_statics = await get_vehicle_wise_rent_statics(
                            request, user_id=None, name=name, limit=size, offset=offset)
                        rent_statics_count = await get_vehicle_wise_rent_statics_count(
                            request, user_id=None, name=name)
                    else:
                        rent_statics = await get_vehicle_wise_rent_statics(
                            request, user_id=user.get('user_id'), name=name, limit=size, offset=offset)
                        rent_statics_count = await get_vehicle_wise_rent_statics_count(
                            request, user_id=user.get('user_id'), name=name)
                    if rent_statics:
                        ret_val['success'] = True
                        ret_val['message'] = 'server.query_success'
                        ret_val['data'] = await populate_response_format(
                            rent_statics, rent_statics_count, size=size, page=page)
                        status = 200
                    else:
                        # Empty result is still a successful query.
                        ret_val['success'] = True
                        ret_val['message'] = 'server.query_success'
                        ret_val['data'] = {}
                        status = 200
                else:
                    status = 400
                    ret_val['message'] = 'server.bad_request'
            else:
                status = 401
                ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error(
                'Function api_traceable_object_vehicle_rent_statics_get -> GET erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
@api_traceable_object_blueprint.route('/dropdown', methods=['GET'])
@inject_user()
@scoped(['traceable_object:query_dropdown'], require_all=True, require_all_actions=True)
async def api_traceable_object_list_dropdown_get(request: Request, user):
    """
    Dropdown (unpaginated) list of traceable objects.

    BUG FIX: user_id was read as request.args.get('user_id', ['0'])[0] --
    Sanic's args.get already returns the first value as a string, so the
    trailing [0] kept only the first character of the id (e.g. '123' -> '1').

    :param request: incoming Sanic request
    :param user: injected JWT user payload
    :return: raw JSON response {'success', 'message', 'data'}
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    name = request.args.get('name', None)
    user_id = proc_arg_to_int(request.args.get('user_id', '0'), 0)
    if request.method == 'GET':
        try:
            if user:
                if user.get('user_id', None):
                    if user.get('account_type_name') != 'admin':
                        # Non-admins may only list their own objects.
                        user_id = user.get('user_id')
                    traceable_object_list = await get_traceable_object_dropdown_list(
                        request, user_id=user_id, name=name)
                    if traceable_object_list:
                        ret_val['success'] = True
                        ret_val['message'] = 'server.query_success'
                        ret_val['data'] = traceable_object_list
                        status = 200
                    else:
                        ret_val['success'] = True
                        ret_val['message'] = 'server.query_success'
                        ret_val['data'] = []
                        status = 200
                else:
                    status = 400
                    ret_val['message'] = 'server.bad_request'
            else:
                status = 401
                ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error('Function api_traceable_object_get -> GET erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
@api_traceable_object_blueprint.route('/', methods=['POST'])
@inject_user()
@scoped(['traceable_object:create'], require_all=True, require_all_actions=True)
async def api_traceable_object_post(request: Request, user):
    """
    Create a traceable object from the JSON body.

    Vehicle-specific attributes are folded into a meta_information dict;
    plain ('user') accounts always create objects under their own id.

    :param request: incoming Sanic request
    :param user: injected JWT user payload
    :return: raw JSON response {'success', 'message', 'data'}
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'POST':
        try:
            if user:
                if user.get('user_id'):
                    name = request.json.get('name', None)
                    traceable_object_type_id = request.json.get('traceable_object_type_id', None)
                    note = request.json.get('note', '')
                    show_on_map = request.json.get('show_on_map', False)
                    action = request.json.get('action', False)
                    collision_avoidance_system = request.json.get('collision_avoidance_system', False)
                    active = request.json.get('active', True)
                    consumption = request.json.get('consumption', '0')
                    registration_number = request.json.get('registration_number', '0')
                    vin_number = request.json.get('vin_number', '0')
                    vehicle_brand = request.json.get('vehicle_brand', '')
                    vehicle_brand_id = request.json.get('vehicle_brand_id', 0)
                    vehicle_model = request.json.get('vehicle_model', '')
                    vehicle_model_id = request.json.get('vehicle_model_id', 0)
                    vehicle_year = request.json.get('vehicle_year', '')
                    vehicle_default_throttle = request.json.get('vehicle_default_throttle', 60)
                    # Vehicle details are stored as one JSON blob.
                    meta_information = {
                        'consumption': consumption,
                        'registration_number': registration_number,
                        'vin_number': vin_number,
                        'vehicle_brand': vehicle_brand,
                        'vehicle_model_id': vehicle_model_id,
                        'vehicle_brand_id': vehicle_brand_id,
                        'vehicle_model': vehicle_model,
                        'vehicle_year': vehicle_year,
                        'vehicle_default_throttle': vehicle_default_throttle
                    }
                    # Plain users may only create objects for themselves.
                    if user.get('account_type_name') == 'user':
                        user_id = user.get('user_id')
                    else:
                        user_id = request.json.get('user_id', user.get('user_id'))
                    if None not in [name]:
                        traceable_object = await create_traceable_object_element(
                            request, name=name, traceable_object_type_id=traceable_object_type_id,
                            user_id=user_id, note=note, meta_information=meta_information,
                            show_on_map=show_on_map,
                            action=action, collision_avoidance_system=collision_avoidance_system,
                            active=active)
                        # print(traceable_object)
                        if traceable_object:
                            ret_val['data'] = traceable_object
                            ret_val['success'] = True
                            status = 201
                            ret_val['message'] = 'server.object_created'
                        else:
                            status = 412
                            ret_val['message'] = 'server.query_condition_failed'
                else:
                    status = 400
                    ret_val['message'] = 'server.bad_request'
            else:
                status = 401
                ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error('Function api_traceable_object_post -> POST erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
@api_traceable_object_blueprint.route('/<traceable_object_id:int>', methods=['GET'])
@inject_user()
@scoped(['traceable_object:read'], require_all=True, require_all_actions=True)
async def api_traceable_object_element_get(request, user, traceable_object_id: int = 0):
    """
    Fetch one traceable object by id.

    Admin accounts may fetch any object (user_id filter disabled);
    plain accounts are restricted to their own objects.

    :param request: incoming Sanic request
    :param user: injected JWT user payload
    :param traceable_object_id: target object id from the URL
    :return: raw JSON response {'success', 'message', 'data'}
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'GET':
        try:
            if user:
                if user.get('user_id', None) and traceable_object_id:
                    if user.get('account_type_name') == 'user':
                        user_id = user.get('user_id')
                    else:
                        user_id = None
                    traceable_object_element = await get_traceable_object_element(
                        request, user_id=user_id, traceable_object_id=traceable_object_id)
                    if traceable_object_element:
                        ret_val['success'] = True
                        ret_val['message'] = 'server.query_success'
                        ret_val['data'] = traceable_object_element
                        status = 200
                    else:
                        # Not found still reports success with empty data.
                        ret_val['success'] = True
                        ret_val['message'] = 'server.query_success'
                        status = 200
                else:
                    status = 400
                    ret_val['message'] = 'server.bad_request'
            else:
                status = 401
                ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error('Function api_traceable_object_element_get -> GET erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
@api_traceable_object_blueprint.route('/<traceable_object_id:int>', methods=['PUT'])
@inject_user()
@scoped(['traceable_object:update'], require_all=True, require_all_actions=True)
async def api_traceable_object_element_put(request: Request, user, traceable_object_id: int = 0):
    """
    Update an existing traceable object from the JSON body.

    Mirrors the POST handler: vehicle attributes are packed into
    meta_information; non-admin accounts may only touch their own objects.

    :param request: incoming Sanic request
    :param user: injected JWT user payload
    :param traceable_object_id: target object id from the URL
    :return: raw JSON response {'success', 'message', 'data'}
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'PUT':
        try:
            if user:
                if user.get('user_id'):
                    # TODO: IMPLEMENT USER ACCESS if user.get('is_superuser'):
                    if traceable_object_id:
                        name = request.json.get('name', None)
                        traceable_object_type_id = request.json.get('traceable_object_type_id', None)
                        # REPLACE WHEN USER MANAGEMENT IS IMPLEMENTED TO user.get('user_id')
                        # IMPLEMENT USER DROP DOWN ON ADMIN INTERFACE
                        if user.get('account_type_name') == 'admin':
                            user_id = request.json.get('user_id', None)
                        else:
                            user_id = user.get('user_id')
                        note = request.json.get('note', '')
                        show_on_map = request.json.get('show_on_map', False)
                        action = request.json.get('action', False)
                        active = request.json.get('active', True)
                        consumption = request.json.get('consumption', '0')
                        registration_number = request.json.get('registration_number', '0')
                        collision_avoidance_system = request.json.get('collision_avoidance_system', False)
                        vin_number = request.json.get('vin_number', '0')
                        vehicle_brand = request.json.get('vehicle_brand', '')
                        vehicle_brand_id = request.json.get('vehicle_brand_id', 0)
                        vehicle_model = request.json.get('vehicle_model', '')
                        vehicle_model_id = request.json.get('vehicle_model_id', 0)
                        vehicle_year = request.json.get('vehicle_year', '')
                        vehicle_default_throttle = request.json.get('vehicle_default_throttle', 60)
                        # Vehicle details travel as one JSON blob.
                        meta_information = {
                            'consumption': consumption,
                            'registration_number': registration_number,
                            'vin_number': vin_number,
                            'vehicle_brand': vehicle_brand,
                            'vehicle_model_id': vehicle_model_id,
                            'vehicle_brand_id': vehicle_brand_id,
                            'vehicle_model': vehicle_model,
                            'vehicle_year': vehicle_year,
                            'vehicle_default_throttle': vehicle_default_throttle
                        }
                        if None not in [name]:
                            traceable_object = await update_traceable_object_element(
                                request, user_id=user_id, traceable_object_id=traceable_object_id, name=name,
                                traceable_object_type_id=traceable_object_type_id,
                                note=note, meta_information=meta_information, show_on_map=show_on_map,
                                action=action, collision_avoidance_system=collision_avoidance_system,
                                active=active)
                            if traceable_object:
                                ret_val['success'] = True
                                ret_val['message'] = 'server.query_success'
                                ret_val['data'] = traceable_object
                                status = 202
                                ret_val['message'] = 'server.accepted'
                            else:
                                status = 412
                                ret_val['message'] = 'server.query_condition_failed'
                else:
                    status = 400
                    ret_val['message'] = 'server.bad_request'
            else:
                status = 401
                ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error('Function api_traceable_object_element_put -> PUT erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
@api_traceable_object_blueprint.route('/<traceable_object_id:int>', methods=['DELETE'])
@inject_user()
@scoped(['traceable_object:delete'], require_all=True, require_all_actions=True)
async def api_traceable_object_element_delete(request: Request, user, traceable_object_id: int = 0):
    """
    Delete a traceable object by id.

    Admins may delete any object (no user_id filter); plain accounts
    only their own.  The happy path always answers 202.

    :param request: incoming Sanic request
    :param user: injected JWT user payload
    :param traceable_object_id: target object id from the URL
    :return: raw JSON response {'success', 'message', 'data'}
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'DELETE':
        try:
            if user:
                if user.get('user_id'):
                    # TODO: IMPLEMENT USER ACCESS if user.get('is_superuser'):
                    if traceable_object_id:
                        if user.get('account_type_name') == 'admin':
                            user_id = None
                        else:
                            user_id = user.get('user_id')
                        # print(30 * '-')
                        await delete_traceable_object_element(
                            request, user_id=user_id, traceable_object_id=traceable_object_id)
                        # print(30 * '-')
                        status = 202
                        ret_val['success'] = True
                        ret_val['message'] = 'server.accepted'
                    else:
                        status = 403
                        ret_val['message'] = 'server.forbidden'
                else:
                    status = 400
                    ret_val['message'] = 'server.bad_request'
            else:
                status = 401
                ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error('Function api_traceable_object_element_delete -> DELETE erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
@api_traceable_object_blueprint.route('/update_alram_time/<traceable_object_id:int>', methods=['PUT'])
@inject_user()
@scoped(['traceable_object:update'], require_all=True, require_all_actions=True)
async def api_traceable_object_element_alarmput_put(request: Request, user, traceable_object_id: int = 0):
    """
    Update a traceable object's alarm start time / state.

    When the alarm is enabled with an immediate start (alarm_start == 0),
    a SOUND BUZZER hardware command is queued for the object's module.

    FIXES: removed a no-op `datetime.datetime.now()` statement and the
    unused `created_command` binding from the original.

    :param request: incoming Sanic request (JSON body: alarm_start, alarm_state)
    :param user: injected JWT user payload
    :param traceable_object_id: target object id from the URL
    :return: raw JSON response {'success', 'message', 'data'}
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'PUT':
        try:
            if user:
                if user.get('user_id'):
                    # TODO: IMPLEMENT USER ACCESS if user.get('is_superuser'):
                    if traceable_object_id:
                        alarm_start = request.json.get('alarm_start', '')
                        alarm_state = request.json.get('alarm_state', False)
                        # NOTE(review): alarm_start comes straight from JSON;
                        # the == 0 test assumes a numeric value -- confirm.
                        if alarm_start == 0 and alarm_state is True:
                            hw_action_id = 6  # SOUND BUZZER
                            value = True
                            hw_action = await get_hw_action_element(request, hw_action_id)
                            hw_module_element = await get_hw_module_element_by_traceable_object_id(
                                request, user_id=user.get('user_id'), traceable_object_id=traceable_object_id)
                            # Queue the buzzer command; its return value is not needed here.
                            await create_user_hw_command_element(
                                request, user_id=1,
                                hw_action_id=hw_action.get('id'), proto_field=hw_action.get('proto_field'),
                                field_type=hw_action.get('action').get('type'), value=str(value),
                                state='pending', traceable_object_id=hw_module_element.get('traceable_object_id'),
                                hw_module_id=hw_module_element.get('id'), ack_message=True,
                                active=True
                            )
                        # Record when the alarm was switched on (None when off).
                        if alarm_state is True:
                            alarm_on_tme = datetime.datetime.now()
                        else:
                            alarm_on_tme = None
                        if None not in [alarm_start]:
                            traceable_object = await update_traceable_object_alram_time_element(
                                request, alarm_start=alarm_start, alarm_on_tme=alarm_on_tme,
                                traceable_object_id=traceable_object_id, alarm_state=alarm_state)
                            if traceable_object:
                                ret_val['success'] = True
                                ret_val['data'] = traceable_object
                                status = 202
                                ret_val['message'] = 'server.accepted'
                            else:
                                status = 412
                                ret_val['message'] = 'server.query_condition_failed'
                    else:
                        status = 400
                        ret_val['message'] = 'server.bad_request'
                else:
                    status = 401
                    ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error('Function api_traceable_object_element_put -> PUT erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
@api_traceable_object_blueprint.route('/update_alram_status/<traceable_object_id:int>', methods=['PUT'])
@inject_user()
@scoped(['traceable_object:update'], require_all=True, require_all_actions=True)
async def api_traceable_object_element_alarm_status_put(request: Request, user, traceable_object_id: int = 0):
    """
    Pause or resume a traceable object's alarm.

    On pause: store the pause timestamp together with the accumulated
    duration.  On resume: add the elapsed pause time to the accumulated
    duration and clear the timestamp.

    BUG FIX: this handler previously reused the name
    api_traceable_object_element_alarmput_put already defined for the
    /update_alram_time route, so the second definition silently rebound
    the first at module level; it now has a unique name (the error-log
    label is fixed accordingly).

    :param request: incoming Sanic request (JSON body: alarm_pause_status)
    :param user: injected JWT user payload
    :param traceable_object_id: target object id from the URL
    :return: raw JSON response {'success', 'message', 'data'}
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'PUT':
        try:
            if user:
                if user.get('user_id'):
                    if traceable_object_id:
                        alarm_pause_status = request.json.get('alarm_pause_status', '')
                        traceable_object_element_value = await get_traceable_object_element(
                            request, user_id=user.get('user_id'), traceable_object_id=traceable_object_id)
                        if alarm_pause_status:
                            # Pausing: remember when the pause began.
                            alarm_pause_time = datetime.datetime.now()
                            traceable_object = await update_traceable_object_alram_status(
                                request, alarm_pause_status=alarm_pause_status,
                                alarm_pause_time=alarm_pause_time,
                                traceable_object_id=traceable_object_id,
                                alarm_pause_duration=traceable_object_element_value['alarm_pause_duration'])
                        else:
                            # Resuming: fold the elapsed pause into the total duration.
                            current_time = datetime.datetime.now()
                            date_time_obj = datetime.datetime.strptime(
                                traceable_object_element_value['alarm_pause_time'], '%Y-%m-%d %H:%M:%S')
                            total_seconds = (current_time - date_time_obj).total_seconds()
                            total_seconds = total_seconds + traceable_object_element_value['alarm_pause_duration']
                            traceable_object = await update_traceable_object_alram_status(
                                request, alarm_pause_status=alarm_pause_status,
                                alarm_pause_time=None,
                                traceable_object_id=traceable_object_id,
                                alarm_pause_duration=total_seconds)
                        ret_val['success'] = True
                        ret_val['data'] = traceable_object
                        status = 202
                        ret_val['message'] = 'server.accepted'
                    else:
                        status = 400
                        ret_val['message'] = 'server.bad_request'
                else:
                    status = 401
                    ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error(
                'Function api_traceable_object_element_alarm_status_put -> PUT erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
# START ALARM FOR DEVICE OBJECT TYPE API
@api_traceable_object_blueprint.route('/start_alarm_bycron', methods=['GET'])
async def api_traceable_object_start_alarm_bycron_get(request: Request):
    """
    Cron-driven sweep that fires the buzzer alarm for rented devices.

    For every traceable object with a running rent (``alarm_start`` set) it
    queues 20 "sound buzzer" hardware commands starting 3 minutes before the
    rented period (plus accumulated pause time) ends, and clears the alarm
    state once 12 minutes past the period have elapsed.

    BUG FIXES: the handler previously never updated ``status`` so the cron
    caller always received HTTP 500 even on a clean sweep; it also shared
    the name ``api_traceable_object_type_list_get`` with the '/type' GET
    handler below, which shadows it at module level and collides in
    route-name lookups.

    NOTE(review): this route is unauthenticated (cron caller) — confirm it
    is not reachable from untrusted networks.

    :param request: incoming Sanic request
    :return: raw JSON response with the last updated object, or the default
             failure payload when nothing was updated
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    traceable_object = ret_val
    if request.method == 'GET':
        try:
            traceable_list = await get_traceable_object_list(request, limit=0)
            for device in traceable_list:
                alarm_start = device['alarm_start']
                alarm_pause_status = device['alarm_pause_status']
                alarm_status = device['alarm_state']
                alarm_pause_duration = device['alarm_pause_duration']
                if alarm_pause_duration is None:
                    alarm_pause_duration = 0
                traceable_object_id = device['id']
                if alarm_start:
                    # NOTE(review): assumes 'alarm_on_tme' is a
                    # '%Y-%m-%d %H:%M:%S' string — TODO confirm.
                    alarm_on_tme = datetime.datetime.strptime(device['alarm_on_tme'], '%Y-%m-%d %H:%M:%S')
                    date_time_obj = datetime.datetime.strptime(device['alarm_on_tme'], '%Y-%m-%d %H:%M:%S')
                    # Start buzzing 3 minutes before the rented period ends,
                    # shifted by any accumulated pause time.
                    alaram_before = alarm_start - 3  # minutes
                    alrmbeforetime = date_time_obj + timedelta(hours=0, minutes=int(alaram_before), seconds=int(alarm_pause_duration))
                    hw_action_id = 6  # SOUND BUZZER
                    value = True
                    if datetime.datetime.now() >= alrmbeforetime and alarm_status is True and alarm_pause_status is False:
                        hw_action = await get_hw_action_element(request, hw_action_id)
                        hw_module_element = await get_hw_module_element_by_traceable_object_id(
                            request, user_id=device['user_id'], traceable_object_id=device['id'])
                        # Queue the buzzer command repeatedly so the device keeps sounding.
                        for _ in range(20):
                            created_command = await create_user_hw_command_element(
                                request, user_id=1,
                                hw_action_id=hw_action.get('id'), proto_field=hw_action.get('proto_field'),
                                field_type=hw_action.get('action').get('type'), value=str(value),
                                state='pending', traceable_object_id=hw_module_element.get('traceable_object_id'),
                                hw_module_id=hw_module_element.get('id'), ack_message=True,
                                active=True
                            )
                        if created_command:
                            # 12 minutes past the (pause-adjusted) rented period:
                            # switch the alarm off.
                            alrmbeforetime = date_time_obj + timedelta(hours=0, minutes=int(alarm_start + 12), seconds=int(alarm_pause_duration))
                            if datetime.datetime.now() >= alrmbeforetime:
                                traceable_object = await update_traceable_object_alram_time_element(
                                    request, alarm_start=alarm_start, alarm_on_tme=alarm_on_tme,
                                    traceable_object_id=traceable_object_id, alarm_state=False)
            # BUG FIX: report success once the sweep completed without raising
            # (status previously stayed 500 unconditionally).
            status = 200
        except Exception as al_err:
            logger.error('Function api_traceable_object_type_get -> GET erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(traceable_object).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
@api_traceable_object_blueprint.route('/update_vehicle_timeslots/<traceable_object_id:int>', methods=['PUT'])
@inject_user()
@scoped(['traceable_object:update'], require_all=True, require_all_actions=True)
async def api_traceable_object_update_vehicle_timeslots_put(request: Request, user, traceable_object_id: int = 0):
    """
    Store the timer time-slot configuration for a vehicle.

    BUG FIX: this handler was named ``api_traceable_object_element_alarmput_put``,
    identical to the '/update_alram_status' handler above — the second
    definition shadowed the first at module level and collides in Sanic's
    route-name registry, so it now carries a unique name.

    PUT body: ``time_slots`` — arbitrary JSON object, stored serialized
    alongside the current timestamp.

    :param request: incoming Sanic request
    :param user: injected authenticated user payload
    :param traceable_object_id: id of the vehicle to update
    :return: raw JSON response with the stored configuration on success
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'PUT':
        try:
            if user:
                if user.get('user_id'):
                    # TODO: IMPLEMENT USER ACCESS if user.get('is_superuser'):
                    if traceable_object_id:
                        time_slots = request.json.get('time_slots', {})
                        time_slots = json.dumps(time_slots)
                        alarm_on_tme = datetime.datetime.now()
                        # json.dumps never returns None, so this check always
                        # passes; kept for structural parity with siblings.
                        if None not in [time_slots]:
                            traceable_object = await add_vehicle_timer_time_slots(
                                request, alarm_on_tme=alarm_on_tme, time_slots=time_slots,
                                traceable_object_id=traceable_object_id, user_id=user.get('user_id'))
                            if traceable_object:
                                ret_val['success'] = True
                                ret_val['data'] = traceable_object
                                status = 202
                                ret_val['message'] = 'server.accepted'
                            else:
                                status = 412
                                ret_val['message'] = 'server.query_condition_failed'
                    else:
                        status = 400
                        ret_val['message'] = 'server.bad_request'
            else:
                status = 401
                ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error('Function api_traceable_object_element_put -> PUT erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
@api_traceable_object_blueprint.route('/rent_completed/<traceable_object_id:int>', methods=['GET'])
@inject_user()
@scoped(['traceable_object:update'], require_all=True, require_all_actions=True)
async def api_traceable_object_element_rent_completed(request: Request, user, traceable_object_id: int = 0):
    """
    Close the running rent of a traceable object.

    Computes how far the rent overran the booked period (extended time, in
    minutes, excluding paused time), records a per-vehicle rent statistics
    row, then resets the alarm fields on the object itself.

    :param request: incoming Sanic request
    :param user: injected authenticated user payload
    :param traceable_object_id: id of the rented object
    :return: raw JSON response; 412 'No rent found' when no alarm is active
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'GET':
        try:
            if user:
                if user.get('user_id'):
                    # TODO: IMPLEMENT USER ACCESS if user.get('is_superuser'):
                    if traceable_object_id:
                        traceable_object=await get_traceable_object_element(request, user_id=user.get('user_id'), traceable_object_id=traceable_object_id)
                        if traceable_object["alarm_state"]:
                            currentTime=datetime.datetime.now()
                            # Subtract accumulated pause time so paused minutes do
                            # not count against the rent.
                            alrmbeforetime=currentTime-timedelta(hours=int(0), minutes=int(0), seconds=int(traceable_object['alarm_pause_duration']))
                            extendTime=0
                            # NOTE(review): assumes 'alarm_on_tme' is a
                            # '%Y-%m-%d %H:%M:%S' string — raises (and falls through
                            # to the generic 500 response) if it is None.
                            alarm_on_tme=datetime.datetime.strptime(traceable_object['alarm_on_tme'], '%Y-%m-%d %H:%M:%S')
                            timediff=alrmbeforetime-alarm_on_tme
                            total_seconds = timediff.total_seconds()
                            minutes = total_seconds/60
                            # Overrun beyond the booked 'alarm_start' minutes.
                            extendTime=minutes-traceable_object["alarm_start"]
                            extendTime=round(extendTime,2)
                            #add rent statics for completed rent
                            traceable_object2 = await add_vehicle_wise_rent_statics(
                                request, alarm_start=traceable_object["alarm_start"], alarm_on_tme=alarm_on_tme, traceable_object_id=traceable_object_id,alarm_state=traceable_object["alarm_state"],user_id=user.get('user_id'),extended_time=extendTime)
                            #alarm reset on table
                            traceable_object = await update_traceable_object_alram_time_element(
                                request, alarm_start=traceable_object["alarm_start"], alarm_on_tme=None, traceable_object_id=traceable_object_id,alarm_state=False)
                            ret_val['success'] = True
                            ret_val['message'] = 'server.query_success'
                            ret_val['data'] = traceable_object
                            status = 202
                            ret_val['message'] = 'server.accepted'
                        else:
                            status = 412
                            ret_val['message'] = 'No rent found'
                    else:
                        status = 400
                        ret_val['message'] = 'server.bad_request'
            else:
                status = 401
                ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error('Function api_traceable_object_element_put -> PUT erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
@api_traceable_object_blueprint.route('/create_command', methods=['POST'])
@inject_user()
@scoped(['traceable_object:create_command'], require_all=True, require_all_actions=True)
async def api_traceable_object_create_command_post(request: Request, user):
    """
    Queue a hardware command for the module attached to a traceable object.

    POST body: ``hw_action_id`` (required), ``hw_module_id`` and ``value``
    (defaults to True).  For action id 5 the value is toggled against the
    module's last recorded state instead of using the supplied value.

    BUG FIX: removed a leftover debug ``print(state_list)``.

    :param request: incoming Sanic request
    :param user: injected authenticated user payload
    :return: raw JSON response with the created command on success
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'POST':
        try:
            if user:
                if user.get('user_id'):
                    hw_action_id = request.json.get('hw_action_id', None)
                    # TODO: SEND VALID ELEMENTS FROM FRONTEND
                    value = request.json.get('value', True)
                    hw_module_id = request.json.get('hw_module_id')
                    if None not in [hw_action_id]:
                        hw_action = await get_hw_action_element(request, hw_action_id)
                        hw_module_element = await get_hw_module_element(request, hw_module_id=hw_module_id)
                        if hw_action:
                            # TODO: REMOVE ON FRONTEND NEEDED TO BE IMPLEMENTED VALUE PASS
                            if hw_action_id == 5:
                                # Toggle semantics: flip the last known state of
                                # action 5 for this traceable object.
                                state_list = await get_user_hw_command_state_by_traceable_object_id(
                                    request,
                                    hw_module_element.get('traceable_object_id'))
                                if state_list:
                                    act_list = state_list.get('action_list')
                                    pref_action = [x for x in act_list if x.get('hw_action_id') == 5]
                                    if pref_action:
                                        element = pref_action[0]
                                        # True exactly when the recorded state is False.
                                        value = element.get('state') is False
                            created_command = await create_user_hw_command_element(
                                request, user_id=user.get('user_id'),
                                hw_action_id=hw_action.get('id'), proto_field=hw_action.get('proto_field'),
                                field_type=hw_action.get('action').get('type'), value=str(value),
                                state='pending', traceable_object_id=hw_module_element.get('traceable_object_id'),
                                hw_module_id=hw_module_element.get('id'), ack_message=True,
                                active=True
                            )
                            if created_command:
                                ret_val['data'] = created_command
                                ret_val['success'] = True
                                status = 201
                                ret_val['message'] = 'server.object_created'
                            else:
                                status = 412
                                ret_val['message'] = 'server.query_condition_failed'
                    else:
                        status = 400
                        ret_val['message'] = 'server.bad_request'
            else:
                status = 401
                ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error('Function api_traceable_object_create_command_post -> POST erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
# TRACEABLE OBJECT TYPE API
@api_traceable_object_blueprint.route('/type', methods=['GET'])
@inject_user()
@scoped(['traceable_object:read'], require_all=True, require_all_actions=True)
async def api_traceable_object_type_list_get(request: Request, user):
    """List traceable-object types, paginated via ``size``/``page`` query args.

    Optional ``name`` filters by type name.  Responds 200 with a formatted
    page (or an empty dict when nothing matched), 400 on a missing user id,
    401 when unauthenticated.
    """
    http_status = 500
    payload = {'success': False, 'message': 'server.query_failed', 'data': None}
    page_size = proc_arg_to_int(request.args.get('size', '1'), 1)
    page_no = proc_arg_to_int(request.args.get('page', '1'), 1)
    type_name = request.args.get('name', None)
    skip = (page_no - 1) * page_size
    if request.method == 'GET':
        try:
            if not user:
                http_status = 401
                payload['message'] = 'server.unauthorized'
            elif not user.get('user_id', None):
                http_status = 400
                payload['message'] = 'server.bad_request'
            else:
                type_list = await get_traceable_object_type_list(
                    request, name=type_name, limit=page_size, offset=skip)
                type_count = await get_traceable_object_type_list_count(request, name=type_name)
                payload['success'] = True
                payload['message'] = 'server.query_success'
                if type_list:
                    payload['data'] = await populate_response_format(
                        type_list, type_count, size=page_size, page=page_no)
                else:
                    payload['data'] = {}
                http_status = 200
        except Exception as al_err:
            logger.error('Function api_traceable_object_type_get -> GET erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(payload).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=http_status
    )
@api_traceable_object_blueprint.route('/type/dropdown', methods=['GET'])
@inject_user()
@scoped(['traceable_object:query_dropdown'], require_all=True, require_all_actions=True)
async def api_traceable_object_type_list_dropdown_get(request: Request, user):
    """Return all traceable-object types as a flat dropdown list.

    Optional ``name`` query argument filters the list; an empty result is
    returned as ``[]`` with HTTP 200.
    """
    http_status = 500
    payload = {'success': False, 'message': 'server.query_failed', 'data': None}
    type_name = request.args.get('name', None)
    if request.method == 'GET':
        try:
            if not user:
                http_status = 401
                payload['message'] = 'server.unauthorized'
            elif not user.get('user_id', None):
                http_status = 400
                payload['message'] = 'server.bad_request'
            else:
                dropdown = await get_traceable_object_type_dropdown_list(
                    request, name=type_name)
                payload['success'] = True
                payload['message'] = 'server.query_success'
                payload['data'] = dropdown if dropdown else []
                http_status = 200
        except Exception as al_err:
            logger.error('Function api_traceable_object_type_dropdown_get -> GET erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(payload).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=http_status
    )
@api_traceable_object_blueprint.route('/type', methods=['POST'])
@inject_user()
@scoped(['traceable_object:create'], require_all=True, require_all_actions=True)
async def api_traceable_object_type_post(request: Request, user):
    """Create a traceable-object type (admin accounts only).

    POST body: ``name`` (required) and ``active`` (defaults to True).
    Responds 201 with the created element, 412 when ``name`` is missing,
    403 for non-admin accounts, 400/401 on auth problems.
    """
    http_status = 500
    payload = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'POST':
        try:
            if not user:
                http_status = 401
                payload['message'] = 'server.unauthorized'
            elif not user.get('user_id'):
                http_status = 400
                payload['message'] = 'server.bad_request'
            elif user.get('account_type_name') != 'admin':
                http_status = 403
                payload['message'] = 'server.forbidden'
            else:
                type_name = request.json.get('name', None)
                is_active = request.json.get('active', True)
                if type_name is None:
                    http_status = 412
                    payload['message'] = 'server.query_condition_failed'
                else:
                    created = await create_traceable_object_type_element(
                        request, type_name, is_active)
                    payload['data'] = created
                    payload['success'] = True
                    http_status = 201
                    payload['message'] = 'server.object_created'
        except Exception as al_err:
            logger.error('Function api_traceable_object_type_post -> POST erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(payload).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=http_status
    )
@api_traceable_object_blueprint.route('/type/<traceable_object_type_id:int>', methods=['GET'])
@inject_user()
@scoped(['traceable_object:read'], require_all=True, require_all_actions=True)
async def api_traceable_object_type_element_get(request, user, traceable_object_type_id: int = 0):
    """Fetch a single traceable-object type by id.

    Responds 200 with the element (``data`` stays None when the id does not
    resolve), 400 on a missing user id or zero id, 401 when unauthenticated.
    """
    http_status = 500
    payload = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'GET':
        try:
            if not user:
                http_status = 401
                payload['message'] = 'server.unauthorized'
            elif user.get('user_id', None) and traceable_object_type_id:
                element = await get_traceable_object_type_element(
                    request, traceable_object_type_id)
                payload['success'] = True
                payload['message'] = 'server.query_success'
                if element:
                    payload['data'] = element
                http_status = 200
            else:
                http_status = 400
                payload['message'] = 'server.bad_request'
        except Exception as al_err:
            logger.error('Function api_traceable_object_type_element_get -> GET erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(payload).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=http_status
    )
@api_traceable_object_blueprint.route('/type/<traceable_object_type_id:int>', methods=['PUT'])
@inject_user()
@scoped(['traceable_object:update'], require_all=True, require_all_actions=True)
async def api_traceable_object_type_element_put(request, user, traceable_object_type_id: int = 0):
    """Rename a traceable-object type.

    PUT body: ``name`` (required).  The element is re-saved with
    ``active=True`` unconditionally.  Responds 202 on success, 412 when the
    update returns nothing, 400 on a zero id, 401 when unauthenticated.

    BUG FIX: dropped the tautological ``True and`` in the id guard and the
    redundant 'server.query_success' assignment that was immediately
    overwritten by 'server.accepted'.
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'PUT':
        try:
            if user:
                if user.get('user_id'):
                    # TODO: IMPLEMENT USER ACCESS if user.get('is_superuser'):
                    if traceable_object_type_id:
                        name = request.json.get('name', None)
                        if name is not None:
                            utot = await update_traceable_object_type_element(
                                request, traceable_object_type_id, name, True)
                            if utot:
                                ret_val['success'] = True
                                ret_val['data'] = utot
                                status = 202
                                ret_val['message'] = 'server.accepted'
                            else:
                                status = 412
                                ret_val['message'] = 'server.query_condition_failed'
                    else:
                        status = 400
                        ret_val['message'] = 'server.bad_request'
            else:
                status = 401
                ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error('Function api_traceable_object_type_element_put -> PUT erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
@api_traceable_object_blueprint.route('/type/<traceable_object_type_id:int>', methods=['DELETE'])
@inject_user()
@scoped(['traceable_object:delete'], require_all=True, require_all_actions=True)
async def api_traceable_object_type_element_delete(request, user, traceable_object_type_id: int = 0):
    """Delete a traceable-object type by id.

    Responds 202 on success, 403 when the id is zero/missing, 400 on a
    missing user id, 401 when unauthenticated.

    BUG FIX: dropped the tautological ``True and`` in the id guard.
    """
    status = 500
    ret_val = {'success': False, 'message': 'server.query_failed', 'data': None}
    if request.method == 'DELETE':
        try:
            if user:
                if user.get('user_id'):
                    # TODO: IMPLEMENT USER ACCESS if user.get('is_superuser'):
                    if traceable_object_type_id:
                        await delete_traceable_object_type_element(
                            request, traceable_object_type_id=traceable_object_type_id)
                        status = 202
                        ret_val['success'] = True
                        ret_val['message'] = 'server.accepted'
                    else:
                        status = 403
                        ret_val['message'] = 'server.forbidden'
                else:
                    status = 400
                    ret_val['message'] = 'server.bad_request'
            else:
                status = 401
                ret_val['message'] = 'server.unauthorized'
        except Exception as al_err:
            logger.error('Function api_traceable_object_type_element_delete -> DELETE erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(ret_val).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=status
    )
# TRACEABLE OBJECT BRAND API
@api_traceable_object_blueprint.route('/brand/dropdown', methods=['GET'])
@inject_user()
@scoped(['traceable_object:query_dropdown'], require_all=True, require_all_actions=True)
async def api_traceable_object_brand_list_get(request: Request, user):
    """Return all traceable-object brands as a dropdown list.

    Optional ``name`` query argument filters by brand name; an empty result
    is returned as ``{}`` with HTTP 200.
    """
    http_status = 500
    payload = {'success': False, 'message': 'server.query_failed', 'data': None}
    brand_name = request.args.get('name', None)
    if request.method == 'GET':
        try:
            if not user:
                http_status = 401
                payload['message'] = 'server.unauthorized'
            elif not user.get('user_id', None):
                http_status = 400
                payload['message'] = 'server.bad_request'
            else:
                brands = await get_traceable_object_brand_dropdown_list(
                    request, name=brand_name)
                payload['success'] = True
                payload['message'] = 'server.query_success'
                payload['data'] = brands if brands else {}
                http_status = 200
        except Exception as al_err:
            logger.error('Function api_traceable_object_brand_list_get -> GET erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(payload).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=http_status
    )
# TRACEABLE OBJECT MODEL API
@api_traceable_object_blueprint.route('/model/dropdown/<brand_id:int>', methods=['GET'])
@inject_user()
@scoped(['traceable_object:query_dropdown'], require_all=True, require_all_actions=True)
async def api_traceable_object_model_list_get(request: Request, user, brand_id: int = 0):
    """Return the models of one brand as a dropdown list.

    Optional ``name`` query argument filters by model name; an empty result
    is returned as ``{}`` with HTTP 200.
    """
    http_status = 500
    payload = {'success': False, 'message': 'server.query_failed', 'data': None}
    model_name = request.args.get('name', None)
    if request.method == 'GET':
        try:
            if not user:
                http_status = 401
                payload['message'] = 'server.unauthorized'
            elif not user.get('user_id', None):
                http_status = 400
                payload['message'] = 'server.bad_request'
            else:
                models = await get_traceable_object_model_dropdown_list(
                    request, name=model_name, traceable_object_brand_id=brand_id)
                payload['success'] = True
                payload['message'] = 'server.query_success'
                payload['data'] = models if models else {}
                http_status = 200
        except Exception as al_err:
            logger.error('Function api_traceable_object_model_list_get -> GET erred with: {}'.format(al_err))
    return response.raw(
        ujson.dumps(payload).encode(),
        headers={'X-Served-By': 'sanic', 'Content-Type': 'application/json'},
        status=http_status
    )
|
from django.conf.urls import url
from django.conf.urls.static import static
from django.conf import settings
from django.core.urlresolvers import reverse_lazy
from django.views.generic import RedirectView
from agagd_core import views as agagd_views
# URL routing for the AGAGD core app (legacy Django url() patterns).
urlpatterns = [
    # Home page; legacy *.php URLs are redirected back to the index.
    url(r'^$', agagd_views.index, name='index'),
    url(r'.php$', RedirectView.as_view(url=reverse_lazy('index'))),
    # Site-wide search.
    url(r'^search/$', agagd_views.search, name='search'),
    # Player / chapter / country detail pages.
    url(r'^player/(?P<member_id>\d+)/$', agagd_views.member_detail, name='member_detail'),
    url(r'^chapter/(?P<chapter_id>\d+)/$', agagd_views.chapter_detail, name='chapter_detail'),
    url(r'^chapter/(?P<chapter_code>\w+)/$', agagd_views.chapter_code_redirect, name='chapter_code_redirect'),
    url(r'^country/(?P<country_name>[\w ]+)/$', agagd_views.country_detail, name='country_detail'),
    # Head-to-head game listings between two players.
    url(r'^player/(?P<member_id>\d+)/vs/$', agagd_views.find_member_vs, name='find_member_vs'),
    url(r'^player/(?P<member_id>\d+)/vs/(?P<other_id>\d+)$', agagd_views.member_vs, name='member_vs'),
    # Ratings and game statistics.
    url(r'^all_player_ratings/$', agagd_views.all_player_ratings, name='all_player_ratings'),
    url(r'^ratings/(?P<member_id>\d+)/$', agagd_views.member_ratings, name='member_ratings'),
    url(r'^gamestats/$', agagd_views.game_stats, name='game_stats'),
    # Tournament listing and detail.
    url(r'^tournaments/$', agagd_views.tournament_list, name='tourney_list'),
    url(r'^tournaments/(?P<tourn_code>\w{1,20})/$', agagd_views.tournament_detail, name='tournament_detail'),
    # Static Pages
    url(r'^information/$', agagd_views.information),
    url(r'^qualifications/$', agagd_views.qualifications)
    # Serve uploaded media (development-style static serving).
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
# Generated by Django 3.2 on 2021-05-01 04:49
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the ``Booking`` model tying a user to a hotel-room reservation."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('Tour_app', '0027_room_no_of_available'),
    ]

    operations = [
        migrations.CreateModel(
            name='Booking',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('no_of_rooms', models.IntegerField()),
                ('amount_paid', models.DecimalField(decimal_places=2, max_digits=10)),
                ('check_in', models.DateField()),
                ('check_out', models.DateField()),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('update_at', models.DateTimeField(auto_now=True)),
                # Deleting a hotel or room cascades to its bookings; deleting
                # the booking user is left untouched (DO_NOTHING).
                ('hotel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Tour_app.hotel')),
                ('person', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
                ('room', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Tour_app.room')),
            ],
        ),
    ]
|
'''
Author: MK_Devil
Date: 2021-12-20 17:18:09
LastEditTime: 2022-01-19 17:13:00
LastEditors: MK_Devil
'''
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import random

# Print the numbers within 1-50 that are divisible by 3.
n = 1
while n <= 50:
    if n % 3 == 0:
        print(n, end='\t')
    n += 1
print()

# Same output, generating multiples of 3 directly instead of testing each number.
n = 1
counter = 1
while n + 3 < 50:
    n = 3 * counter
    counter += 1
    print(n, end='\t')
print()

# Cumulative sum of 1-10.
n = 1
res = 0
while n <= 10:
    res += n
    n += 1
print(res)

# Guessing game: guess a random number in 1-50; entering 'q' quits.
ran = random.randint(1, 50)
counter = 0
while True:
    guess = input('输入你猜的数字,输入 q 退出:')
    if guess == 'q':
        break
    guess = int(guess)
    counter += 1
    if guess > ran:
        print('大了')
    elif guess < ran:
        print('小了')
    else:
        print('正确,数字为%d' % ran)
        if counter == 1:
            print('恭喜一次猜对!')
        # BUG FIX: the original tiers were '1 < counter < 5' and
        # '5 < counter < 10', so counter == 5 skipped both and fell into the
        # worst-case message.
        elif 1 < counter <= 5:
            print('运气不错,%d次猜对' % counter)
        elif 5 < counter < 10:
            print('运气一般,%d次猜对' % counter)
        else:
            # BUG FIX: the original message here contained an offensive slur;
            # reworded to a neutral phrase.
            print('运气不佳,%d次才猜对' % counter)
        break
|
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import requests
from architect.monitor.client import BaseClient
from celery.utils.log import get_logger
logger = get_logger(__name__)
class GraphiteClient(BaseClient):
    """Monitor client that talks to a Graphite web endpoint.

    Provides the URL-building and response-processing hooks consumed by
    :class:`BaseClient` (``get_http_series_params`` / ``get_http_range_params``
    are inherited from it).
    """

    def __init__(self, **kwargs):
        super(GraphiteClient, self).__init__(**kwargs)

    def check_status(self):
        """Return True when the Graphite base URL is reachable.

        Any HTTP response counts as "up"; only a connection error marks the
        monitor down.
        """
        status = True
        logger.info('Checking status of monitor {} at {}'.format(self.name, self.base_url))
        try:
            requests.get(self.base_url,
                         cert=self.cert,
                         verify=self.verify)
        except requests.exceptions.ConnectionError as err:
            logger.error(err)
            status = False
        return status

    def get_series(self):
        # Fetch the raw series listing via the base-class HTTP helper, then
        # normalize it into a name -> datapoints mapping.
        data = self.get_http_series_params()
        return self.process_series(data)

    def get_series_params(self):
        # The metric-index endpoint takes no query parameters.
        return {}

    def get_series_url(self):
        # Graphite's flat metric-name index.
        url = '/metrics/index.json'
        return self.base_url + url

    def process_series(self, response):
        """Group series payload entries by metric name.

        NOTE(review): this expects a Prometheus-style envelope
        (``status``/``data`` keys, ``__name__`` labels), while Graphite's
        ``/metrics/index.json`` returns a flat list of names — confirm which
        backend response actually reaches this method.  The bare ``print``
        looks like leftover debugging.
        """
        print(response)
        if response['status'] == 'error':
            self.log_error(response['errorType'], response['error'])
            return {}
        else:
            data = response['data']
            response_data = {}
            for datum in data:
                name = datum.pop('__name__')
                if name not in response_data:
                    response_data[name] = []
                response_data[name].append(datum)
            return response_data

    def get_instant_url(self):
        # /render API with one 'target' per configured query, JSON output,
        # bounded by the monitor's start/end window.
        params = ["from={}".format(self.start),
                  "until={}".format(self.end)]
        params += ["target={}".format(query) for query in self.queries]
        url = '/render?format=json&{}'.format('&'.join(params))
        return self.base_url + url

    def get_range(self):
        # Fetch range data via the base-class HTTP helper and convert it to
        # a DataFrame.
        data = self.get_http_range_params()
        return self.process_range(data)

    def get_range_params(self):
        # Query parameters for the /render range request.
        return {
            'target': self.queries,
            'from': self.start,
            'until': self.end,
            'format': 'json'
        }

    def get_range_url(self):
        url = '/render'
        return self.base_url + url

    def process_range(self, data):
        """Convert /render JSON into a single inner-joined DataFrame.

        Each non-empty series becomes one column named after its query,
        indexed by the datapoints' second field (presumably the timestamp —
        TODO confirm against the Graphite render format).  Returns None when
        no series carried data.
        """
        np_data = [(series['query'],
                    np.array(series['datapoints'])) for series in data]
        series = [pd.DataFrame(series[:, 0],
                               index=series[:, 1],
                               columns=[query]) for query, series in np_data if series.any()]
        if len(series) > 0:
            return pd.concat(series, axis=1, join='inner')
        else:
            return None
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from unittest import mock
import fixtures
import requests
from neutronclient.common import exceptions
from neutronclient.tests.unit import test_cli20 as neutron_test_cli20
from gbpclient import gbpshell
from gbpclient.v2_0 import client as gbpclient
from six.moves import StringIO
# Re-exported constants/helpers from the neutron CLI test harness so the GBP
# tests can reference them unqualified.
API_VERSION = neutron_test_cli20.API_VERSION
TOKEN = neutron_test_cli20.TOKEN
ENDURL = neutron_test_cli20.ENDURL
capture_std_streams = neutron_test_cli20.capture_std_streams
end_url = neutron_test_cli20.end_url
class ParserException(Exception):
    """Raised when argument parsing in a test exits instead of returning."""
    pass
# Thin subclasses of the neutron test doubles; subclassing (rather than plain
# aliasing) keeps isinstance checks working and leaves room for GBP-specific
# overrides later.
class FakeStdout(neutron_test_cli20.FakeStdout):
    pass
class MyResp(neutron_test_cli20.MyResp):
    pass
class MyApp(neutron_test_cli20.MyApp):
    pass
class MyUrlComparator(neutron_test_cli20.MyUrlComparator):
    pass
class MyComparator(neutron_test_cli20.MyComparator):
    pass
class CLITestV20Base(neutron_test_cli20.CLITestV20Base):
    """Base test case wiring the neutron CLI test harness to the GBP shell/client."""
    shell = gbpshell
    client = gbpclient

    def setUp(self, plurals=None):
        """Create a GBP client against the fake endpoint with a fixed token."""
        super(CLITestV20Base, self).setUp()
        self.client = gbpclient.Client(token=TOKEN, endpoint_url=self.endurl)

    def _test_create_resource(self, resource, cmd, name, myid, args,
                              position_names, position_values,
                              tenant_id=None, tags=None, admin_state_up=True,
                              extra_body=None, cmd_resource=None,
                              parent_id=None, **kwargs):
        """Run a create-<resource> shell command and verify the POST it makes.

        Builds the expected request body from the positional/keyword pieces,
        mocks the HTTP layer with a canned response, runs the command, then
        asserts the POST hit the right path with the right body and auth
        token and that the id (and name, if given) appear on stdout.
        """
        if not cmd_resource:
            cmd_resource = resource
        body = {resource: {}, }
        if tenant_id:
            body[resource].update({'tenant_id': tenant_id})
        if tags:
            body[resource].update({'tags': tags})
        if extra_body:
            body[resource].update(extra_body)
        body[resource].update(kwargs)
        for i in range(len(position_names)):
            body[resource].update({position_names[i]: position_values[i]})
        # Canned server reply: just the id (plus name when supplied).
        ress = {resource:
                {self.id_field: myid}, }
        if name:
            ress[resource].update({'name': name})
        resstr = self.client.serialize(ress)
        # url method body
        resource_plural = self.client.get_resource_plural(cmd_resource)
        path = getattr(self.client, resource_plural + "_path")
        if parent_id:
            path = path % parent_id
        mock_body = MyComparator(body, self.client)
        cmd_parser = cmd.get_parser('create_' + resource)
        resp = (MyResp(200), resstr)
        with mock.patch.object(
            cmd, "get_client", return_value=self.client
        ) as mock_get_client, mock.patch.object(
            self.client.httpclient, "request", return_value=resp
        ) as mock_request:
            gbpshell.run_command(cmd, cmd_parser, args)
            self.assert_mock_multiple_calls_with_same_arguments(
                mock_get_client, mock.call(), None)
            mock_request.assert_called_once_with(
                end_url(path), 'POST',
                body=mock_body,
                headers=neutron_test_cli20.ContainsKeyValue(
                    {'X-Auth-Token': TOKEN}))
        _str = self.fake_stdout.make_string()
        self.assertIn(myid, _str)
        if name:
            self.assertIn(name, _str)

    def check_parser_ext(self, cmd, args, verify_args, ext):
        """Parse args through an extension-augmented parser and verify results.

        NOTE(review): the base parser comes from ``self.cmd`` rather than the
        ``cmd`` argument — looks intentional in this harness, but confirm.
        """
        cmd_parser = self.cmd.get_parser('check_parser')
        cmd_parser = ext.get_parser(cmd_parser)
        stderr = StringIO()
        with fixtures.MonkeyPatch('sys.stderr', stderr):
            try:
                parsed_args = cmd_parser.parse_args(args)
            except SystemExit:
                # argparse calls sys.exit on bad input; surface the captured
                # stderr as a test-friendly exception instead.
                raise ParserException("Argument parse failed: %s" %
                                      stderr.getvalue())
        for av in verify_args:
            attr, value = av
            if attr:
                self.assertIn(attr, parsed_args)
                self.assertEqual(value, getattr(parsed_args, attr))
        return parsed_args
class ClientV2TestJson(CLITestV20Base):
    """JSON-format variant of the base CLI tests (all behavior inherited)."""
    pass
class CLITestV20ExceptionHandler(CLITestV20Base):
    """Tests for gbpclient.exception_handler_v20, which translates HTTP
    error responses into client-side exception instances."""
    def _test_exception_handler_v20(
            self, expected_exception, status_code, expected_msg,
            error_type=None, error_msg=None, error_detail=None,
            error_content=None):
        """Feed a synthetic error body through the handler and verify the
        raised exception's class, status_code and message.

        When error_content is None, a standard 'NeutronError' body is
        synthesized from error_type / error_msg / error_detail.
        """
        if error_content is None:
            error_content = {'NeutronError': {'type': error_type,
                                              'message': error_msg,
                                              'detail': error_detail}}
        e = self.assertRaises(expected_exception,
                              gbpclient.exception_handler_v20,
                              status_code, error_content)
        self.assertEqual(status_code, e.status_code)
        if expected_msg is None:
            # Mirror the handler's own formatting: message, then detail
            # joined on a new line when a detail string is present.
            if error_detail:
                expected_msg = '\n'.join([error_msg, error_detail])
            else:
                expected_msg = error_msg
        self.assertEqual(expected_msg, e.message)
    def test_exception_handler_v20_neutron_known_error(self):
        # TODO(Sumit): This needs to be adapted for GBP
        pass
    def test_exception_handler_v20_neutron_known_error_without_detail(self):
        # TODO(Sumit): This needs to be adapted for GBP
        pass
    def test_exception_handler_v20_unknown_error_to_per_code_exception(self):
        # Every mapped HTTP status should raise its specific exception class.
        for status_code, client_exc in list(
                exceptions.HTTP_EXCEPTION_MAP.items()):
            error_msg = 'Unknown error'
            error_detail = 'This is detail'
            self._test_exception_handler_v20(
                client_exc, status_code,
                error_msg + '\n' + error_detail,
                'UnknownError', error_msg, error_detail)
    def test_exception_handler_v20_neutron_unknown_status_code(self):
        # An unmapped status (501) falls back to NeutronClientException.
        error_msg = 'Unknown error'
        error_detail = 'This is detail'
        self._test_exception_handler_v20(
            exceptions.NeutronClientException, 501,
            error_msg + '\n' + error_detail,
            'UnknownError', error_msg, error_detail)
    def test_exception_handler_v20_bad_neutron_error(self):
        # A NeutronError body missing the expected keys is passed through
        # as the message untouched.
        error_content = {'NeutronError': {'unknown_key': 'UNKNOWN'}}
        self._test_exception_handler_v20(
            exceptions.NeutronClientException, 500,
            expected_msg={'unknown_key': 'UNKNOWN'},
            error_content=error_content)
    def test_exception_handler_v20_error_dict_contains_message(self):
        # A plain dict body with a 'message' key uses that message.
        error_content = {'message': 'This is an error message'}
        self._test_exception_handler_v20(
            exceptions.NeutronClientException, 500,
            expected_msg='This is an error message',
            error_content=error_content)
    def test_exception_handler_v20_error_dict_not_contain_message(self):
        # Without a 'message' key the handler formats "<status>-<content>".
        error_content = {'error': 'This is an error message'}
        expected_msg = '%s-%s' % (500, error_content)
        self._test_exception_handler_v20(
            exceptions.NeutronClientException, 500,
            expected_msg=expected_msg,
            error_content=error_content)
    def test_exception_handler_v20_default_fallback(self):
        # Non-dict bodies also fall back to "<status>-<content>".
        error_content = 'This is an error message'
        expected_msg = '%s-%s' % (500, error_content)
        self._test_exception_handler_v20(
            exceptions.NeutronClientException, 500,
            expected_msg=expected_msg,
            error_content=error_content)
    def test_exception_status(self):
        """status_code: class default, constructor override, and a safe
        fallback for exception classes without an explicit code."""
        e = exceptions.BadRequest()
        self.assertEqual(e.status_code, 400)
        e = exceptions.BadRequest(status_code=499)
        self.assertEqual(e.status_code, 499)
        # SslCertificateValidationError has no explicit status_code,
        # but should have a 'safe' defined fallback.
        e = exceptions.SslCertificateValidationError()
        self.assertIsNotNone(e.status_code)
        e = exceptions.SslCertificateValidationError(status_code=599)
        self.assertEqual(e.status_code, 599)
    def test_connection_failed(self):
        """A requests ConnectionError surfaces as ConnectionFailed with a
        usable (fallback) status_code."""
        self.client.httpclient.auth_token = 'token'
        excp = requests.exceptions.ConnectionError('Connection refused')
        with mock.patch.object(self.client.httpclient, "request",
                               side_effect=excp) as mock_request:
            error = self.assertRaises(exceptions.ConnectionFailed,
                                      self.client.get, '/test')
        mock_request.assert_called_once_with(
            end_url('/test'), 'GET',
            body=None,
            headers=neutron_test_cli20.ContainsKeyValue(
                {'X-Auth-Token': 'token'}))
        # NB: ConnectionFailed has no explicit status_code, so this
        # tests that there is a fallback defined.
        self.assertIsNotNone(error.status_code)
|
def check_armstrong(num1, num2):
    """Print every Armstrong number in the inclusive range [num1, num2].

    An Armstrong (narcissistic) number equals the sum of its digits each
    raised to the power of the digit count (e.g. 153 = 1**3 + 5**3 + 3**3).

    :param num1: start of the range (inclusive)
    :param num2: end of the range (inclusive)
    :return: list of the Armstrong numbers found, in ascending order
             (also printed, preserving the original behavior)
    """
    found = []
    for num in range(num1, num2 + 1):
        length = len(str(num))
        # Avoid shadowing the builtin 'sum'; iterate digits directly
        # instead of peeling them off with // and %.
        digit_power_sum = sum(int(digit) ** length for digit in str(num))
        if digit_power_sum == num:
            print(num)
            found.append(num)
    return found
# Interactive driver: read an inclusive range from stdin and print its
# Armstrong numbers. Raises ValueError on non-integer input.
num1 = int(input("Enter the starting number: "))
num2 = int(input("Enter the ending number: "))
check_armstrong(num1,num2)
# Generated by Django 2.2.5 on 2019-09-08 11:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema: MIGD, SpeedMentoring and Workshops tables, each
    holding the same basic contact fields plus one free-text payload
    column (query / review / work).

    Auto-generated by Django 2.2.5; avoid hand-editing field definitions.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='MIGD',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.TextField(max_length=50)),
                ('age', models.TextField()),
                ('email', models.EmailField(max_length=254)),
                ('phone_no', models.TextField(max_length=10)),
                ('query', models.TextField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='SpeedMentoring',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.TextField(max_length=50)),
                ('age', models.TextField()),
                ('email', models.EmailField(max_length=254)),
                ('phone_no', models.TextField(max_length=10)),
                ('review', models.TextField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='Workshops',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.TextField(max_length=50)),
                ('age', models.TextField()),
                ('email', models.EmailField(max_length=254)),
                ('phone_no', models.TextField(max_length=10)),
                ('work', models.TextField(max_length=100)),
            ],
        ),
    ]
|
# Add two numbers and print the result.
x = 2
y = 7
answer = x + y  # renamed from the misspelled 'anser'
print(answer)
|
# -*- coding: utf-8 -*-
"""
Django settings for yysite project.
Generated by 'django-admin startproject' using Django 1.10.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Directory
# Created eagerly at import time so the file-based cache locations and
# the RotatingFileHandler in LOGGING can assume the directories exist.
LOG_DIR = os.path.join(BASE_DIR, 'log')
VAR_DIR = os.path.join(BASE_DIR, 'var')
os.makedirs(LOG_DIR, exist_ok=True)
os.makedirs(VAR_DIR, exist_ok=True)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): the key is committed here in source; rotate it and load it
# from the environment before any production deployment.
SECRET_KEY = 'bqg_=(8))my1ice*5loz*z(6bv(71^xi@zn)u@&9b95&2kiev7'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'yyfeed',
    # 'jet.dashboard',
    'jet',
    'django_cron',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'yysite.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'yysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# NOTE(review): credentials are hard-coded; consider loading them from the
# environment. The top-level 'CHARSET' key is not a Django database
# setting -- the effective charset comes from OPTIONS['charset'].
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'django',
        'USER': 'django',
        'PASSWORD': 'django',
        'CHARSET': 'utf8mb4',
        'OPTIONS': {
            'charset': 'utf8mb4',
            'init_command': "SET sql_mode='STRICT_TRANS_TABLES'",
        },
    },
    # 'sqlite': {
    #     'ENGINE': 'django.db.backends.sqlite3',
    #     'NAME': os.path.join(VAR_DIR, 'db.sqlite3'),
    # },
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
# Logging
# dictConfig-style configuration: colorized console output plus an
# (unreferenced by default) rotating file handler.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        'console': {
            '()': 'yyutil.logging.ColorfulFormatter',
            'format': '%(asctime)s [%(threadName)s] %(levelname)-5s %(name)s - %(message)s',
            'datefmt': '%H:%M:%S',
        },
        'file': {
            'format': '%(asctime)s [%(process)d.%(threadName)s] %(levelname)-5s %(name)s - '
                      '%(message)s',
            'datefmt': '%Y-%m-%d %H:%M:%S',
        },
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'console',
            'stream': 'ext://sys.stdout',
        },
        'file': {
            'class': 'logging.handlers.RotatingFileHandler',
            'formatter': 'file',
            'filename': os.path.join(LOG_DIR, 'application.log'),
            'maxBytes': 10 * 1024 * 1024,
            'backupCount': 5,
        },
    },
    'root': {
        'handlers': ['console'],
        # NOTE(review): 'TRACE' is not a standard logging level; presumably
        # yyutil.logging registers it -- confirm, otherwise dictConfig
        # will reject this value.
        'level': 'TRACE',
    },
    # Fix: per-logger overrides must live under the 'loggers' key.
    # Previously 'PIL.Image' sat at the top level of this dict, where
    # logging.config.dictConfig silently ignores it.
    'loggers': {
        'PIL.Image': {
            'level': 'WARNING',
        },
    },
}
# Cache
# File-based default cache stored under var/.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': os.path.join(VAR_DIR, 'cache'),
    }
}
# Notify
ADMINS = [('MoonFruit', 'dkmoonfruit@gmail.com')]
# Email
# NOTE(review): the console backend is active, so the SMTP settings below
# are dormant until EMAIL_BACKEND is switched; EMAIL_HOST_PASSWORD is
# intentionally empty here and must be supplied for real SMTP delivery.
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 465
EMAIL_USE_SSL = True
EMAIL_HOST_USER = 'dkmoonfruit@gmail.com'
EMAIL_HOST_PASSWORD = ''
DEFAULT_FROM_EMAIL = 'dkmoonfruit@gmail.com'
SERVER_EMAIL = 'dkmoonfruit@gmail.com'
# Cron Job
# http://django-cron.readthedocs.io/en/latest/
CRON_CLASSES = [
    'django_cron.cron.FailedRunsNotificationCronJob',
    'yyfeed.cron.HearthstoneJob',
    'yyfeed.cron.OoxxJob',
    'yyfeed.cron.IAppsJob',
    'yyfeed.cron.SmzdmJob',
    'yyfeed.cron.TtrssJob',
    # 'yyfeed.cron.RosiyyJob',
]
FAILED_RUNS_CRONJOB_EMAIL_PREFIX = "[Django] "
DJANGO_CRON_DELETE_LOGS_OLDER_THAN = 15
# YYFeed
# Factory spec consumed by the yyfeed app -- '()' names the class to build.
YYFEED_CACHE = {
    '()': 'yyutil.cache.FileCache',
    'filename': os.path.join(VAR_DIR, 'cache.pickle'),
}
|
# -*- coding:utf-8 -*-
"""
@author:zhouqiuhong
@file:__init__.py.py
@time:2018/7/27 002714:59
""" |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 14 20:45:36 2018
@author: Daniel Salazar
"""
"""
This is the preprocessing portion:
- read the dataset 'usa-training.json'
- removes the following columns: rating, year, imdb, country, director
- reorganizes the columns logically
- removes any rows that have a genre which doesn't match the allowable types
- removes any rows that have a country other than USA
- separate the genre column
- isolate only year in releasedate
- removes any symbols from the dataset
- join all the columns into one
- concatenate labels back into dataframe
- saves the test and train sets to csv for next stage
"""
import pandas as pd
import re
import unicodedata
# Pandas display options for interactive inspection only; they do not
# affect the written output files.
pd.set_option('display.notebook_repr_html', False)
pd.set_option('display.max_columns', 7)
pd.set_option('display.max_rows', 10)
pd.set_option('display.width', 60)
#read json into dataframe
# NOTE(review): despite the comment above (and the module docstring
# mentioning a .json file), this reads a CSV file.
df = pd.read_csv('usa-training.csv')
# filter out letters from string
# filter out letters from string
def clean_feature(raw_feature):
    """Normalize a raw text feature to lowercase ASCII words.

    Accents are stripped via NFKD decomposition, every character other
    than letters, digits and spaces is removed, and the remaining words
    are lowercased and re-joined with single spaces.

    Works on both Python 2 (str/unicode) and Python 3 (str/bytes); the
    original version only handled the Python 2 ``unicode`` type.

    :param raw_feature: text value from the dataframe
    :return: cleaned, space-separated string
    """
    text = raw_feature
    if not isinstance(text, bytes):
        # Decompose accented characters and drop the non-ASCII remainder.
        text = unicodedata.normalize('NFKD', text).encode('ascii', 'ignore')
    text = text.decode('ascii', 'ignore')
    letters_only = re.sub(r"[^a-zA-Z0-9 ]", "", text)
    # lower + split + join collapses runs of whitespace to single spaces.
    return " ".join(letters_only.lower().split())
# save dataframe to csv
# save dataframe to csv
def save_df(df_, filename):
    """Persist *df_* as UTF-8 encoded CSV to *filename* (path or buffer)."""
    df_.to_csv(filename, encoding='utf-8')
# remove country and releasedate columns
df = df.drop(columns=['country', 'releasedate'])
# convert numbers to string so they can be cleaned like the text columns
df[['year', 'rating']] = df[['year', 'rating']].astype(str)
# remove all symbols from every column.
# Fix: Series.apply returns a NEW Series -- the original code discarded
# the result, so no cleaning actually happened; assign it back.
for col in df.columns:
    df[col] = df[col].apply(clean_feature)
# reorganize dataframe
columnTitles = ['title', 'director', 'actor1', 'actor2', 'year', 'rating', 'genre']
df = df.reindex(columns=columnTitles)
# save dataframe
save_df(df, 'clean_data.csv')
|
from django.contrib import admin
from .models import Book
from .models import Editor
from .models import LiteraryGenre
# Expose the bookstore models in the Django admin with default ModelAdmins.
admin.site.register(Book)
admin.site.register(Editor)
admin.site.register(LiteraryGenre)
|
import picamera
import time
# Record a short clip from the Raspberry Pi camera.
camera = picamera.PiCamera()
camera.resolution = (320, 160)  # width x height in pixels
camera.rotation = 180           # flip image (camera mounted upside-down?)
camera.start_preview()
# Capture 5 seconds of raw H.264 to disk, then shut everything down.
camera.start_recording('video.h264')
time.sleep(5)
camera.stop_recording()
camera.stop_preview()
# sudo apt-get install -y gpac
# MP4Box -fps 30 -add video.h264 video.mp4
|
class Solution(object):
    def rotate(self, matrix):
        """
        Rotate an n x n matrix 90 degrees clockwise, in place.

        :type matrix: List[List[int]]
        :rtype: None Do not return anything, modify matrix in-place instead.
        """
        # Process one square "ring" (layer) at a time, cycling four
        # elements per step so no auxiliary matrix is needed.
        n = len(matrix)
        for layer in range(n // 2):
            last = n - 1 - layer
            for i in range(layer, last):
                offset = i - layer
                saved_top = matrix[layer][i]
                # left column -> top row
                matrix[layer][i] = matrix[last - offset][layer]
                # bottom row -> left column
                matrix[last - offset][layer] = matrix[last][last - offset]
                # right column -> bottom row
                matrix[last][last - offset] = matrix[i][last]
                # saved top -> right column
                matrix[i][last] = saved_top
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
'''Copyright (C) 2018, Nudt, JingshengTang, All Rights Reserved
#Author: Jingsheng Tang
#Email: mrtang@nudt.edu.cn
# This gui program employs the vertical synchronization mode on the basis of using the directx
# graphic driver. It makes the program update the drawing synchornously with the monitor, thus
# to accurately contorl the stimulus graphics. It is similar to the sychtoolbox of Matlab. On
# this basis, the stimulus trigger event is also set synchronously with the actual drawing.
# Since the vertical synchronization mode is used, the graphic user interface is fullscreen.
'''
from guiengine import GuiEngine
from multiprocessing import Queue
from multiprocessing import Event
import multiprocessing
import time
# GUI layout spec: a fullscreen 60 Hz screen plus one 13 Hz flickering
# 'sinBlock' cue stimulus (consumed by GuiEngine).
layout = {'screen':{'size':(200,200),'color':(0,0,0),'type':'fullscreen',
          'Fps':60,'caption':'this is an example'},
          'cue':{'class':'sinBlock','parm':{'size':(100,100),'position':(100,100),
          'frequency':13,'visible':True}}}
def example1():
    """Run the GUI engine synchronously in the current (main) process."""
    gui = GuiEngine(layout, Queue(), Event(), Queue())
    gui.StartRun()
def proc(layout, Q_c2g, E_g2p, Q_g2s):
    """Process target: build a GuiEngine from the given IPC handles and run it."""
    engine = GuiEngine(layout, Q_c2g, E_g2p, Q_g2s)
    engine.StartRun()
def example2():
    """Run the GUI engine in a child process so the caller stays free."""
    Q_c2g = Queue()
    E_g2p = Event()
    Q_g2s = Queue()
    process = multiprocessing.Process(target=proc,args=(layout,Q_c2g,E_g2p,Q_g2s))
    process.start()
    # Reference protocol for driving the cue from the parent (kept as-is):
    # while True:
    #     if E_g2p.is_set():break
    # Q_c2g.put([{'cue':{'start':True}},{''}])
    # time.sleep(5)
    # Q_c2g.put([{'cue':{'start':False}},{''}])
    # time.sleep(1)
    print 'main process exit'
if __name__ == '__main__':
    # Default demo: run the engine in a separate process.
    example2()
|
import urllib2
import maya.cmds as cmds
import os
import zipfile
import shutil
from . import calabash_menu as cm
# Maya user temp dir -- downloaded archives are staged here.
temp = cmds.internalVar(utd=True)
this_path = os.path.normpath(os.path.dirname(__file__))
this_file = os.path.splitext(os.path.basename(__file__))
# Parent of this script's directory; version.md lives here.
script_path = os.path.dirname(this_path)
def check_version():
    """Compare the local version.md against the copy on GitHub.

    The version number is parsed as the last three characters of the
    file. Returns True when no local version file exists or the remote
    version is newer -- i.e. an update is needed.
    """
    version_file = os.path.join(script_path, 'version.md')
    version_url = 'https://raw.githubusercontent.com/thomasjmoore/Calabash-Tools/master/src/calabash/scripts/version.md'
    # Close the HTTP response instead of leaking the handle.
    response = urllib2.urlopen(version_url)
    try:
        read_version = int(response.read()[-3:])
    finally:
        response.close()
    if not os.path.isfile(version_file):
        # No local install record: treat as out of date.
        return True
    # 'with' closes the file promptly (the original leaked the handle).
    with open(version_file, 'r') as handle:
        current_version = int(handle.read()[-3:])
    return current_version < read_version
def download():
    """Download the Calabash-Tools master archive into the temp dir.

    :return: path of the downloaded zip file
    """
    url = 'https://github.com/thomasjmoore/Calabash-Tools/archive/master.zip'
    download_file = "master.zip"
    file_name = os.path.join(temp, download_file)
    response = urllib2.urlopen(url)
    try:
        payload = response.read()
    finally:
        response.close()
    # Fix: write in binary mode. Text mode ('w') corrupts the zip on
    # Windows (newline translation), which is where Maya commonly runs.
    with open(file_name, 'wb') as out:
        out.write(payload)
    return file_name
def install(zip_file=""):
    """Unzip the downloaded archive and replace the installed calabash
    module directory and its .mod file.

    WARNING: destructive -- the current install directory is removed
    with shutil.rmtree before the new files are copied in.

    :param zip_file: path of the downloaded archive (from download())
    """
    calabash_path = os.path.dirname(script_path)
    module_path = os.path.dirname(calabash_path)
    # Remove the previous install (module file, then the whole package dir).
    if os.path.exists("%s.mod"%(calabash_path)):
        os.remove("%s.mod"%(calabash_path))
    if os.path.exists(calabash_path):
        shutil.rmtree(calabash_path)
    zipname = os.path.splitext(zip_file)
    unzipped_files = os.path.join(zipname[0], "Calabash-Tools-master", "src")
    # Clear any stale extraction from a previous attempt.
    if os.path.exists(unzipped_files):
        shutil.rmtree(unzipped_files)
    zip = zipfile.ZipFile(zip_file)
    zip.extractall(zipname[0])
    shutil.copy2("%s%s%s.mod"%(unzipped_files, os.path.sep,"calabash"), "%s%s%s.mod"%(module_path, os.path.sep,"calabash"))
    shutil.copytree("%s%s%s"%(unzipped_files, os.path.sep,"calabash"), "%s%s%s"%(module_path, os.path.sep,"calabash"))
    print("Calabash Tools Update Installed")
    # reload() is the Python 2 builtin; refreshes the in-session menu code.
    reload(cm)
def check():
    """Entry point: look for a newer version, confirm with the user via a
    Maya dialog, then download and install the update."""
    update = check_version()
    if not update:
        print("Calabash Tools are up to date")
        return
    cmds.warning("Update found")
    # Modal confirm dialog; returns the label of the pressed button.
    update = cmds.confirmDialog(title="Update found",
                                message="Install Update?",
                                button=["Update", "Cancel"],
                                defaultButton="Update",
                                cancelButton="Cancel",
                                dismissString="Cancel")
    if not update == "Update":
        cmds.warning("Action canceled")
        return
    zip_file = download()
    if not zip_file:
        cmds.warning("Download unsuccesful.")
        return
    install(zip_file)
"""Script to test the recommender system API
"""
import firefly
# Remote model endpoint on rorocloud; predict() issues an HTTP request.
api = firefly.Client("https://recommender-system-demo.rorocloud.io/")
result = api.predict(user_id=25)
print(result)
|
from django.shortcuts import render
from django.http import HttpRequest
from django.template import RequestContext
from datetime import datetime
def home(request):
    """Render the home page with title, current year and request/user info."""
    assert isinstance(request, HttpRequest)
    # render() builds a RequestContext internally; the old
    # context_instance argument was deprecated in Django 1.8 and removed
    # in 1.10, so pass a plain context dict instead.
    return render(
        request,
        "home.html",
        {
            'title': 'Home Page',
            'year': datetime.now().year,
            'request': request,
            'user': request.user,
        },
    )
def about(request):
    """Render the static About page with an empty context."""
    return render(request, 'about.html', {})
def contact(request):
    """Render the static Contact page with an empty context."""
    return render(request, 'contact.html', {})
# Create your views here.
|
# Generated by Django 2.2.1 on 2019-05-24 03:05
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial hospital-app schema: departments, doctors, patients, rooms,
    medicines, users, work shifts and treatment (Deal_method) records.

    Doctor/Work/Room/Patient/Deal_method carry CASCADE foreign keys back
    to Dept / Doctor / Patient. Auto-generated by Django 2.2.1; avoid
    hand-editing field definitions.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Dept',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('dept_name', models.CharField(max_length=20)),
                ('dept_manager', models.CharField(max_length=20)),
                ('dept_manager_telep', models.CharField(max_length=20)),
                ('dept_floor', models.CharField(max_length=20)),
            ],
        ),
        migrations.CreateModel(
            name='Doctor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('doctor_id', models.CharField(max_length=20)),
                ('doctor_name', models.CharField(max_length=20)),
                ('doctor_sex', models.CharField(max_length=20)),
                ('doctor_age', models.CharField(max_length=20)),
                ('doctor_telep', models.CharField(max_length=20)),
                ('doctor_position', models.CharField(max_length=20)),
                ('doctor_dept', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='MyApp.Dept')),
            ],
        ),
        migrations.CreateModel(
            name='Medicine',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('medicine_name', models.CharField(max_length=20)),
                ('medicine_num', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user_name', models.CharField(max_length=50)),
                ('user_passwd', models.CharField(max_length=20)),
            ],
        ),
        migrations.CreateModel(
            name='Work',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('work_doctor_name', models.CharField(max_length=20)),
                ('work_time', models.CharField(max_length=20)),
                ('work_doctor_dept', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='MyApp.Dept')),
                ('work_doctor_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='MyApp.Doctor')),
            ],
        ),
        migrations.CreateModel(
            name='Room',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('room_id', models.CharField(max_length=10)),
                ('room_patient_id', models.CharField(max_length=20)),
                ('room_patient_name', models.CharField(max_length=20)),
                ('room_patient_dept', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='MyApp.Dept')),
            ],
        ),
        migrations.CreateModel(
            name='Patient',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('patient_name', models.CharField(max_length=20)),
                ('patient_sex', models.CharField(max_length=10)),
                ('patient_age', models.CharField(max_length=10)),
                ('patient_telep', models.CharField(max_length=20)),
                ('patient_idcard', models.CharField(max_length=30)),
                ('patient_dept', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='MyApp.Dept')),
            ],
        ),
        migrations.CreateModel(
            name='Deal_method',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('deal_room_id', models.CharField(max_length=20)),
                ('deal_doctor_name', models.CharField(max_length=20)),
                ('medicine_detail', models.CharField(max_length=50)),
                ('diagnosis_time', models.CharField(max_length=20)),
                ('diagnosis_result', models.CharField(max_length=50)),
                ('doctor_suggestions', models.CharField(max_length=50)),
                ('deal_patient_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='MyApp.Patient')),
            ],
        ),
    ]
|
import xmlrpclib
class TimeoutServer(xmlrpclib.ServerProxy):
    """Timeout server for XMLRPC.
    XMLRPC + timeout - still a bit ugly - but at least gets rid of setdefaulttimeout
    inspired by http://stackoverflow.com/questions/372365/set-timeout-for-xmlrpclib-serverproxy
    (although their stuff was messy, this is cleaner)
    @see: http://stackoverflow.com/questions/372365/set-timeout-for-xmlrpclib-serverproxy
    """
    def __init__(self, *args, **kwargs):
        # Pop 'timeout' before delegating (ServerProxy doesn't accept it)
        # and route all requests through a timeout-aware transport.
        timeout = kwargs.pop("timeout", None)
        kwargs["transport"] = TimeoutTransport(timeout=timeout)
        xmlrpclib.ServerProxy.__init__(self, *args, **kwargs)
    def _set_timeout(self, timeout):
        """Change the timeout after construction, including any live socket."""
        # Name-mangled access to ServerProxy's private __transport attribute;
        # fragile across Python versions, but required pre-construction API.
        t = self._ServerProxy__transport
        t.timeout = timeout
        # If we still have a socket we need to update that as well.
        if hasattr(t, "_connection") and t._connection[1] and t._connection[1].sock:
            t._connection[1].sock.settimeout(timeout)
class TimeoutTransport(xmlrpclib.Transport):
    """Transport that stamps a socket timeout onto each new connection."""
    def __init__(self, *args, **kwargs):
        # Pop 'timeout' before delegating; the base Transport doesn't know it.
        self.timeout = kwargs.pop("timeout", None)
        xmlrpclib.Transport.__init__(self, *args, **kwargs)
    def make_connection(self, *args, **kwargs):
        """Create the HTTP connection, applying the configured timeout."""
        conn = xmlrpclib.Transport.make_connection(self, *args, **kwargs)
        if self.timeout is not None:
            conn.timeout = self.timeout
        return conn
# Demo: call a local XMLRPC server with a 3-minute timeout.
timeout = 120 + 60
url = "http://{0}:{1}".format("localhost", 9090)
server = TimeoutServer(url, allow_none=True,
                       timeout=timeout)
server._set_timeout(timeout)
print server.list_contents('./')
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as DjangoUserAdmin
from django.contrib.auth.models import Group
from .models import User
from .forms import AccountUserChangeForm, AccountUserCreationForm
class UserAdmin(DjangoUserAdmin):
    """Admin for the custom email-based User model: email replaces
    username throughout the change/add forms, lists and search."""
    # Change-form layout, grouped into titled sections.
    fieldsets = (
        (
            None,
            {
                'fields': ('email', 'password')
            }
        ),
        (
            u'Personal info',
            {'fields': ('first_name', 'last_name')}
        ),
        (
            u'Permissions',
            {'fields': ('is_active', 'is_staff', 'is_superuser')}
        ),
        (
            u'Important dates',
            {'fields': ('last_login', 'created_at')}
        ),
    )
    # Add-form layout: email plus the two password-confirmation fields.
    add_fieldsets = (
        (
            None,
            {
                'classes': ('wide',),
                'fields': ('email', 'password1', 'password2')
            }
        ),
    )
    readonly_fields = ('password', 'created_at',)
    list_display = ('email',)
    list_filter = ('is_active',)
    search_fields = ('email',)
    ordering = ('email',)
    # Custom forms handle the email-based account model.
    form = AccountUserChangeForm
    add_form = AccountUserCreationForm
# Register the custom User admin; Group is unused in this project.
admin.site.register(User, UserAdmin)
admin.site.unregister(Group)
|
#!/usr/bin/python3
# could wirte method explain
def changeme(mylist):
    """Append a nested list to *mylist* in place and print it.

    Demonstrates that list arguments are passed by reference: the
    caller's list is mutated.
    """
    mylist.append([1, 2, 3, 4])
    print("list value: ", mylist)
# The callee's append is visible here afterwards (pass-by-reference demo).
mylist = [10, 20, 30]
changeme(mylist)
print("list value: ",mylist)
import os.path
from mitmproxy import ctx
from mitmproxy import io
from mitmproxy import exceptions
class ReadFile:
    """
    An addon that handles reading from file on startup.
    """
    def __init__(self):
        # Path of the flow dump to load; set by configure(), cleared after use.
        self.path = None
    def load_flows_file(self, path: str) -> int:
        """Load flows from *path* into the master; return how many loaded.

        On a corrupted/unreadable file, logs (keeping any flows already
        loaded) and raises exceptions.FlowReadException.
        """
        path = os.path.expanduser(path)
        cnt = 0
        try:
            with open(path, "rb") as f:
                freader = io.FlowReader(f)
                for i in freader.stream():
                    cnt += 1
                    ctx.master.load_flow(i)
                return cnt
        except (IOError, exceptions.FlowReadException) as v:
            if cnt:
                # Partial read: keep the loaded prefix but warn the user.
                ctx.log.warn(
                    "Flow file corrupted - loaded %i flows." % cnt,
                )
            else:
                ctx.log.error("Flow file corrupted.")
            raise exceptions.FlowReadException(v)
    def configure(self, options, updated):
        # Remember the -r/--rfile option; actual loading is deferred to
        # running() so the proxy is fully started first.
        if "rfile" in updated and options.rfile:
            self.path = options.rfile
    def running(self):
        # One-shot: load the recorded flows, then clear self.path so a
        # second running() call is a no-op.
        if self.path:
            try:
                self.load_flows_file(self.path)
            except exceptions.FlowReadException as v:
                raise exceptions.OptionsError(v)
            finally:
                self.path = None
            ctx.master.addons.trigger("processing_complete")
|
def change_ip(interface, ip):
    '''Change an interfaces IP address.
    ARGS:
        @interface -- The interface to be changed.
        @ip -- The new IP address to use.
    RETURNS:
        None
    '''
    import sys
    # Fix: sys.path.append(..) was a SyntaxError -- the parent-directory
    # path must be the string '..'.
    sys.path.append('..')
    from run_command import run_command
    # NOTE(review): the command string is built from the raw arguments;
    # callers must pass trusted values (run_command's handling of shell
    # metacharacters is not visible here).
    run_command('sudo ifconfig ' + interface + ' ' + ip)
###########################################################################
#
# OpenOPC Gateway Service
#
# A Windows service providing remote access to the OpenOPC library.
#
# Copyright (c) 2007-2008 Barry Barnreiter (barry_b@users.sourceforge.net)
#
###########################################################################
import win32serviceutil
import win32service
import win32event
import servicemanager
import _winreg
import select
import socket
import os
import time
import OpenOPC
# Pyro is a hard requirement: the gateway is a Pyro object server.
try:
    import Pyro.core
    import Pyro.protocol
except ImportError:
    print 'Pyro module required (http://pyro.sourceforge.net/)'
    exit()
# Serve multiple OPC clients concurrently from a single daemon.
Pyro.config.PYRO_MULTITHREADED = 1
# Defaults; both may be overridden from the registry (see below).
opc_class = OpenOPC.OPC_CLASS
opc_gate_port = 7766
def getvar(env_var):
    """Read system enviornment variable from registry.

    Returns the value, or None when the key/value is missing or cannot
    be read.
    """
    try:
        key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SYSTEM\\CurrentControlSet\\Control\Session Manager\Environment',0,_winreg.KEY_READ)
        value, valuetype = _winreg.QueryValueEx(key, env_var)
        return value
    except OSError:
        # _winreg raises WindowsError (an OSError subclass) for missing
        # keys/values; catching OSError instead of a bare 'except' stops
        # unrelated bugs (e.g. NameError) from being silently swallowed.
        return None
# Get env vars directly from the Registry since a reboot is normally required
# for the Local System account to inherit these.
if getvar('OPC_CLASS'): opc_class = getvar('OPC_CLASS')
if getvar('OPC_GATE_PORT'): opc_gate_port = int(getvar('OPC_GATE_PORT'))
class opc(Pyro.core.ObjBase):
    """Factory object exposed over Pyro: creates and releases per-client
    OpenOPC instances and tracks connected clients."""
    def __init__(self):
        Pyro.core.ObjBase.__init__(self)
        # Per-client bookkeeping, keyed by Pyro object GUID.
        self._remote_hosts = {}
        self._init_times = {}
        self._tx_times = {}
    def get_clients(self):
        """Return list of server instances as a list of (GUID,host,time) tuples"""
        reg = self.getDaemon().getRegistered()
        hosts = self._remote_hosts
        init_times = self._init_times
        tx_times = self._tx_times
        # Registered entries whose value is None are the per-client objects.
        hlist = [(k, hosts[k] if hosts.has_key(k) else '', init_times[k], tx_times[k]) for k,v in reg.iteritems() if v == None]
        return hlist
    def create_client(self):
        """Create a new OpenOPC instance in the Pyro server"""
        opc_obj = OpenOPC.client(opc_class)
        base_obj = Pyro.core.ObjBase()
        base_obj.delegateTo(opc_obj)
        uri = self.getDaemon().connect(base_obj)
        # Back-references so the client object can later self-release.
        opc_obj._open_serv = self
        opc_obj._open_self = base_obj
        opc_obj._open_host = self.getDaemon().hostname
        opc_obj._open_port = self.getDaemon().port
        opc_obj._open_guid = uri.objectID
        remote_ip = self.getLocalStorage().caller.addr[0]
        try:
            remote_name = socket.gethostbyaddr(remote_ip)[0]
            self._remote_hosts[uri.objectID] = '%s (%s)' % (remote_ip, remote_name)
        except socket.herror:
            # Reverse DNS failed; record the bare IP only.
            self._remote_hosts[uri.objectID] = '%s' % (remote_ip)
        self._init_times[uri.objectID] = time.time()
        self._tx_times[uri.objectID] = time.time()
        return Pyro.core.getProxyForURI(uri)
    def release_client(self, obj):
        """Release an OpenOPC instance in the Pyro server"""
        self.getDaemon().disconnect(obj)
        del self._remote_hosts[obj.GUID()]
        del self._init_times[obj.GUID()]
        del self._tx_times[obj.GUID()]
        del obj
class OpcService(win32serviceutil.ServiceFramework):
    """Windows service wrapper that runs the Pyro daemon hosting opc."""
    _svc_name_ = "zzzOpenOPCService"
    _svc_display_name_ = "OpenOPC Gateway Service"
    def __init__(self, args):
        win32serviceutil.ServiceFramework.__init__(self, args)
        # Event signalled by SvcStop to end the serve loop in SvcDoRun.
        self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
    def SvcStop(self):
        """SCM stop request: report pending and signal the serve loop."""
        servicemanager.LogInfoMsg('\n\nStopping service')
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        win32event.SetEvent(self.hWaitStop)
    def SvcDoRun(self):
        """Main loop: serve Pyro requests until hWaitStop is signalled."""
        servicemanager.LogInfoMsg('\n\nStarting service on port %d' % opc_gate_port)
        daemon = Pyro.core.Daemon(port = opc_gate_port)
        daemon.connect(opc(), "opc")
        # Poll the stop event between 1-second select() waits on the
        # daemon's sockets, so shutdown stays responsive.
        while win32event.WaitForSingleObject(self.hWaitStop, 0) != win32event.WAIT_OBJECT_0:
            socks = daemon.getServerSockets()
            ins,outs,exs = select.select(socks,[],[],1)
            for s in socks:
                if s in ins:
                    daemon.handleRequests()
                    break
        daemon.shutdown()
if __name__ == '__main__':
    # Delegate install/start/stop/remove handling to pywin32's CLI helper.
    win32serviceutil.HandleCommandLine(OpcService)
|
"""
File: GAproject.py
--------------------
This file contains Glorili Alejandro's final project
for Stanford's Code In Place python course, Spring 2020.
Run on Python 3 or higher.
This program, along with the file "Drawing_Coordinates" and images directory,
has the user trace over dots on a canvas to make a drawing.
The user has a specified amount of time to trace over all the dots.
Once time is up, or the drawing is fully traced, the user is prompted to guess
the subject of the drawing. They are told if they are correct, then shown the
image of the real drawing. The user is asked if they want to continue, and the game
continues until they exit by typing anything but 'y' or 'Y'.
"""
import tkinter
import time
import random
from PIL import ImageTk
from PIL import Image
GAME_NAME = "Guess Your Drawing"           # window title
COORDS_FILE_NAME = 'Drawing_Coordinates'   # dot-coordinate data file
CANVAS_WIDTH = 600                         # pixels
CANVAS_HEIGHT = 600                        # pixels
PATH_SIZE = 5     # presumably the trail-square size used by draw_path -- defined below this view
DOT_SIZE = 10     # presumably the target-dot size -- confirm in draw_invisible_dots
ALLOWED_TIME = 30 # in seconds
DELAY = 1 / 75    # per-frame sleep in the animation loops, in seconds
def main():
    """Game loop: run drawing rounds until the user declines to continue."""
    # Create blank canvas according constants.
    canvas = make_canvas(CANVAS_WIDTH, CANVAS_HEIGHT, GAME_NAME)
    # Continue playing drawing activity until user types in value for keep_playing
    # as something other than y or Y for yes. Initialized with 'y' to enter loop
    keep_playing = 'y'
    while keep_playing == 'y' or keep_playing == 'Y':
        keep_playing = play_drawing_activity(canvas)
    # Outro
    print('Okay, byeeee!')
    canvas.mainloop()
def play_drawing_activity(canvas):
    """Run one round of the game on *canvas*.

    Reveals dots one at a time for the user to trace with the cursor,
    enforces the ALLOWED_TIME limit, asks for a guess of the drawing's
    subject, reveals the full image, and returns the user's answer to
    "play again?" (the keep-playing string checked by main()).
    """
    # Clear anything that might be on the canvas.
    canvas.delete("all")
    # Draw dots that make up the drawing with their state='hidden' (invisible to viewer).
    # Save dictionary of shape, with shape name as the key, and the list of dots as its value.
    invisible_shape = draw_invisible_dots(canvas)
    # Save the string of the shapes's name. Get dict's keys as a list, then the first list item.
    shape_name = list(invisible_shape.keys())[0]
    # Create hidden image of the full shape, to be revealed at the end
    image = ImageTk.PhotoImage(Image.open("images/"+shape_name+".jpg"))
    full_shape = canvas.create_image(0, 0, anchor="nw", image=image, state='hidden')
    # Get list of dots in order they will be drawn
    invisible_dots = invisible_shape[shape_name]
    # Reveal first dot, at index 0
    dot_index = 0
    target = reveal_next_dot(canvas, invisible_dots, dot_index)
    # Capture cursor coordinates to enter while loop
    mouse_x = canvas.winfo_pointerx() - canvas.winfo_rootx()
    mouse_y = canvas.winfo_pointery() - canvas.winfo_rooty()
    # Allow the cursor to move to the first dot without leaving a trail
    while not mouse_on_target(canvas, mouse_x, mouse_y, target):
        # Each loop checks if cursor is on target, exits when cursor meets target
        mouse_x = canvas.winfo_pointerx() - canvas.winfo_rootx()
        mouse_y = canvas.winfo_pointery() - canvas.winfo_rooty()
        canvas.update()
        time.sleep(DELAY)
    # Determine start time and end time of drawing activity
    start_time = time.time()
    end_time = start_time + ALLOWED_TIME
    # Continue drawing activity until end time OR all dots are traced over
    while time.time() < end_time:
        # Capture cursor coordinates
        mouse_x = canvas.winfo_pointerx() - canvas.winfo_rootx()
        mouse_y = canvas.winfo_pointery() - canvas.winfo_rooty()
        # Draw squares that form the path
        draw_path(canvas, mouse_x, mouse_y)
        # Reveal next dot when cursor meets the current target
        if mouse_on_target(canvas, mouse_x, mouse_y, target):
            make_dot_black(canvas, invisible_dots, dot_index)
            # Change index, to specify next dot
            dot_index += 1
            # Exit loop if the index reaches the length of the invisible_dots list
            if dot_index == len(invisible_dots):
                break
            # Reveal next dot in invisible_dots, specified by the updated index
            # Save revealed dot as the next target for the loop
            target = reveal_next_dot(canvas, invisible_dots, dot_index)
        canvas.update()
        time.sleep(DELAY)
    # Ask user for a guess on what they just drew
    guess = input('What did you draw?: ')
    # Let user know if they got it right or wrong
    if shape_name.upper() in guess.upper():
        print("That's right! It's a", shape_name + '!')
    else:
        print("That's not it! It's a", shape_name + '!')
    # Show image of full shape behind drawing
    canvas.itemconfig(full_shape, state='normal')
    # Ask user if they'd like to continue, and return the response
    keep_playing = input('Wanna try another one? Type Y or N: ')
    return keep_playing
def make_dot_black(canvas, dot_list, index):
    """
    Fill the dot at position *index* of *dot_list* with black and return it.
    :param canvas: canvas for drawings
    :param dot_list: list of dots that make up the drawing
    :param index: which dot to recolor
    :return: the dot at *index*, now with fill='black'
    """
    dot = dot_list[index]
    canvas.itemconfig(dot, fill='black')
    return dot
def reveal_next_dot(canvas, dot_list, index):
    """
    Switch the dot at position *index* of *dot_list* to state='normal'
    (visible) and return it.
    :param canvas: canvas for drawings
    :param dot_list: list of dots that make up the drawing
    :param index: which dot to reveal
    :return: the dot at *index*, now with state='normal'
    """
    dot = dot_list[index]
    canvas.itemconfig(dot, state='normal')
    return dot
def draw_invisible_dots(canvas):
    """
    This function reads the coordinates file, usually "Drawing_Coordinates", line by line.
    It then draws 'hidden' dots on the canvas. Their state will change to
    'normal' in the animation loop according to the user's actions.
    It returns one randomly chosen shape's dots in the order they will be revealed.
    *IMPT* Specifics about the coordinates file:
    FILE MUST END WITH TWO AND ONLY TWO BLANK LINES!
    SHAPE NAME LINE MUST BE AS FOLLOWS: "Shape, <shape-name-here>" ex: "Shape, Flower"
    MUST HAVE ONE AND ONLY ONE BLANK LINE BETWEEN EACH SHAPE!
    :param canvas: canvas for drawings
    :return: one element of shapes_list -- NOTE(review): despite the original
             docstring, this is a dict {shape_name: [dot ids]}, not a bare
             list; confirm how the caller unpacks it.
    """
    shapes_list = []
    dot_dict = {}
    label = 'name'
    # NOTE(review): the file handle from open() is never closed; a
    # `with open(COORDS_FILE_NAME) as f:` block would be safer.
    for line in open(COORDS_FILE_NAME):
        # checks if line is blank.
        if line.strip() == "":
            # A blank line flushes the current shape -- this is why the file
            # must end with blank lines: they flush the final shape.
            shapes_list.append(dot_dict)
            continue
        # Look at line in loop, get rid of white space at the ends, and split at ', '.
        content = (line.strip().split(', '))
        if content[0] == 'Shape':
            # Start a new shape keyed by its name.
            dot_dict = {}
            label = content[1]
            dot_dict[label] = []
        else:
            # Coordinate line: create a hidden oval dot at (x, y).
            dot_x = int(content[0])
            dot_y = int(content[1])
            dot_dict[label].append(canvas.create_oval(dot_x, dot_y, dot_x + DOT_SIZE, dot_y + DOT_SIZE, state='hidden', fill='red', outline='white'))
    # Pick one of the parsed shapes at random.
    chosen_shape_num = random.randint(0, (len(shapes_list)-1))
    return shapes_list[chosen_shape_num]
def mouse_on_target(canvas, current_x, current_y, target):
    """
    Report whether the cursor lies strictly inside *target*'s bounding box.
    :param canvas: canvas for drawings
    :param current_x: current x location of the cursor
    :param current_y: current y location of the cursor
    :param target: the shape the cursor position is compared against
    :return: True when both coordinates fall inside the target's box
    """
    # canvas.coords returns [x_min, y_min, x_max, y_max] for the item.
    x_min, y_min, x_max, y_max = canvas.coords(target)
    inside_horizontally = x_min < current_x < x_max
    inside_vertically = y_min < current_y < y_max
    return inside_horizontally and inside_vertically
def draw_path(canvas, mouse_x, mouse_y):
    """
    Stamp a black PATH_SIZE x PATH_SIZE square onto the canvas at the
    cursor position, leaving the drawn trail.
    :param canvas: canvas for drawings
    :param mouse_x: mouse's x location
    :param mouse_y: mouse's y location
    """
    corner_x = mouse_x + PATH_SIZE
    corner_y = mouse_y + PATH_SIZE
    canvas.create_rectangle(mouse_x, mouse_y, corner_x, corner_y, fill='black')
def make_canvas(width, height, title=None):
    """
    DO NOT MODIFY
    Creates and returns a drawing canvas
    ready for drawing.
    """
    # NOTE(review): `objects` is never used; left in place per DO NOT MODIFY.
    objects = {}
    top = tkinter.Tk()
    top.minsize(width=width, height=height)
    if title:
        top.title(title)
    # +1 so lines drawn exactly at (width, height) are not clipped.
    canvas = tkinter.Canvas(top, width=width + 1, height=height + 1)
    canvas.pack()
    return canvas
# Entry point: run the game only when executed as a script, not on import.
if __name__ == '__main__':
    main()
|
import model_layers
import torch
from torch import nn
class encoder(nn.Module):
    """Convolutional encoder mapping an image batch to a 1000-d latent code."""

    def __init__(self):
        super(encoder, self).__init__()
        # 3 input channels -> 12 feature maps.
        self.convblock1 = model_layers.convblock(3,12)
        # NOTE(review): downsamp1 is built as (12, 16) yet convblock2 below
        # takes 12 input channels -- the second downsamp argument is
        # presumably not an output-channel count; confirm model_layers API.
        self.downsamp1 = model_layers.downsamp(12, 16)
        self.convblock2 = model_layers.convblock(12,24)
        self.downsamp2 = model_layers.downsamp(24, 8)
        # Flatten and project to the 1000-d latent space; assumes the feature
        # map is 24 x 8 x 8 at this point -- TODO confirm expected input size.
        self.bottleneck = nn.Sequential(nn.Flatten(),
                                        nn.Linear(24 * 8 * 8, 1000)
                                        )

    def forward(self, x):
        # Two conv/downsample stages, then the dense bottleneck.
        x = self.convblock1(x)
        x = self.downsamp1(x)
        x = self.convblock2(x)
        x = self.downsamp2(x)
        x = self.bottleneck(x)
        return x
class decoder(nn.Module):
    """Decoder mapping a 1000-d latent code back to a 3-channel image batch."""

    def __init__(self, scales):
        super(decoder, self).__init__()
        # Project back to a 24 x 8 x 8 feature map, mirroring the encoder.
        self.bottleneck = nn.Sequential(nn.Linear(1000, 24 * 8 * 8),
                                        model_layers.reshape([-1,24,8,8])
                                        )
        # scales[0] / scales[1] are presumably spatial upsampling factors
        # consumed by model_layers.Upsample -- TODO confirm.
        self.up1 = model_layers.Upsample(24,12, scales[0])
        self.up2 = model_layers.Upsample(12,3, scales[1])

    def forward(self,x):
        x = self.bottleneck(x)
        x = self.up1(x)
        x = self.up2(x)
        return x
class autoencoder(nn.Module):
    """Encoder + decoder pair; all decoded outputs are clamped to [0, 1]."""

    def __init__(self, scales):
        super(autoencoder, self).__init__()
        self.encoder = encoder()
        self.decoder = decoder(scales)

    def encode(self, x):
        return self.encoder(x)

    def decode(self, x):
        return torch.clamp(self.decoder(x), min = 0, max = 1)

    def forward(self, x):
        # Identical to the encode -> decode pipeline: decode() already
        # applies the [0, 1] clamp exactly once.
        return self.decode(self.encode(x))
|
import pandas as pd
import numpy as np
import pyrebase
import os
# NOTE(review): hardcoded Firebase credentials committed to source control --
# move these to environment variables or an untracked config file.
config = {
    "apiKey": "AIzaSyAXtE0fQeJSN8r1Omtyx5vTlsdyYrF9XpE",
    "authDomain": "tympass-32736.firebaseapp.com",
    "databaseURL" : "https://tympass-32736.firebaseio.com",
    "projectId": "tympass-32736",
    "storageBucket": "tympass-32736.appspot.com",
    "messagingSenderId": "990276104410",
    "appId": "1:990276104410:web:a6d956ded09fc3c958b5e3",
    "measurementId": "G-7HF9TQ5QC1"
}
firebase = pyrebase.initialize_app(config)
storage = firebase.storage()
# Remote location of the spreadsheet inside Firebase storage.
path_on_cloud = "data/demo.xlsx"
#path_local=r'D:\lol\demo.xlsx';
#storage.child(path_on_cloud).put(path_local)
# NOTE(review): chdir to the current directory is a no-op.
d = os.getcwd()
os.chdir(d)
# Download the spreadsheet locally and read its 'Name' column.
storage.child(path_on_cloud).download("new.xlsx")
excel_data_df = pd.read_excel('new.xlsx')
x=[' ']
#df# print whole sheet data
x = excel_data_df['Name'].tolist()
name = input("Enter your name - ")
#x.append(' ')
i=0
p=0
a=0
# Linear scan for the entered name; p flags whether it was found.
# NOTE(review): equivalent to `p = 1 if name in x else 0`.
for i in x:
    if(i==name):
        p=p+1
        break;
# Clean up the temporary download before reporting.
os.remove("new.xlsx")
if(p==1):
    print("Your name is in our records")
else:
    print("Your name is not in our records")
|
# coding: utf-8
####################################
# Example: build a System.Windows.Forms form in IronPython and hand it to C#.
####################################
# Declarations for using the .NET Framework class libraries.
# It is not yet clear which base libraries are sufficient in which cases,
# so several are imported here just in case.
import System
import clr
import sys
# When specifying a DLL by full path from a script, first append its
# directory to sys.path and then add the assembly reference.
sys.path.append(r'C:\test\samples\IronPython\CS\IronPythonCS\bin\Debug')
clr.AddReference('CsCalled')
# Import the DLL.
import CsCalled
a = CsCalled.Class1()
# NOTE(review): `value` is never defined in this script, so this line raises
# a NameError at runtime -- confirm what argument was intended here.
ret = CsCalled.Class1.doSomething(a, value)
# When calling a class method through the class, the instance apparently has
# to be passed as the first argument.
# Observed on IronPython 2.0.2; up to some Python version the first argument
# had to be `self`, and this appears to be a remnant of that behavior.
# Without the instance as the first argument, an error like the following
# is raised:
# TypeError: <function name> takes exactly n argument (0 given)
|
# coding=utf-8
from lxml import html
from .config import headers, pid_url, manga_url, search_save_dir, search_url
import os
import requests
import time
table = str.maketrans("|\\?*<\":>+[]/ .'",'。'*15) # 过滤不能创建文件的字符
def GetSearchPic(uid, session, pid, star='', author_save_dir='', search_save_dir=''):
    """
    Download the pixiv illustration (or manga set) identified by *pid*.

    Single-image posts are saved as one .jpg; manga posts get their own
    folder with one numbered .jpg per page.

    :param uid: author user id, embedded in the saved file name
    :param session: authenticated requests session
    :param pid: illustration id
    :param star: bookmark count; when non-empty the file is prefixed with it
                 and saved under search_save_dir, else under author_save_dir
    :param author_save_dir: target dir for per-author downloads
    :param search_save_dir: target dir for search-result downloads
    """
    pid_home = pid_url + pid  # URL of the pid's own page
    manga_home = manga_url + pid  # URL of the manga view for the same pid
    try:
        image_html = session.get(pid_home)
    except requests.exceptions.ConnectionError:
        # Single retry after a short back-off.
        time.sleep(5)
        image_html = session.get(pid_home)
    pic_tree = html.fromstring(image_html.text)
    img_node = pic_tree.xpath('//div[@class="wrapper"]/img[@class="original-image"]')  # the original <img> node
    if len(img_node):
        # Single-image post.
        pic_name = img_node[0].get('alt')  # image title
        pic_url = img_node[0].get('data-src')  # full-size image URL
        if star:
            name = search_save_dir + star + '-' + uid + '-' + pid + '-' + pic_name.translate(table) + '.jpg'
        else:
            name = author_save_dir + uid + '-' + pid + '-' + pic_name.translate(table) + '.jpg'
        get_pic = session.get(pic_url, headers=headers)
        with open(name, 'wb') as f:
            f.write(get_pic.content)
            f.close()  # NOTE(review): redundant inside `with`; harmless.
        print(name + ' saved')
    else:
        # Manga post: fetch every page into its own folder.
        manga_html = session.get(manga_home)
        manga_tree = html.fromstring(manga_html.text)
        manga_pic_node = manga_tree.xpath('//img[@data-filter="manga-image"]')  # per-page image nodes
        manga_name = pic_tree.xpath('//h1[@class="title"]/text()')[0]
        if star:
            save_dir = search_save_dir + star + '-' + manga_name.translate(table)
        else:
            save_dir = author_save_dir + manga_name.translate(table)
        if os.path.exists(save_dir):
            pass
        else:
            os.mkdir(save_dir)
        for i in range(manga_pic_node.__len__()):
            try:
                get_manga_pic = session.get(manga_pic_node[i].get('data-src'),headers=headers)
            except requests.exceptions.ConnectionError:
                time.sleep(5)
                get_manga_pic = session.get(manga_pic_node[i].get('data-src'), headers=headers)
            save_name = save_dir + '/' + str(i) + '.jpg'
            with open(save_name, 'wb') as f:
                f.write(get_manga_pic.content)
                f.close()  # NOTE(review): redundant inside `with`; harmless.
            print(save_name + ' saved')
def searchstart(s, collection, search_key):
    """
    Crawl the pixiv search results for *search_key* and download every
    illustration whose bookmark count exceeds *collection*.

    :param s: authenticated requests session (closed on completion)
    :param collection: minimum bookmark count, compared as int
    :param search_key: search keyword; also used as the output subfolder name
    """
    os.mkdir(search_save_dir + search_key)
    num = searchpage(s, search_key)
    # pixiv serves at most 1000 result pages.
    if num > 1000:
        num = 1000
    else:
        pass
    for i in range(1, num):
        # Progress message ("now on page %s").
        print('现在是%s页' % i)
        try:
            r_search = s.get(search_url % search_key + str(i))
        except requests.exceptions.ConnectionError:
            # Single retry after a short back-off.
            time.sleep(5)
            r_search = s.get(search_url % search_key + str(i))
        tree_search = html.fromstring(r_search.text)
        el2 = tree_search.xpath('//ul[@class="count-list"]/li/a')  # bookmark-count nodes
        for k in range(el2.__len__()):
            img = el2[k].xpath('../../../a/div/img')  # thumbnail <img> node
            pid = img[0].get('data-id')  # illustration id
            uid = img[0].get('data-user-id')  # author user id
            star = el2[k].text_content()  # bookmark count (string)
            if (int(star) > int(collection)):
                GetSearchPic(pid=pid, uid=uid, session=s, star=star, search_save_dir=search_save_dir + search_key + '/')
    s.close()
def searchpage(session, search_key):
    """Estimate the number of result pages for *search_key* from the
    count badge on the first search-results page."""
    first_page = session.get(search_url % search_key + str(1))
    doc = html.fromstring(first_page.text)
    badge_text = doc.xpath('//span[@class="count-badge"]')[0].text
    # Same arithmetic as before: strip the trailing unit character, then
    # derive a page count from the total hit count (20 results per page).
    total = (int(badge_text[0:-1]) + 2) / 20 + 2
    return int(total)
|
class Node:
    """Singly linked list node: payload in *data*, successor in *_next*."""

    def __init__(self, data=None):
        self.data = data
        self._next = None
class LinkedList:
    """Singly linked list with several in-place reversal algorithms."""

    def __init__(self):
        self._head = None

    def push(self, val):
        """Prepend *val* to the list."""
        # The old head (possibly None) becomes the successor of the new
        # node, which also covers the empty-list case -- the original
        # if/else branches were equivalent.
        new_node = Node(val)
        new_node._next = self._head
        self._head = new_node

    # Recursive reversal
    def reverse_recursion(self, head_node):
        '''
        Reverse the sub-list starting at head_node; return the new head.

        blank_node: the last node of the original list, i.e. the head after
        reversal; it is the same value at every recursion level.
        head_node: the per-call argument, changing as the recursion unwinds.
        '''
        # Guard: an empty or single-node list is its own reversal.  The
        # original dereferenced head_node._next unconditionally and raised
        # AttributeError on an empty list.  (Leftover debug prints removed.)
        if head_node is None or head_node._next is None:
            return head_node
        blank_node = self.reverse_recursion(head_node._next)
        head_node._next._next = head_node
        head_node._next = None
        return blank_node

    # Iterative reversal
    def reverse_traverse(self, head_node):
        """Reverse the list starting at head_node iteratively; return the new head."""
        pre = None
        cur = head_node
        while cur != None:
            nxt = cur._next
            cur._next = pre
            pre = cur
            # Advance to the next node to reverse.
            cur = nxt
        return pre

    # Reverse the nodes in the half-open interval [node1, node2)
    def reverse_traverse_interval(self,node1, node2):
        """Reverse nodes from node1 up to (excluding) node2; return the new head."""
        pre = None
        cur = node1
        while cur != node2:
            nxt = cur._next
            cur._next = pre
            pre = cur
            # Advance to the next node to reverse.
            cur = nxt
        return pre

    # Reverse the list k nodes at a time
    def reverse_k_group(self,head_node,k):
        """
        Reverse every group of k consecutive nodes; a trailing group shorter
        than k is left untouched.  Returns the new head.
        """
        if head_node == None:
            return None
        # [a, b) is the next k-node window; if fewer than k nodes remain,
        # leave the remainder as-is.
        a = head_node
        b = head_node
        for i in range(k):
            if b == None:
                return head_node
            b = b._next
        # Reverse the window; its new head becomes this group's head.
        new_head = self.reverse_traverse_interval(a, b)
        # a is now the tail of the reversed window -- link it to the
        # recursively processed remainder.
        a._next = self.reverse_k_group(b,k)
        return new_head

    def show(self):
        """Print the list as 'v1->v2->...->' on one line."""
        current = self._head
        while current:
            print(current.data,end="->")
            current = current._next
if __name__ == '__main__':
    # Build the list 1 -> 2 -> 3 -> 4 -> 5 (push prepends).
    l = LinkedList()
    l.push(5)
    l.push(4)
    l.push(3)
    l.push(2)
    l.push(1)
    print('print linked list:')
    l.show()
    print()
    # Direct references to the first four nodes for the interval demos.
    node1 = l._head
    node2 = node1._next
    node3 = node2._next
    node4 = node3._next
    '''
    q = l.reverse_recursion(l._head)
    while q:
        print(q.data)
        q = q._next
    exit()
    '''
    '''
    print('\n part reverse:')
    t = l.reverse_traverse_interval(node1,node3._next)
    while t:
        print(t.data)
        t = t._next
    '''
    # Demo: reverse in groups of 3 -> 3,2,1,4,5.
    print('\n reverse k')
    g = l.reverse_k_group(node1,3)
    print('type of g:', type(g))
    while g:
        print(g.data)
        g = g._next
    # exit() stops the script here, so the reverse_traverse demo below
    # never runs.
    exit()
    r = l.reverse_traverse(l._head)
    print('\nreverse:')
    while r:
        print(r.data)
        r = r._next
|
# Generated by Django 2.1.5 on 2019-07-10 19:56
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds ReportChat.content.  Avoid editing
    # applied migrations by hand.

    dependencies = [
        ('chat', '0019_reportchat'),
    ]

    operations = [
        migrations.AddField(
            model_name='reportchat',
            name='content',
            # default=225 back-fills existing rows once; preserve_default=False
            # then drops the default from the model state.
            # NOTE(review): 225 is an int default for a TextField -- presumably
            # typed at the interactive prompt; confirm it was intended.
            field=models.TextField(default=225),
            preserve_default=False,
        ),
    ]
|
from django.shortcuts import render
from .models import *
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
import os
@csrf_exempt
def signup_faculty(request):
    """
    Faculty sign-up view (Python 2 / legacy Django).

    Authenticated users are redirected to the index page.  Otherwise a POST
    fills the extra fields of a pre-existing faculty_data row (the id must
    already be provisioned) and creates the matching auth User.
    NOTE(review): an unauthenticated GET falls through every branch and
    returns None (HTTP 500 in Django); presumably a form render is missing.
    """
    if request.user.is_authenticated():
        # Already logged in: show the home page with the profile dropdown.
        link1='<div class="dropdown"><button class="dropbtn">PROFILE</button><div class="dropdown-content">'
        link1+='<a href="/student_view/'+str(request.user)+'" >MY PROFILE</a>'
        link1+='<a href="/students_profile">STUDENTS</a><a href="/alumni_profile">ALUMNI</a></div></div>'
        return render (request,'index.html',{'link1':link1,'link2':'<a href="/logout/">LOGOUT</a>'})
    else:
        if request.method=="POST":
            try:
                response={}
                login_id=str(request.POST.get('faculty_id'))
                designation=str(request.POST.get('designation'))
                print designation
                education=str(request.POST.get('education'))
                area_of_interest=str(request.POST.get('area_of_interest'))
                image_name=request.FILES.get('photo').name
                try:
                    # Create the upload folder on first use; an
                    # already-exists error is printed and ignored.
                    folder = 'media/faculty_images/'
                    os.mkdir(os.path.join(folder))
                except Exception,e:
                    print e
                    pass
                print "image=",image_name
                # Save the uploaded photo as <folder><login_id><original name>.
                url=folder+login_id+image_name
                fout = open(url, 'wb+')
                file_content = request.FILES.get('photo').read()
                fout.write(file_content)
                fout.close()
                other_details=str(request.POST.get('other_details'))
                print other_details
                password=str(request.POST.get('password'))
                print password
                try:
                    # Only pre-provisioned ids can sign up: the row must
                    # already exist, otherwise .get() raises below.
                    faculty_data_row=faculty_data.objects.get(faculty_id=login_id)
                    setattr(faculty_data_row,'designation',str(designation))
                    setattr(faculty_data_row,'education',str(education))
                    setattr(faculty_data_row,'area_of_interest',str(area_of_interest))
                    setattr(faculty_data_row,'other_details',str(other_details))
                    setattr(faculty_data_row,'photo',url)
                    faculty_data_row.save()
                    User.objects.create_user(username=login_id,password=password)
                    return render(request,'login.html',{'msg':'Sign up done','link2':'<a href="/login/">LOGIN</a>'})
                except Exception,e:
                    print e
                    return render(request,'login.html',{'msg':'Invalid login id','link2':'<a href="/login/">LOGIN</a>'})
            except:
                # Any failure while reading the POST data / saving the file.
                return render(request,'login.html',{'msg':'Data not get','link2':'<a href="/login/">LOGIN</a>'})
def faculty_profile(request,faculty_id):
    """
    Public faculty profile page (Python 2 / legacy Django).

    Builds a template context from the faculty_data row for *faculty_id*;
    when the viewer is that same faculty member, an Edit link is added.
    """
    try:
        JSON_response={}
        JSON_response['login_id']=faculty_id
        faculty_data_row=faculty_data.objects.get(faculty_id=faculty_id)
        JSON_response['name']=faculty_data_row.name
        JSON_response['mobile']=faculty_data_row.mobile
        JSON_response['email']=faculty_data_row.email
        JSON_response['designation']=faculty_data_row.designation
        # src attribute fragment; the template supplies the <img> tag.
        photo=str(faculty_data_row.photo)
        photo_url=' src='+'"/'+photo+'"'
        JSON_response['photo']=photo_url
        print photo_url
        JSON_response['education']=faculty_data_row.education
        JSON_response['area_of_interest']=faculty_data_row.area_of_interest
        JSON_response['other_details']=faculty_data_row.other_details
        if request.user.is_authenticated():
            login_id=str(request.user)
            link1='<div class="dropdown"><button class="dropbtn">PROFILE</button><div class="dropdown-content">'
            link1+='<a href="/student_view/'+str(request.user)+'" >MY PROFILE</a>'
            link1+='<a href="/students_profile">STUDENTS</a><a href="/alumni_profile">ALUMNI</a></div></div>'
            JSON_response['link1']=link1
            JSON_response['link2']='<a href="/logout/">LOGOUT</a>'
            # Owners get an Edit link on their own profile.
            if login_id==faculty_id:
                edit_url=str(request.scheme+'://'+request.get_host()+'/edit_faculty_profile/')
                edit='<a href="'+edit_url+'"'+' class="btn btn-default" style="float:right">Edit</a>'
                JSON_response['edit']=edit
        else:
            JSON_response['link2']='<a href="/login/">LOGIN</a>'
        return render(request,'show_faculty_profile.html',JSON_response)
    except Exception,e:
        # Unknown id (DoesNotExist) or any rendering error lands here.
        print e
        link1='<div class="dropdown"><button class="dropbtn">PROFILE</button><div class="dropdown-content">'
        link1+='<a href="/student_view/'+str(request.user)+'" >MY PROFILE</a>'
        link1+='<a href="/students_profile">STUDENTS</a><a href="/alumni_profile">ALUMNI</a></div></div>'
        return render(request,'show_faculty_profile.html',{'msg':'something occur try again','link1':link1,'link2':'<a href="/logout/">LOGOUT</a>'})
@login_required
def faculty_group_profile(request):
    """Placeholder listing view: responds after touching the faculty table."""
    try:
        all_faculty = faculty_data.objects.all()
        for faculty_row in all_faculty:
            # Returns on the first row; an empty table yields no response.
            return HttpResponse('whole faculty data will be passed at once')
    except:
        return HttpResponse('something occur please try again')
@login_required
@csrf_exempt
def edit_faculty_profile(request):
    """
    Edit the logged-in faculty member's profile (Python 2 / legacy Django).

    POST: apply the submitted fields (photo optional) and redirect to the
    profile page.  GET: render the edit form pre-filled from the DB.
    """
    if request.method=='POST':
        try:
            faculty_data_row=faculty_data.objects.get(faculty_id=str(request.user))
            education=str(request.POST.get('education'))
            print education
            designation=str(request.POST.get('designation'))
            area_of_interest=str(request.POST.get('area_of_interest'))
            other_details=str(request.POST.get('other_details'))
            try:
                # Photo is optional: any failure in this inner block (e.g.
                # no file uploaded) falls through to the field updates below.
                image_name=request.FILES.get('photo').name
                try:
                    folder = 'media/faculty_images/'
                    os.mkdir(os.path.join(folder))
                except Exception,e:
                    # Folder already exists: print and continue.
                    print e
                    pass
                print "image=",image_name
                url=folder+faculty_data_row.faculty_id+image_name
                fout = open(url, 'wb+')
                file_content = request.FILES.get('photo').read()
                fout.write(file_content)
                fout.close()
                setattr(faculty_data_row,'photo',url)
            except:
                pass
            setattr(faculty_data_row,'education',education)
            setattr(faculty_data_row,'designation',designation)
            setattr(faculty_data_row,'area_of_interest',area_of_interest)
            setattr(faculty_data_row,'other_details',other_details)
            #resume pending
            faculty_data_row.save()
            redirect_url='/faculty_view/'+str(request.user)
            return HttpResponseRedirect(redirect_url)
        except:
            link1='<div class="dropdown"><button class="dropbtn">PROFILE</button><div class="dropdown-content">'
            link1+='<a href="/student_view/'+str(request.user)+'" >MY PROFILE</a>'
            link1+='<a href="/students_profile">STUDENTS</a><a href="/alumni_profile">ALUMNI</a></div></div>'
            return render (request,'edit_faculty_profile.html',{'msg':'something occur please try again','link1':link1,'link2':'<a href="/logout/">LOGOUT</a>'})
    else:
        # GET: pre-fill the edit form from the current DB row.
        faculty_data_row=faculty_data.objects.get(faculty_id=str(request.user))
        JSON_response={}
        JSON_response['faculty_id']=faculty_data_row.faculty_id
        JSON_response['name']=faculty_data_row.name
        JSON_response['mobile']=faculty_data_row.mobile
        JSON_response['email']=faculty_data_row.email
        JSON_response['designation']=faculty_data_row.designation
        photo=str(faculty_data_row.photo)
        photo_url='<img src='+'"/'+photo+'"'+'>'
        JSON_response['photo']=photo_url
        print photo_url
        JSON_response['education']=faculty_data_row.education
        JSON_response['area_of_interest']=faculty_data_row.area_of_interest
        JSON_response['other_details']=faculty_data_row.other_details
        link1='<div class="dropdown"><button class="dropbtn">PROFILE</button><div class="dropdown-content">'
        link1+='<a href="/student_view/'+str(request.user)+'" >MY PROFILE</a>'
        link1+='<a href="/students_profile">STUDENTS</a><a href="/alumni_profile">ALUMNI</a></div></div>'
        JSON_response['link1']=link1
        JSON_response['link2']='<a href="/logout/">LOGOUT</a>'
        return render (request,'edit_faculty_profile.html',JSON_response)
# Create your views here.
|
from which_pyqt import PYQT_VER
if PYQT_VER == 'PYQT5':
from PyQt5.QtCore import QLineF, QPointF, QObject
elif PYQT_VER == 'PYQT4':
from PyQt4.QtCore import QLineF, QPointF, QObject
else:
raise Exception('Unsupported Version of PyQt: {}'.format(PYQT_VER))
import time
# Some global color constants that might be useful
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)

# Global variable that controls the speed of the recursion animation, in
# seconds (used by showTangent/showHull when pausing is enabled).
PAUSE = 2

#
# This is the class you have to complete.
#
class ConvexHullSolver(QObject):
    """
    Divide-and-conquer convex hull solver driven by the GUI.

    Points are sorted by x-coordinate, split in half recursively, and the
    sub-hulls are merged via their upper and lower common tangents.  Hulls
    are represented as clockwise-ordered lists of QPointF.
    """

    def __init__(self):
        super().__init__()
        self.pause = False

    # Some helper methods that make calls to the GUI, allowing us to send
    # updates to be displayed.
    def showTangent(self, line, color):
        self.view.addLines(line, color)
        if self.pause:
            time.sleep(PAUSE)

    def eraseTangent(self, line):
        self.view.clearLines(line)

    def blinkTangent(self, line, color):
        self.showTangent(line, color)
        self.eraseTangent(line)

    def showHull(self, polygon, color):
        self.view.addLines(polygon, color)
        if self.pause:
            time.sleep(PAUSE)

    def eraseHull(self, polygon):
        self.view.clearLines(polygon)

    def showText(self, text):
        self.view.displayStatusText(text)

    # Determines if the points are clockwise, counterclockwise, or co-linear
    def orientation(self, p1, p2, p3):
        """Cross-product turn test: 0 co-linear, 1 clockwise, -1 counterclockwise."""
        value = (p2.y() - p1.y()) * (p3.x() - p2.x()) - (p2.x() - p1.x()) * (p3.y() - p2.y())
        # value is co-linear
        if value == 0:
            return 0
        # value is clockwise
        if value > 0:
            return 1
        # value is counterclockwise
        return -1

    # Finds the upper tangent points of the Left Hull and the Right Hull
    def upper_tangent(self, L, R, L_rightmost, R_leftmost):
        """
        Return (Ltangent, Rtangent): indices into L and R of the upper
        common tangent, starting from L's rightmost and R's leftmost points.
        """
        Lsize = len(L)
        Rsize = len(R)
        Ltangent = L_rightmost
        Rtangent = R_leftmost
        isFound = False
        while not isFound:
            isFound = True
            # Find the upper tangent on the Left Hull
            while (self.orientation(R[Rtangent], L[Ltangent], L[(Lsize + Ltangent - 1) % Lsize]) >= 0 or
                   self.orientation(R[Rtangent], L[Ltangent], L[(Lsize + Ltangent + 1) % Lsize]) >= 0):
                # If the previous index on the Left Hull is clockwise or co-linear, decrement the left tangent point
                if self.orientation(R[Rtangent], L[Ltangent], L[(Lsize + Ltangent - 1) % Lsize]) >= 0:
                    Ltangent = (Lsize + Ltangent - 1) % Lsize
                # If the next index on the Left Hull is clockwise or co-linear, increment the left tangent point
                if self.orientation(R[Rtangent], L[Ltangent], L[(Lsize + Ltangent + 1) % Lsize]) >= 0:
                    Ltangent = (Lsize + Ltangent + 1) % Lsize
            # Find the upper tangent on the Right Hull; any movement forces
            # another pass over the Left Hull.
            while (self.orientation(L[Ltangent], R[Rtangent], R[(Rtangent + 1) % Rsize]) <= 0 or
                   self.orientation(L[Ltangent], R[Rtangent], R[(Rtangent - 1) % Rsize]) <= 0):
                # If the next index on the Right Hull is counterclockwise or co-linear, increment the right tangent point
                if self.orientation(L[Ltangent], R[Rtangent], R[(Rtangent + 1) % Rsize]) <= 0:
                    Rtangent = (Rtangent + 1) % Rsize
                    isFound = False
                # If the previous index on the Right Hull is counterclockwise or co-linear, decrement the right tangent point
                if self.orientation(L[Ltangent], R[Rtangent], R[(Rtangent - 1) % Rsize]) <= 0:
                    Rtangent = (Rtangent - 1) % Rsize
                    isFound = False
        return Ltangent, Rtangent

    # Finds the lower tangent points of the Left Hull and the Right Hull
    def lower_tangent(self, L, R, L_rightmost, R_leftmost):
        """
        Return (Ltangent, Rtangent): indices into L and R of the lower
        common tangent, starting from L's rightmost and R's leftmost points.
        """
        Lsize = len(L)
        Rsize = len(R)
        Ltangent = L_rightmost
        Rtangent = R_leftmost
        isFound = False
        while not isFound:
            isFound = True
            # Find the lower tangent on the Right Hull
            while (self.orientation(L[Ltangent], R[Rtangent], R[(Rsize + Rtangent + 1) % Rsize]) >= 0 or
                   self.orientation(L[Ltangent], R[Rtangent], R[(Rsize + Rtangent - 1) % Rsize]) >= 0):
                # if the next index on the Right Hull is clockwise or co-linear, increment the right tangent point
                if self.orientation(L[Ltangent], R[Rtangent], R[(Rsize + Rtangent + 1) % Rsize]) >= 0:
                    Rtangent = (Rsize + Rtangent + 1) % Rsize
                # if the previous index on the Right Hull is clockwise or co-linear, decrement the right tangent point
                if self.orientation(L[Ltangent], R[Rtangent], R[(Rsize + Rtangent - 1) % Rsize]) >= 0:
                    Rtangent = (Rsize + Rtangent - 1) % Rsize
            # Find the lower tangent on the Left Hull; any movement forces
            # another pass over the Right Hull.
            while (self.orientation(R[Rtangent], L[Ltangent], L[(Ltangent - 1) % Lsize]) <= 0 or
                   self.orientation(R[Rtangent], L[Ltangent], L[(Ltangent + 1) % Lsize]) <= 0):
                # if the previous index on the Left Hull is counterclockwise or co-linear, decrement the left tangent point
                if self.orientation(R[Rtangent], L[Ltangent], L[(Ltangent - 1) % Lsize]) <= 0:
                    Ltangent = (Ltangent - 1) % Lsize
                    isFound = False
                # if the next index on the Left Hull is counterclockwise or co-linear, increment the left tangent point
                if self.orientation(R[Rtangent], L[Ltangent], L[(Ltangent + 1) % Lsize]) <= 0:
                    Ltangent = (Ltangent + 1) % Lsize
                    isFound = False
        return Ltangent, Rtangent

    # Combines two Hulls into one Hull
    def merge(self, L, R):
        """Merge two clockwise hulls into one via their common tangents."""
        points = []
        Lsize = len(L)
        Rsize = len(R)
        L_rightmost = 0
        R_leftmost = 0
        # Find the rightmost point of L
        for i in range(1, Lsize):
            if L[i].x() > L[L_rightmost].x():
                L_rightmost = i
        # Find the leftmost point of R
        for i in range(1, Rsize):
            if R[i].x() < R[R_leftmost].x():
                R_leftmost = i
        # Finds the upper and lower tangent points
        Ltangent_upper, Rtangent_upper = self.upper_tangent(L, R, L_rightmost, R_leftmost)
        Ltangent_lower, Rtangent_lower = self.lower_tangent(L, R, L_rightmost, R_leftmost)
        # Creates the new Hull from lower tangent to upper tangent of the
        # Left Hull, then from the upper tangent to the lower tangent of
        # the Right Hull.
        i = Ltangent_lower
        points.append(L[i])
        while i != Ltangent_upper:
            i = (i + 1) % Lsize
            points.append(L[i])
        i = Rtangent_upper
        points.append(R[i])
        while i != Rtangent_lower:
            i = (i + 1) % Rsize
            points.append(R[i])
        return points

    # Returns a set of three points in clockwise order
    def clockwise(self, p1, p2, p3):
        """Return [p1, p2, p3] reordered so they form a clockwise triangle."""
        turn = self.orientation(p1, p2, p3)
        # Already clockwise: keep the given order.
        if turn == 1:
            return [p1, p2, p3]
        # Counterclockwise: swap the second and third points.
        if turn == -1:
            return [p1, p3, p2]
        # Bug fix: for co-linear triples the original fell through and
        # returned None, which crashed merge(); return the degenerate
        # "hull" in the given order instead.
        return [p1, p2, p3]

    # Determines the smallest, clockwise set of points in a convex shape
    def convex_hull(self, points):
        """Recursively compute the clockwise hull of x-sorted *points*."""
        # Base cases: 1-2 points are already a hull; 3 get ordered directly.
        if len(points) <= 2:
            return points
        elif len(points) == 3:
            return self.clockwise(points[0], points[1], points[2])
        # Split in half (points are pre-sorted by x) and merge the sub-hulls.
        midpoint = len(points) // 2
        left = points[:midpoint]
        right = points[midpoint:]
        Left_Hull = self.convex_hull(left)
        Right_Hull = self.convex_hull(right)
        points_of_polygon = self.merge(Left_Hull, Right_Hull)
        # If `Show Recursion` box is checked on the GUI
        if self.pause:
            polygon = [QLineF(points_of_polygon[i], points_of_polygon[(i + 1) % len(points_of_polygon)]) for i in
                       range(len(points_of_polygon))]
            self.showHull(polygon, RED)
        return points_of_polygon

    # This is the method that gets called by the GUI and actually executes
    # the finding of the hull
    def compute_hull(self, points, pause, view):
        """
        GUI entry point: compute and display the convex hull of *points*.

        :param points: non-empty list of QPointF (sorted in place by x)
        :param pause: True to animate the recursion with delays
        :param view: GUI proxy used by the show*/erase* helpers
        """
        self.pause = pause
        self.view = view
        assert (type(points) == list and type(points[0]) == QPointF)
        t1 = time.time()
        # Sort points by x so the recursion can split positionally.
        # (Debug print of the full point list removed.)
        points.sort(key=lambda point: point.x())
        t2 = time.time()  # t2 - t1 is the sorting time (not displayed)
        t3 = time.time()
        # Creates a set of lines from the set of points returned from convex_hull
        points_of_polygon = self.convex_hull(points)
        polygon = [QLineF(points_of_polygon[i], points_of_polygon[(i + 1) % len(points_of_polygon)]) for i in
                   range(len(points_of_polygon))]
        t4 = time.time()
        # when passing lines to the display, pass a list of QLineF objects. Each QLineF
        # object can be created with two QPointF objects corresponding to the endpoints
        self.showHull(polygon, RED)
        self.showText('Time Elapsed (Convex Hull): {:3.3f} sec'.format(t4 - t3))
|
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
# Earlier experimental version of home(), disabled by wrapping it in a
# string literal; kept for reference.
'''
def home(request):
    #print(dir(request))
    print(request.method)
    #print(request.COOKIES)
    print(request.headers)
    print(request.user)
    print(request.get_full_path())
    return HttpResponse("<!DOCTYPE><html><style>h1{color: blue}</style><h1>Hello<h1></html>")
'''''
def home(request):
    """Demo view that writes HTML fragments incrementally to the response."""
    # Bug fix: the body written below is HTML, but the response declared
    # content_type='application/json', so clients rendered the markup as
    # raw JSON text.  Declare the correct HTML content type instead.
    response = HttpResponse(content_type='text/html')
    response.write('<p>Test!</p>')
    response.write('<style>h1{color: blue}</style><p>Test!</p>')
    return response
def redir(request):
    """Unconditionally redirect any request to Google."""
    destination = 'http://www.google.com'
    return HttpResponseRedirect(destination)
"""
Django settings for int_to_roman project.
"""
import os
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
SECRET_KEY = 'q8$hhx$nl^n%esc67!lswc@6**t-)_2s6)**6!&@*1ispnx1#a'
DEBUG = True
ALLOWED_HOSTS = []
INSTALLED_APPS = [
'django.contrib.staticfiles'
]
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware'
)
ROOT_URLCONF = 'int_to_roman.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.core.context_processors.static'
],
},
},
]
WSGI_APPLICATION = 'int_to_roman.wsgi.application'
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATIC_URL = '/static/'
|
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping
from tensorflow.keras.utils import Sequence
from tensorflow.keras.optimizers import Adam
import tensorflow as tf
import numpy as np
import cv2
from glob import glob
import os
from network import AutoEncoder
from utils import generate_image_list, augment_images, read_img
from options import Options
cfg = Options().parse()
class data_flow(Sequence):
    """Keras Sequence yielding (batch, batch) pairs for autoencoder training."""

    def __init__(self, filenames, batch_size, grayscale):
        self.filenames = filenames
        self.batch_size = batch_size
        self.grayscale = grayscale

    def __len__(self):
        # Number of batches, counting a final partial batch.
        return int(np.ceil(len(self.filenames) / float(self.batch_size)))

    def __getitem__(self, idx):
        start = idx * self.batch_size
        batch_files = self.filenames[start:start + self.batch_size]
        images = np.array([read_img(filename, self.grayscale) for filename in batch_files])
        # Scale pixel values to [0, 1]; input and target are the same batch.
        images = images / 255.
        return images, images
# data: optionally augment the training images, then split the file list
# 80/20 into train/validation generators.
if cfg.aug_dir and cfg.do_aug:
    img_list = generate_image_list(cfg)
    augment_images(img_list, cfg)
dataset_dir = cfg.aug_dir if cfg.aug_dir else cfg.train_data_dir
file_list = glob(dataset_dir + '/*')
num_valid_data = int(np.ceil(len(file_list) * 0.2))
data_train = data_flow(file_list[:-num_valid_data], cfg.batch_size, cfg.grayscale)
data_valid = data_flow(file_list[-num_valid_data:], cfg.batch_size, cfg.grayscale)

# loss: SSIM-based reconstruction losses, optionally with an L1 term,
# falling back to plain MSE.
if cfg.loss == 'ssim_loss':
    @tf.function
    def ssim_loss(gt, y_pred, max_val=1.0):
        # 1 - mean SSIM: zero when the reconstruction matches exactly.
        return 1 - tf.reduce_mean(tf.image.ssim(gt, y_pred, max_val=max_val))
    loss = ssim_loss
elif cfg.loss == 'ssim_l1_loss':
    @tf.function
    def ssim_l1_loss(gt, y_pred, max_val=1.0):
        ssim_loss = 1 - tf.reduce_mean(tf.image.ssim(gt, y_pred, max_val=max_val))
        L1 = tf.reduce_mean(tf.abs(gt - y_pred))
        # cfg.weight balances the L1 term against the SSIM term.
        return ssim_loss + L1 * cfg.weight
    loss = ssim_l1_loss
else:
    loss = 'mse'

# network
autoencoder = AutoEncoder(cfg)
# NOTE(review): `lr`/`decay` are legacy Keras optimizer argument names;
# recent TF versions expect `learning_rate` -- confirm the pinned TF version.
optimizer = Adam(lr=cfg.lr, decay=cfg.decay)
# Track the complementary metric to whichever loss is in use.
autoencoder.compile(optimizer=optimizer, loss=loss, metrics=['mae'] if loss == 'mse' else ['mse'])
autoencoder.summary()
earlystopping = EarlyStopping(patience=20)
# NOTE(review): 'chechpoint_dir' (sic) is the attribute name defined by
# Options -- the spelling must stay consistent with options.py.
checkpoint = ModelCheckpoint(os.path.join(cfg.chechpoint_dir, '{epoch:02d}-{val_loss:.5f}.hdf5'), save_best_only=True,
                             period=1, mode='auto', verbose=1, save_weights_only=True)
autoencoder.fit(data_train, epochs=cfg.epochs, validation_data=data_valid, callbacks=[checkpoint, earlystopping])

# show reconstructed images
decoded_imgs = autoencoder.predict(data_valid)
n = len(decoded_imgs)
save_snapshot_dir = cfg.chechpoint_dir +'/snapshot/'
if not os.path.exists(save_snapshot_dir):
    os.makedirs(save_snapshot_dir)
for i in range(n):
    # Outputs are in [0, 1]; rescale to 8-bit for cv2.imwrite.
    cv2.imwrite(save_snapshot_dir+str(i)+'_rec_valid.png', (decoded_imgs[i]*255).astype('uint8'))
|
from django.contrib import admin
from .models import Product, ProductType
@admin.register(Product)
class ProductAdmin(admin.ModelAdmin):
    # Columns shown in the Product changelist.
    list_display = ('product_name', 'product_type', 'units', 'price_per_unit')
@admin.register(ProductType)
class ProductTypeAdmin(admin.ModelAdmin):
    # Single-column changelist for product types.
    list_display = ('type_name', )
|
import logging
import os
import subprocess
import time

import win32serviceutil
# Get service version and data
def get_service(name):
    '''
    Look up a Windows service by name.

    name -- service name to search for in `sc query` output.
    Returns the matching SERVICE_NAME value, a "not installed" message when
    the lookup produced empty output, or None when the query failed (the
    error is logged and printed).
    '''
    try:
        # BUG FIX: the original str()-ed the bytes output and parsed the
        # "b'...'" repr, relying on leftover escape characters to trim the
        # name. Decode the bytes instead and parse real text.
        lookup_service = subprocess.check_output(
            'sc query state=all | find "SERVICE_NAME: ' + name + '"',
            shell=True).decode(errors='replace')
        if not lookup_service:
            return ('There is not ' + name + ' service installed')
        # Output line looks like "SERVICE_NAME: <name>"; token 1 is the name.
        return lookup_service.split()[1]
    except Exception as error:
        # `find` exits non-zero when nothing matched, which makes
        # check_output raise CalledProcessError and lands here.
        logging.exception('Get service err: ')
        print(error)
def service_status(service):
    """Return the current state code of a Windows service.

    service -- service name.
    Codes: 1 = stopped, 2 = start pending, 3 = stop pending, 4 = running.
    """
    return win32serviceutil.QueryServiceStatus(service)[1]
def start_service(service):
    """Start the named Windows service via `net start`.

    service -- service name.
    """
    subprocess.run(f'net start {service}')
def stop_service(service):
    """Stop the named Windows service together with all dependent services.

    service -- service name.
    """
    subprocess.run(f'net stop {service} /yes')
# Get PID of given process name
def get_pid(process_name):
    '''
    Return the PID (as a string) of the first tasklist entry matching
    process_name, or None (after printing a message) when none is running.
    '''
    try:
        lookup_process = subprocess.check_output(
            'tasklist | find "' + process_name + '"',
            shell=True).decode(errors='replace')
        # BUG FIX: the original compared the output string to the int 1
        # (`lookup_process != 1`), which is always True, so the
        # "not running" branch was unreachable.
        if lookup_process.strip():
            # tasklist columns: image name, PID, session name, ...
            return lookup_process.split()[1]
        print("There is not running any process with name " + process_name)
    except subprocess.CalledProcessError:
        # `find` exits 1 when no line matched, i.e. the process is absent.
        print("There is not running any process with name " + process_name)
    except Exception as error:
        logging.exception('Get PID err: ')
        print(error)
# Check connection on port
def check_connection(process_id):
    '''
    Wait for the process with the given PID to establish a listener on
    port 8093.

    process_id -- PID (string) looked for in `netstat -ano` output.
    Polls every 10 seconds for up to 90 attempts. Returns the exit code of
    the initial netstat lookup as a string ("1" means no matching line),
    or None on unexpected failure.
    '''
    try:
        # subprocess.call returns the pipeline's exit code; `find` exits 1
        # when nothing matched, so anything other than "1" means a netstat
        # line for this PID on :8093 exists.
        connection = str(subprocess.call('netstat -ano | find ":8093" | find "' + process_id + '"', shell=True))
        if connection != "1":
            DEPLOY_LOOP = 0
            while DEPLOY_LOOP < 90:
                connection_state = str(subprocess.check_output('netstat -ano | find ":8093" | find "' + process_id + '"', shell=True))
                print(connection_state.split()[-3])
                # The third-to-last netstat column is the foreign address;
                # "0.0.0.0:0" means the socket is only listening, not connected.
                if connection_state.split()[-3] != "0.0.0.0:0":
                    print('Connection has been established on IP ' + connection_state.split()[-3])
                    break
                time.sleep(10)
                DEPLOY_LOOP += 1
                # BUG FIX: the message claimed "20 attempts" while the loop
                # actually allows 90.
                print('Connection is not established yet, loop number ' + str(DEPLOY_LOOP) + ' of 90 attempts.')
        else:
            print('Communication was not established with clubspire process ' + process_id)
        return connection
    except Exception as error:
        logging.exception('Check connection err: ')
        print(error)
|
from .gcn import GCNConv
from .gat import GATConv, SparseGATConv
from .sgc import SGConv
from .trainable_sgc import TrainableSGConv
from .median import MedianConv
from .trimmed_conv import TrimmedConv
from .dagnn import PropConv
from .tagcn import TAGConv
from .appnp import APPNProp, PPNProp
from .graphsage import SAGEAggregator
from .ssgc import SSGConv
from .sim_attention import SimilarityAttention
from .sat import EigenConv, SpectralEigenConv, GraphEigenConv
|
import os
import pickle
class CIFAR10():
    """Minimal CIFAR-10 batch-file dataset.

    Each path in `paths` must point to a pickled CIFAR-10 batch file: a
    dict with byte keys b'data' (N x 3072 uint8 rows) and b'labels'.
    Indexing assumes the standard 10000 samples per batch file.
    """

    def unpickle(self, file):
        """Load one pickled batch file as a byte-keyed dict."""
        with open(file, 'rb') as f:
            dictionary = pickle.load(f, encoding='bytes')
        return dictionary

    def __init__(self, paths, transform=None):
        """paths -- iterable of batch-file paths; transform is accepted but unused."""
        super(CIFAR10, self).__init__()
        self.dictionary = []
        for p_ in paths:
            rdir = os.path.abspath(p_)
            self.dictionary.append(self.unpickle(rdir))

    def __len__(self):
        # A standard CIFAR-10 batch file holds exactly 10000 samples.
        return len(self.dictionary) * 10000

    def __getitem__(self, i):
        # BUG FIX: the original used `i % 9999`, which served the wrong row
        # for every index >= 9999 and never served row 9999 at all.
        batch = self.dictionary[i // 10000]
        image = batch[b'data'][i % 10000]
        label = batch[b'labels'][i % 10000]
        # Rows are flat 3072-vectors; reshape to (channels, 32, 32).
        image = image.reshape(-1, 32, 32)
        return image, label
|
import logging
from api.models import Category
from .base import BaseCategoryTestCase
logging.disable(logging.ERROR)
class CategoryTestCase(BaseCategoryTestCase):
    """
    Snapshot tests for the createCategory GraphQL mutation.
    """

    def test_user_can_create_a_new_category(self):
        # Happy path: an authenticated user creates a brand-new category.
        query = '''
            mutation subscribe {
                createCategory(input: {
                    name: "someCategory",
                    description: "testCategory is still a test",
                    featuredImage: "https://github.com",
                })
                {
                    clientMutationId
                    newCategory {
                        name
                        description
                        featuredImage
                    }
                }
            }
        '''
        self.request.user = self.user
        result = self.client.execute(query, context_value=self.request)
        # Snapshot testing: the whole response is compared to a stored snapshot.
        self.assertMatchSnapshot(result)

    def test_user_cannnot_create_exact_category_twice(self):
        # The identical category is created directly in the DB first, so the
        # mutation should be rejected as a duplicate.
        query = '''
            mutation subscribe {
                createCategory(input: {
                    name: "someCategory",
                    description: "testCategory is still a test",
                    featuredImage: "https://github.com",
                })
                {
                    clientMutationId
                    newCategory {
                        name
                        description
                        featuredImage
                    }
                }
            }
        '''
        Category.objects.create(
            name="someCategory",
            description="testCategory is still a test",
            featured_image="https://github.com",
        )
        self.request.user = self.user
        result = self.client.execute(query, context_value=self.request)
        self.assertMatchSnapshot(result)

    def test_user_cannnot_create_category_with_existing_name(self):
        # "Swimming Meetup 1" is presumably seeded by BaseCategoryTestCase —
        # the name collision alone should make the mutation fail.
        query = '''
            mutation subscribe {
                createCategory(input: {
                    name: "Swimming Meetup 1",
                    description: "testCategory is still a test",
                    featuredImage: "https://github.com",
                })
                {
                    clientMutationId
                    newCategory {
                        name
                        description
                        featuredImage
                    }
                }
            }
        '''
        self.request.user = self.user
        result = self.client.execute(query, context_value=self.request)
        self.assertMatchSnapshot(result)
|
from geopy.distance import geodesic
from pandas import read_csv
def get_distance_matrix(df_lat_long):
    """Build a city-by-city distance matrix from latitude/longitude data.

    df_lat_long -- DataFrame indexed by city with 'Latitud' and 'Longitud'
    columns. Returns a new DataFrame (same index) with one column per city
    holding the geodesic distance in whole kilometres.
    """
    # Start from an empty-column frame that keeps the original index.
    matrix = df_lat_long.drop(columns=df_lat_long.columns.tolist())
    coords = df_lat_long.to_dict()
    cities = df_lat_long.index.tolist()
    for origin in cities:
        origin_point = (round(coords['Latitud'][origin], 6),
                        round(coords['Longitud'][origin], 6))
        column = []
        for destination in cities:
            destination_point = (round(coords['Latitud'][destination], 6),
                                 round(coords['Longitud'][destination], 6))
            column.append(int(geodesic(origin_point, destination_point).km))
        matrix[origin] = column
    return matrix
# REST API calls: fetch a page with requests and scrape its links with BeautifulSoup.
import requests
from bs4 import BeautifulSoup
#import urllib2
#import re
# Fetch the Google India landing page and report the HTTP status code.
response = requests.get("https://www.google.co.in/")
print(response.status_code)
# Parse the HTML and print the target of every anchor tag.
soup = BeautifulSoup(response.text, 'html.parser')
for link in soup.find_all('a'):
    print(link.get('href'))
    print()
# Older urllib2-based equivalent, kept for reference:
#html_page = urllib2.urlopen("https://www.google.co.in/")
#soup = BeautifulSoup(html_page)
#for link in soup.findAll('a', attrs={'href': re.compile("^http://")}):
#    print(link.get('href'))
|
from collections import defaultdict
class Node(object):
    """A program in the AoC day-7 tower: a name, a weight, and the list of
    programs it directly supports. Every instance is also registered in a
    class-level name index so nodes can be looked up by name."""

    # name -> list of nodes with that name (shared registry).
    name_index = defaultdict(list)

    def __init__(self, a, b):
        """a -- node name; b -- node weight."""
        self.name = a
        self.weight = b
        # BUG FIX: the original indexed with the *global* variable `name`
        # (Node.name_index[name]) instead of the constructor argument — it
        # only worked because the driver script happened to have a matching
        # global in scope at construction time.
        Node.name_index[self.name].append(self)
        self.tower = []

    def add_tower(self, t):
        """Record `t` as a directly-supported child node."""
        self.tower.append(t)

    def return_sum(self):
        """Return the total weight of this node plus its whole subtree."""
        return self.weight + sum(child.return_sum() for child in self.tower)

    def print_child_list(self, k):
        """Print the subtree below this node, indented k tabs per level."""
        for child in self.tower:
            print("\t" * k + child.name)
            child.print_child_list(k + 1)

    def print_tower(self):
        """Print this node's name followed by its direct children."""
        print(self.name, end=":")
        for child in self.tower:
            print(child.name, end=" ")

    @classmethod
    def find_by_name(cls, n):
        """Return the list of nodes registered under name `n`."""
        return Node.name_index[n]
# Read the whole puzzle input (AoC 2017 day 7) into a single string.
data = ""
with open("day7data.txt") as f:
    data = f.read()
def findRoot(datalist):
    """Return (and print) the name of the root program.

    datalist -- the raw puzzle text (one "name (weight) [-> kids]" per line).
    The root is the program that supports others but is nobody's child.

    BUG FIX: the original ignored its parameter entirely and re-split the
    global `data`; callers pass that same string, so using the parameter is
    behavior-identical at the existing call sites but no longer depends on
    a global.
    """
    lines = datalist.split('\n')
    # Lines with more than two tokens carry a "-> child, child" suffix.
    parent_lines = [line for line in lines if len(line.split()) > 2]
    parent_names = [line.split()[0] for line in parent_lines]
    # Collect every child name (tokens after "name (weight) ->", commas stripped).
    child_names = []
    for line in parent_lines:
        for token in line.split()[3:]:
            child_names.append(token.rstrip(','))
    # The root is the only parent that never appears as a child.
    for candidate in parent_names:
        if candidate not in child_names:
            print(candidate)
            return candidate
datalist = data.split('\n')  # splits data into individual lines
towerStringList = datalist  # list to contain only programs
rootStringList = []  # lines describing programs that support others
for i in range(0, len(datalist)):
    # Lines with more than two tokens have a "-> child, child" suffix.
    if len(datalist[i].split()) > 2:
        rootStringList.append(datalist[i])
# Create a Node for every program line: "name (weight) ...".
towerNodeList = []
for i in range(0, len(towerStringList)):
    tmp = towerStringList[i].split()
    name = tmp[0]
    weight = int(tmp[1].lstrip('(').rstrip(')'))
    towerNodeList.append(Node(name, weight))
# Wire parent -> child links for every supporting program.
for m in range(0, len(rootStringList)):
    tempList = rootStringList[m].split()
    tempList.pop(1)  # drop the "(weight)" token
    tempList.pop(1)  # drop the "->" token
    for i in range(0, len(tempList)):
        tempList[i] = tempList[i].strip(",")
    for k in range(1, len(tempList)):
        Node.find_by_name(tempList[0])[0].add_tower(Node.find_by_name(tempList[k])[0])
# Print every multi-child node with its subtree weight — used to eyeball
# the unbalanced branch for part 2.
weightList = []
for node in towerNodeList:
    if len(node.tower) > 1:
        node.print_tower()
        print(node.return_sum())
        weightList.append(node.return_sum())
# Dump the whole tree, then inspect the children of the suspect branch
# (index 4 was identified by inspection of the output above).
Node.find_by_name(findRoot(data))[0].print_child_list(0)
for node in Node.find_by_name(findRoot(data))[0].tower[4].tower:
    print(node.name)
    print(node.return_sum())
# finished 7 part 2 by deduction, in the end argoys was wrong, simple backtrack of arqoys fixes the issue
"""
Given a binary tree, return the bottom-up level order traversal of its nodes'
values. (ie, from left to right, level by level from leaf to root).
For example:
Given binary tree [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
return its bottom-up level order traversal as:
[
[15,7],
[9,20],
[3]
]
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
from collections import deque
class Solution:
    def levelOrderBottom(self, root):
        """Return the values of the tree level by level, deepest level first."""
        if root is None:
            return []
        return self.level_order_v2(root)

    def level_order_v2(self, root):
        """
        Breadth-first traversal that gathers one list of values per level,
        then reverses so leaves come before the root.
        """
        pending = deque([root])
        per_level = []
        while pending:
            # Everything currently queued belongs to the same level.
            values = []
            for _ in range(len(pending)):
                node = pending.popleft()
                values.append(node.val)
                for child in (node.left, node.right):
                    if child is not None:
                        pending.append(child)
            per_level.append(values)
        per_level.reverse()
        return per_level
|
import numpy as np
from chainer import Variable
from chainer import serializers
import sys
sys.path.append('./networks')
import matplotlib.pyplot as plt
from matplotlib.widgets import Button
import argparse
# CLI: trained generator weights (HDF5) and which network variant to build.
parser = argparse.ArgumentParser()
parser.add_argument('--output_dir','-o',default = None)
parser.add_argument('--input_file','-i',default = None, help = 'Path to h5 file')
parser.add_argument('--mode','-m',help = 'Train mode (mnist_fc, mnist_conv, people_conv)', default = None)
args = parser.parse_args()
# Import the Generator implementation matching the selected mode.
if args.mode == 'mnist_fc':
    from mnist_fc import Generator
elif args.mode == 'mnist_conv':
    from mnist_conv import Generator
elif args.mode == 'people_conv':
    from people_conv import Generator
else:
    exit()
G = Generator()
len_z = G.in_size  # dimensionality of the latent vector
if args.input_file != None:
    serializers.load_hdf5(args.input_file, G)
else:
    print "You should select input file"
    exit()
# Sample a batch of latent vectors in [-1, 1) and generate images.
# NOTE: Python 2 source (print statement, xrange below).
batchsize = 25
z = Variable(np.random.uniform(-1,1,(batchsize,len_z)).astype(np.float32))
y1 = G(z,False)
# Display the generated batch on a 5x5 grid of axes.
fig = plt.figure()
ax = []
for i in xrange(batchsize):
    ax.append(fig.add_subplot(5,5,i+1))
    ax[i].imshow(np.array(y1.data[i]).reshape(G.imshape[1],G.imshape[2]),cmap='gray')
    ax[i].axis('off')
class callback(object):
    """Button callback: resamples the latent batch and redraws the grid."""
    def suffle(self,event):
        # Draw a fresh latent batch, regenerate, and repaint every subplot.
        z = Variable(np.random.uniform(-1,1,(batchsize,len_z)).astype(np.float32))
        y1 = G(z,False)
        for i in xrange(batchsize):
            ax[i].imshow(np.array(y1.data[i]).reshape(G.imshape[1],G.imshape[2]),cmap='gray')
        plt.draw()
# Wire a "Suffle" (sic) button that resamples the batch on each click.
c = callback()
axsuffle = plt.axes([0.8, 0.01, 0.1, 0.075])
button = Button(axsuffle, 'Suffle')
button.on_clicked(c.suffle)
plt.show()
|
def solve(arr):
    """Return the one integer in `arr` whose negation never occurs.

    The original tested `x*-1 not in arr` against the list for every
    element — O(n^2). Precomputing a set makes each membership test O(1).
    """
    present = set(arr)
    for x in arr:
        if -x not in present:
            return x
'''
In this Kata, you will be given an array of integers whose elements have both a
negative and a positive value, except for one integer that is either only negative
or only positive. Your task will be to find that integer.
For example,
solve([1,-1,2,-2,3]) = 3 --> 3 only has a positive occurrence.
solve([-3,1,2,3,-1,-4,-2]) = -4 --> -4 only has a negative occurrence
solve([1,-1,2,-2,3,3]) = 3 --> the integer that is only positive or only
negative may appear more than once.
Good luck!
'''
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/5/14 下午9:45
# @Author : Lucas Ma
# @File : iterate
# Demonstrations of iteration over dicts, strings, and tuple lists.
d = {'a': 1, 'b': 2, 'c': 3}
for key in d:
    print(key)
for v in d.values():
    print(v)
# Strings are iterable objects too, so they also work in a for loop:
for ch in 'abcdefg':
    print(ch)
# BUG FIX: importing ABCs from `collections` directly was deprecated and
# removed in Python 3.10; they live in `collections.abc`.
from collections.abc import Iterable
print(isinstance('abc', Iterable))
# Tuple unpacking in the loop header iterates pairs directly.
for x, y in [(1, 1), (2, 4), (3, 9)]:
    print(x,y)
|
from unittest import TestCase
from panopto_client.usage import UsageReporting, PanoptoAPIException
from panopto_client.tests import instance_args
import mock
@mock.patch.object(UsageReporting, '_instance',
                   return_value=mock.sentinel.instance)
@mock.patch.object(UsageReporting, '_request')
class PanoptoUsageTest(TestCase):
    """Unit tests for the UsageReporting SOAP client with the transport mocked.

    Class-level patches are injected into every test method in reverse
    decorator order: (self, mock_request, mock_instance).
    """

    def test_init(self, mock_request, mock_instance):
        # A fresh client targets the usage-reporting port, impersonates
        # nobody, and defaults to the live data source.
        client = UsageReporting()
        self.assertEqual(client._port, 'BasicHttpBinding_IUsageReporting')
        self.assertEqual(client._actas, None)
        self.assertEqual(client._data, client._live)

    def test_getUserDetailedUsage(self, mock_request, mock_instance):
        client = UsageReporting()
        try:
            result = client.getUserDetailedUsage('test-user-id')
        except TypeError:
            # The mocked _request returns a Mock, so response parsing may
            # raise TypeError — the call arguments are what this test checks.
            pass
        # _instance must have been asked for auth and pagination SOAP objects.
        self.assertEqual(instance_args(mock_instance.call_args_list), [
            'ns0:AuthenticationInfo', 'ns0:Pagination'])
        mock_request.assert_called_with('GetUserDetailedUsage', {
            'auth': mock.sentinel.instance, 'userId': 'test-user-id',
            'pagination': mock.sentinel.instance})
|
# -*- coding: cp932 -*-
"""このソースコードは blanco Frameworkによって自動生成されています。
"""
class SampleJisX0201CharacterGroup:
    """Sample of JIS X 0201 (8-bit): 0x20-0x7e and 0xa1-0xdf.
    (Does not affect the blancoCharacterGroup implementation.)

    The generated original tested all 159 characters with individual
    equality checks. The group is exactly two contiguous Unicode ranges —
    U+0020..U+007E (ASCII printable) and U+FF61..U+FF9F (halfwidth
    katakana, the cp932 mapping of 0xa1..0xdf) — so the checks collapse
    to two range comparisons with identical behavior.
    """

    def __init__(self, encoding='cp932'):
        """Class initializer.

        encoding='cp932' -- byte encoding of the strings passed in.
        """
        self.encoding = encoding

    def match(self, argCheck):
        """Return True if argCheck is a character belonging to this group.

        argCheck -- the (encoded) character to check.
        """
        argCheckUnicode = unicode(argCheck, self.encoding)
        # The original compared against single characters only, so anything
        # that decodes to zero or several characters can never match.
        if len(argCheckUnicode) != 1:
            return False
        # 0x20-0x7e (Windows-31J) == U+0020-U+007E: printable ASCII.
        if u'\u0020' <= argCheckUnicode <= u'\u007e':
            return True
        # 0xa1-0xdf (Windows-31J) == U+FF61-U+FF9F: halfwidth katakana.
        if u'\uff61' <= argCheckUnicode <= u'\uff9f':
            return True
        return False

    def matchAll(self, argCheck):
        """Return True if every character of argCheck belongs to the group.

        argCheck -- the (encoded) string to check; must not be None.
        """
        if argCheck is None:
            raise ValueError("メソッド[matchAll]のパラメータ[argCheck]にnullが与えられました。しかし、このパラメータにnullを与えることはできません。")
        argCheckUnicode = unicode(argCheck, self.encoding)
        for arg in argCheckUnicode:
            if self.match(arg.encode(self.encoding)) == False:
                return False
        return True

    def matchAny(self, argCheck):
        """Return True if argCheck contains at least one group character.

        argCheck -- the (encoded) string to check; must not be None.
        """
        if argCheck is None:
            raise ValueError("メソッド[matchAny]のパラメータ[argCheck]にnullが与えられました。しかし、このパラメータにnullを与えることはできません。")
        argCheckUnicode = unicode(argCheck, self.encoding)
        for arg in argCheckUnicode:
            if self.match(arg.encode(self.encoding)):
                return True
        return False
|
"""
Python Wechaty - https://github.com/wechaty/python-wechaty
Authors: Huan LI (李卓桓) <https://github.com/huan>
Jingjing WU (吴京京) <https://github.com/wj-Mcat>
2018-now @copyright Wechaty
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import annotations
from typing import (
TYPE_CHECKING,
# overload,
# cast,
Optional,
)
from wechaty_puppet import get_logger # type: ignore
# pylint:disable=R0401
if TYPE_CHECKING:
from wechaty_puppet import Puppet # type: ignore
from .wechaty import Wechaty
log = get_logger('Accessory')
class Accessory:
    """
    Base class giving subclasses shared access to the process-wide Puppet
    and Wechaty singletons (set once, read many).

    Translate the function from TypeScript to Python
    See: https://github.com/wechaty/wechaty/blob/master/src/accessory.ts
    """
    # Class-level singletons shared by every Accessory subclass.
    _puppet: Optional[Puppet] = None
    _wechaty: Optional[Wechaty] = None

    @classmethod
    def set_puppet(cls, new_puppet: Puppet) -> None:
        """Install the global puppet; raises AttributeError if already set."""
        if cls._puppet is not None:
            raise AttributeError('can not set twice')
        cls._puppet = new_puppet

    @classmethod
    def set_wechaty(cls, new_wechaty: Wechaty) -> None:
        """Install the global wechaty; raises AttributeError if already set."""
        if cls._wechaty is not None:
            raise AttributeError('can not set twice')
        cls._wechaty = new_wechaty

    @classmethod
    def get_puppet(cls) -> Puppet:
        """Return the global puppet; raises AttributeError if unset."""
        if cls._puppet is None:
            raise AttributeError('puppet not found')
        return cls._puppet

    @classmethod
    def get_wechaty(cls) -> Wechaty:
        """Return the global wechaty; raises AttributeError if unset."""
        if cls._wechaty is None:
            raise AttributeError('wechaty not found')
        return cls._wechaty

    @property
    def puppet(self) -> Puppet:
        """Instance-style accessor for the shared puppet; raises if unset."""
        if self._puppet is None:
            raise AttributeError('puppet not set')
        return self._puppet

    @property
    def wechaty(self) -> Wechaty:
        """
        Instance-style accessor for the shared wechaty; raises if unset.
        """
        if self._wechaty is None:
            raise AttributeError('wechaty not set')
        return self._wechaty
|
"""
CCT 建模优化代码
GPU 加速示例 track_multi_particle_beamline_for_magnet_with_multi_qs 2
作者:赵润晓
日期:2021年5月6日
"""
# 因为要使用父目录的 cctpy 所以加入
from os import error, path
import sys
sys.path.append(path.dirname(path.abspath(path.dirname(__file__))))
from hust_sc_gantry import HUST_SC_GANTRY
from cctpy import *
ga32 = GPU_ACCELERATOR(float_number_type=GPU_ACCELERATOR.FLOAT32)
ga64 = GPU_ACCELERATOR(float_number_type=GPU_ACCELERATOR.FLOAT64,block_dim_x=512)
# ---- track_multi_particle_beamline_for_magnet_with_multi_qs -----
# 多个 qs 测试
bl1 = HUST_SC_GANTRY().create_first_bending_part_beamline()
bl2 = HUST_SC_GANTRY(agcct12_current=-3000).create_first_bending_part_beamline()
p1 = ParticleFactory.create_proton_along(bl1,kinetic_MeV=215)
p2 = ParticleFactory.create_proton_along(bl1,kinetic_MeV=220)
p11_cpu,p21_cpu = p1.copy(),p2.copy()
p12_cpu,p22_cpu = p1.copy(),p2.copy()
p1_gpu32,p2_gpu32 = p1.copy(),p2.copy()
print("track_multi_particle_beamline_for_magnet_with_multi_qs")
footstep=100*MM
ParticleRunner.run_only([p11_cpu,p21_cpu],bl1,bl1.get_length(),footstep)
ParticleRunner.run_only([p12_cpu,p22_cpu],bl2,bl2.get_length(),footstep)
print(p11_cpu.detailed_info())
print(p21_cpu.detailed_info())
print(p12_cpu.detailed_info())
print(p22_cpu.detailed_info())
pll = ga64.track_multi_particle_beamline_for_magnet_with_multi_qs(
[bl1,bl2],[p1_gpu32,p2_gpu32],bl1.get_length(),footstep
)
print(pll[0][0].detailed_info())
print(pll[0][1].detailed_info())
print(pll[1][0].detailed_info())
print(pll[1][1].detailed_info())
print((p11_cpu-pll[0][0]).detailed_info())
print((p21_cpu-pll[0][1]).detailed_info())
print((p12_cpu-pll[1][0]).detailed_info())
print((p22_cpu-pll[1][1]).detailed_info())
# Particle[p=(3.687315812380205, 1.548315945537494, -0.003352065021200123), v=(119474899.55705348, 126923892.97270872, -352485.58348381834)], rm=2.0558942080656965e-27, e=1.6021766208e-19, speed=174317774.94179922, distance=4.149802255227576]
# Particle[p=(3.6902588367117777, 1.5457564023956827, -0.003130092145109502), v=(121103380.25921707, 127398432.4832374, -329143.847303274)], rm=2.0648075176021083e-27, e=1.6021766208e-19, speed=175781619.95982552, distance=4.149802255227576]
# Particle[p=(3.687478027359044, 1.548166411708122, -0.0039737849740400085), v=(119517094.38436584, 126885165.8237462, -432788.91453453223)], rm=2.0558942080656965e-27, e=1.6021766208e-19, speed=174317774.94179922, distance=4.149802255227576]
# Particle[p=(3.6903875694662838, 1.545650405905456, -0.0037170902813668744), v=(121140927.39202842, 127363741.12784411, -405102.59567924944)], rm=2.0648075176021083e-27, e=1.6021766208e-19, speed=175781619.95982552, distance=4.149802255227576]
# Particle[p=(3.687315812380205, 1.5483159455374929, -0.0033520650212005175), v=(119474899.55705343, 126923892.97270869, -352485.58348386886)], rm=2.0558942080656965e-27, e=1.6021766208e-19, speed=174317774.94179922, distance=4.149802255227576]
# Particle[p=(3.6902588367117777, 1.5457564023956827, -0.0031300921451098366), v=(121103380.25921713, 127398432.4832374, -329143.8473033173)], rm=2.0648075176021083e-27, e=1.6021766208e-19, speed=175781619.95982552, distance=4.149802255227576]
# Particle[p=(3.6874780273590444, 1.5481664117081209, -0.003973784974042229), v=(119517094.3843659, 126885165.82374611, -432788.91453478823)], rm=2.0558942080656965e-27, e=1.6021766208e-19, speed=174317774.94179922, distance=4.149802255227576]
# Particle[p=(3.6903875694662838, 1.545650405905456, -0.0037170902813662204), v=(121140927.39202844, 127363741.12784408, -405102.59567917266)], rm=2.0648075176021083e-27, e=1.6021766208e-19, speed=175781619.95982552, distance=4.149802255227576]
# Particle[p=(0.0, 1.1102230246251565e-15, 3.946495907847236e-16), v=(4.470348358154297e-08, 2.9802322387695312e-08, 5.052424967288971e-08)], rm=0.0, e=0.0, speed=0.0, distance=0.0]
# Particle[p=(0.0, 0.0, 3.3480163086352377e-16), v=(-5.960464477539063e-08, 0.0, 4.330649971961975e-08)], rm=0.0, e=0.0, speed=0.0, distance=0.0]
# Particle[p=(-4.440892098500626e-16, 1.1102230246251565e-15, 2.220446049250313e-15), v=(-5.960464477539063e-08, 8.940696716308594e-08, 2.5599729269742966e-07)], rm=0.0, e=0.0, speed=0.0, distance=0.0]
# Particle[p=(0.0, 0.0, -6.539907504432563e-16), v=(-1.4901161193847656e-08, 2.9802322387695312e-08, -7.677590474486351e-08)], rm=0.0, e=0.0, speed=0.0, distance=0.0] |
from database.models import db_session, Base, Department, Employee
from sqlalchemy import create_engine

# NOTE(review): convert_unicode was removed in SQLAlchemy 1.4 — confirm the
# pinned SQLAlchemy version before upgrading.
engine = create_engine('sqlite:///database.sqlite3', convert_unicode=True)
Base.metadata.create_all(bind=engine)

# Seed the Human Resources department with a few employees; the department
# row is persisted via the relationship cascade when the employees are added.
hr = Department(name='Human Resources')

for employee_name in ('Srinivasa Modalavalasa',
                      'Venkata Vangi Varapu',
                      'Jagarlapudi Channamma'):
    db_session.add(Employee(name=employee_name, department=hr))

db_session.commit()
|
def power(first_number, second_number):
    """Return first_number raised to the power second_number."""
    result = first_number ** second_number
    return result
def mod_power(first_number, second_number, mod):
    """Return (first_number ** second_number) % mod.

    Uses the three-argument built-in pow(), which performs modular
    exponentiation in O(log exponent) multiplications instead of
    materialising the full (potentially enormous) power first.
    Note: for negative exponents pow(a, b, m) computes the modular
    inverse (Python 3.8+) rather than a float remainder.
    """
    return pow(first_number, second_number, mod)
# Read the base, the exponent and the modulus from stdin, one per line.
first_number, second_number, mod = (int(input()) for _ in range(3))

print(power(first_number, second_number))
print(mod_power(first_number, second_number, mod))
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Design a 31-tap FIR high-pass filter with a Kaiser window, plot its
amplitude and phase response, and print the taps quantised by 2**17.

Created on Thu Jul 23 10:20:31 2020
@author: aureoleday
"""
import matplotlib.pyplot as plt
# from scipy import signal
import scipy.signal as signal
import numpy as np
FS = 4e3  # sample rate [Hz]
nyq = FS/2  # Nyquist frequency [Hz]
T = 1/FS  # sample period [s] (not used below)
filt_ord = 31  # number of taps (odd, as required for a high-pass FIR)
FC = 0.06*nyq  # cutoff frequency [Hz]
# Kaiser window with beta=8; cutoff is normalised to Nyquist for firwin.
b = signal.firwin(filt_ord,cutoff=FC/nyq,window=("kaiser",8),pass_zero='highpass')
w, h = signal.freqz(b)
fig, ax1 = plt.subplots()
ax1.set_title('Digital filter frequency response')
# Magnitude in dB on the left axis.
ax1.plot(w, 20 * np.log10(abs(h)), 'b')
ax1.set_ylabel('Amplitude [dB]', color='b')
ax1.set_xlabel('Frequency [rad/sample]')
# Unwrapped phase on a twin right axis.
ax2 = ax1.twinx()
angles = np.unwrap(np.angle(h))
ax2.plot(w, angles, 'g')
ax2.set_ylabel('Angle (radians)', color='g')
ax2.grid()
ax2.axis('tight')
plt.show()
# Quantise the floating-point taps to integers (scale by 2**17) for
# fixed-point hardware use.
c = np.array(list(map(int,b*(2**17))))
print(c)
|
from collections import deque
from itertools import zip_longest
from math import isclose
from typing import List
from leetcode import TreeNode, test, new_tree
def average_of_levels(root: TreeNode) -> List[float]:
    """Return the average node value of each level of a binary tree,
    top to bottom (breadth-first)."""
    if not root:
        return []
    averages = []
    level = [root]
    while level:
        averages.append(sum(node.val for node in level) / len(level))
        # Next level: all non-None children, left-to-right.
        level = [child
                 for node in level
                 for child in (node.left, node.right)
                 if child]
    return averages
# Regression check against the expected per-level averages. isclose gives
# float-tolerant comparison; zip_longest pads with NaN so a length mismatch
# fails (NaN is never close to anything).
test(
    average_of_levels,
    [
        (new_tree(3, 9, 20, None, None, 15, 7), [3, 14.5, 11]),
    ],
    equals_func=lambda lhs, rhs: all(
        isclose(left, right)
        for left, right in zip_longest(lhs, rhs, fillvalue=float("nan"))
    ),
)
|
import random
from game import constants
from game.actor import Actor
from game.location import Location
class Word(Actor):
    '''This class keeps track of the score value for each word, and resets
    the word's screen position when the word is guessed correctly.

    _score_value (int): how much each word is worth (its text length)'''

    def __init__(self):
        super().__init__()
        # Give the word an initial random position before anything else.
        self.reset()
        self._score_value = 0

    def reset(self):
        # Move the word to a random on-screen location.
        # NOTE(review): randint is inclusive of both endpoints — assumes
        # constants.MAX_X / MAX_Y are valid positions; confirm.
        position = Location(random.randint(1,constants.MAX_X), random.randint(1,constants.MAX_Y))
        self.set_position(position)

    def get_score(self, _score_value):
        # NOTE(review): the _score_value parameter is ignored and immediately
        # shadowed; the score is always len(self._text) (presumably set by
        # the Actor base class — confirm against actor.py).
        _score_value = len(self._text)
        return _score_value
|
import os
from subprocess import call
import sys

# Absolute path of the directory this launcher lives in.
__location__ = os.path.realpath(
    os.path.join(os.getcwd(), os.path.dirname(__file__)))

# Start the main GUI page with the same Python interpreter.
main_page = os.path.join(__location__, 'GUIS/MainPage.py')
call([sys.executable, main_page])
|
from nltk.corpus import wordnet

# All synsets (word senses) of "innocent".
syns=wordnet.synsets("innocent")
print(syns) # a list of Synsets with part of speech, eg:- Synset(good.n.01) first noun
print(syns[1].lemmas()) # [Lemma('good.n.02.good'), Lemma('good.n.02.goodness')]
# For each sense: print its name, synonyms, antonyms, definition and examples.
for i in syns:
    print(i.name()) # good.n.01
    for l in i.lemmas(): # synonyms list
        print("synonym : ",l.name()) # synonyms eg:- good, goodness
        if l.antonyms():
            k=l.antonyms() # antonym list
            for j in k:
                print("antonym : ",j.name()) # antonym
    print(i.definition()) # definition
    print(i.examples()) # example usage
    print("\n")

# Wu-Palmer similarity (0..1) between two specific senses.
w1=wordnet.synset("computer.n.01")
w2=wordnet.synset("pc.n.01")
print(w1.wup_similarity(w2)) # generates semantic similarity between two words
|
from Veiculo import Veiculo
class Moto(Veiculo):
    """A motorcycle; extends Veiculo with engine displacement (cilindrados)."""

    def __init__(self):
        # NOTE(review): Veiculo.__init__ is never invoked here — confirm the
        # base class needs no initialisation, otherwise add super().__init__().
        self.cilindrados = None
from django.apps import AppConfig
import numpy as np # To work with arrays
import cv2
from django.urls import path, include
class BackgroundConfig(AppConfig):
    """Django application configuration for the `background` app."""

    # App label Django uses to register this application.
    name = 'background'
    # Commented-out model-loading experiments, kept as found:
    #MODEL_PATH = path("model")
    #BERT_PRETRAINED_PATH = path("model/uncased_L-12_H-768_A-12/")
    #ABEL_PATH = Path("label/")
    #predictor = background.get_colors(get_image('img'), 5)
    #get_colors(get_image('sample_image.jpg'), 5)
    #BertClassificationPredictor(model_path = MODEL_PATH/"multilabel-emotion-color-suggestion.bin",
    #pretrained_path = BERT_PRETRAINED_PATH,
    #label_path = LABEL_PATH, multi_label=True)
# This script shows how to get all connected faces of a bmesh
# There are two versions of the function. a simple one
# and one that can take angle, material and large meshes into account.
import bpy
import bmesh
# Simple - get all linked faces
def get_linked_faces(f):
    """Collect `f` and every face transitively connected to it.

    Each visited face is tagged so it is never revisited; a face that is
    already tagged contributes nothing.
    """
    if f.tag:
        # Already collected on an earlier visit.
        return []
    f.tag = True
    collected = [f]
    # Only edges shared by exactly two faces connect neighbouring faces.
    for edge in (e for e in f.edges if len(e.link_faces) == 2):
        for neighbour in edge.link_faces:
            if not neighbour.tag:
                # Depth-first walk into the untagged neighbour.
                collected.extend(get_linked_faces(neighbour))
    return collected
""" Get linked faces
Args:
f ('BMFace') - the current face to check for linked faces
Kwargs:
stack ('int') - the current recursive stack count.
max_angle ('float') - the maximum angle for connected faces in radian
Returns:
The connected faces by max angle threshold, material-id,
without exceeding max stack trace
"""
def get_linked_faces(f, stack=0, max_angle=3.1416, match_material=False):
# Fixme: Find non-recursive alternative
# Make pretty
if f.tag:
return []
f_linked = [f]
m_idx = f.material_index
f.tag = True
# Select edges that link two faces
edges = [e for e in f.edges if len(e.link_faces) == 2]
for e in edges:
faces = [elem for elem in e.link_faces if not elem.tag]
if not len(faces) == 0:
angle = e.calc_face_angle_signed()
if angle <= max_angle:
if match_material:
for elem in faces:
if f.material_index is m_idx:
# Recursive
if stack < 900:
f_linked.extend(get_linked_faces(elem, stack=stack + 1, max_angle=max_angle, match_material=match_material))
else:
print('Stopped recursive call, else it might exceed maximum stack count.')
else:
for elem in faces:
# Recursive
if stack < 900:
f_linked.extend(get_linked_faces(elem, stack=stack + 1, max_angle=max_angle, match_material=match_material))
else:
print('Stopped recursive call, else it might exceed maximum stack count.')
return f_linked
def main():
    """Demo: collect and print all faces linked to the first face of the
    first selected object."""
    obj = bpy.context.selected_objects[0]
    # change mode to editmode
    bpy.ops.object.mode_set(mode='EDIT')
    bm = bmesh.from_edit_mesh(obj.data)
    # Required before indexing bm.faces by integer.
    bm.faces.ensure_lookup_table()
    linked_faces = get_linked_faces(bm.faces[0])
    print(linked_faces)
    # Do something with the linked faces ...
    # NOTE(review): Blender's docs discourage freeing a bmesh obtained via
    # from_edit_mesh (it is owned by the edit-mode mesh) — confirm intent.
    bm.free()
main()
|
import cv2 as cv
import numpy as np
import matplotlib.pyplot as plt
import sys
def binarization(img, smooth):
    """Adaptive mean-threshold the smoothed image (inverse binary output).

    Note: `img` is accepted for symmetry with the other helpers but is not
    used; thresholding operates on the pre-smoothed image.
    """
    return cv.adaptiveThreshold(smooth, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 21, 5)
def erosion(img, core):
    """Erode `img` once with the structuring element `core`."""
    return cv.erode(img, core, iterations = 1)
def dilation(img, core):
    """Dilate `img` once with the structuring element `core`."""
    return cv.dilate(img, core, iterations = 1)
def shows(array):
    """Display a 2x2 grid of images in one window (blocks until a key)."""
    top_row, bottom_row = array
    grid = np.vstack([np.hstack(top_row), np.hstack(bottom_row)])
    cv.imshow("Adaptative binarization", grid)
    cv.waitKey(0)
# Open the image named on the command line, convert it to grayscale and
# smooth it before segmentation.
img = cv.imread(sys.argv[1])
img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
smooth = cv.GaussianBlur(img, (9,9), 0)#Smoothing the img
img_seg = binarization(img, smooth)
# 5x5 structuring element for all morphological operations below.
core = np.ones((5,5), np.uint8)
img_ero = erosion(img_seg, core)
img_dila = dilation(img_seg, core)
# Opening = erosion then dilation; closing = dilation then erosion.
opening = dilation(img_ero, core)
closing = erosion(img_dila, core)
shows([[smooth, img_seg], [img_ero, img_dila]])
shows([[img_ero, img_dila],[closing, opening]])
|
# -*- coding: utf-8 -*-
import os, time, requests, urllib
from datetime import datetime
class default():
    """Scrapes Naver shopping search result pages and records on which
    page a given store appears for a fixed keyword."""

    def __init__(self):
        pass

    def run(self):
        """Walk up to max_page result pages, appending any hit to ranking.txt."""
        now_time = datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M:%S')
        max_page = 30
        keyword_ko = '마블 케이스'
        # URL-encode the Korean keyword for the query string.
        keyword_en = urllib.parse.quote(keyword_ko)
        store_name = '유니폰'
        count = 0
        for page in range(1,max_page,1):
            count +=1
            print(count)
            url='https://search.shopping.naver.com/search/all.nhn?origQuery='+'%s' % keyword_en+'&pagingIndex='+'%s' % page +'&pagingSize=40&viewType=list&sort=rel&frm=NVSHPAG&query=%EB%A7%88%EB%B8%94%20%EC%BC%80%EC%9D%B4%EC%8A%A4'
            payload = {'key1': 'value1', 'key2': 'value2'}
            r = requests.post(url, data=payload)
            result = r.text
            # A substring hit means the store is listed on this page.
            if result.find('%s' % store_name) > -1:
                print('%s 키워드: %s page : %s\n' % (now_time,keyword_ko,page))
                # BUGFIX: use a context manager so the file handle is closed
                # even if write() raises (the original open/close pair leaked
                # the handle on error).
                with open('ranking.txt','a') as file:
                    print('rankin.txt make success')
                    file.write('%s 키워드: %s page : %s\n' % (now_time,keyword_ko,page))
def main():
    """Run the scraper, printing a traceback instead of crashing."""
    try:
        DF = default()
        DF.run()
    except Exception:
        # BUGFIX: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit still propagate instead of being swallowed.
        import traceback
        traceback.print_exc()
if __name__ == "__main__":
stime = time.time()
main()
etime = time.time()
print(round(etime-stime,3),' eclapsed')
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import textwrap
from dataclasses import dataclass
from typing import Iterable, Mapping
from pants.backend.go.subsystems.golang import GolangSubsystem
from pants.backend.go.util_rules import goroot
from pants.backend.go.util_rules.goroot import GoRoot
from pants.core.util_rules.system_binaries import BashBinary
from pants.engine.env_vars import EnvironmentVars, EnvironmentVarsRequest
from pants.engine.fs import EMPTY_DIGEST, CreateDigest, Digest, FileContent, MergeDigests
from pants.engine.internals.selectors import Get, MultiGet
from pants.engine.process import Process, ProcessResult
from pants.engine.rules import collect_rules, rule
from pants.util.frozendict import FrozenDict
from pants.util.logging import LogLevel
@dataclass(frozen=True)
class GoSdkProcess:
    """A request to run the Go toolchain (`go <command>`) in the sandbox.

    A custom __init__ is needed because this frozen dataclass normalises
    its arguments (iterables become tuples; GOPROXY is forced off unless
    downloads are explicitly allowed), so all fields are assigned via
    object.__setattr__.
    """

    # Arguments to the `go` binary (the binary itself is prepended later).
    command: tuple[str, ...]
    # Human-readable description of the process for logging/UI.
    description: str
    env: FrozenDict[str, str]
    input_digest: Digest
    working_dir: str | None
    output_files: tuple[str, ...]
    output_directories: tuple[str, ...]
    # When True, the wrapper script substitutes the literal
    # `__PANTS_SANDBOX_ROOT__` in args with the real sandbox root.
    replace_sandbox_root_in_args: bool

    def __init__(
        self,
        command: Iterable[str],
        *,
        description: str,
        env: Mapping[str, str] | None = None,
        input_digest: Digest = EMPTY_DIGEST,
        working_dir: str | None = None,
        output_files: Iterable[str] = (),
        output_directories: Iterable[str] = (),
        allow_downloads: bool = False,
        replace_sandbox_root_in_args: bool = False,
    ) -> None:
        # Frozen dataclass: fields must be set through object.__setattr__.
        object.__setattr__(self, "command", tuple(command))
        object.__setattr__(self, "description", description)
        object.__setattr__(
            self,
            "env",
            (
                FrozenDict(env or {})
                if allow_downloads
                # Keep the process hermetic: disable module downloads unless
                # the caller explicitly allows them.
                else FrozenDict({**(env or {}), "GOPROXY": "off"})
            ),
        )
        object.__setattr__(self, "input_digest", input_digest)
        object.__setattr__(self, "working_dir", working_dir)
        object.__setattr__(self, "output_files", tuple(output_files))
        object.__setattr__(self, "output_directories", tuple(output_directories))
        object.__setattr__(self, "replace_sandbox_root_in_args", replace_sandbox_root_in_args)
@dataclass(frozen=True)
class GoSdkRunSetup:
    """The bash wrapper script (and its digest) used to invoke the Go SDK."""

    digest: Digest
    script: FileContent

    # Env var naming the directory the wrapper chdirs into before running go.
    CHDIR_ENV = "__PANTS_CHDIR_TO"
    # Env var that, when non-empty, enables sandbox-root substitution in args.
    SANDBOX_ROOT_ENV = "__PANTS_REPLACE_SANDBOX_ROOT"
@rule
async def go_sdk_invoke_setup(goroot: GoRoot) -> GoSdkRunSetup:
    """Create and digest the bash wrapper script that invokes `go`."""
    # Note: The `go` tool requires GOPATH to be an absolute path which can only be resolved
    # from within the execution sandbox. Thus, this code uses a bash script to be able to resolve
    # absolute paths inside the sandbox.
    go_run_script = FileContent(
        "__run_go.sh",
        textwrap.dedent(
            f"""\
            export GOROOT={goroot.path}
            sandbox_root="$(/bin/pwd)"
            export GOPATH="${{sandbox_root}}/gopath"
            export GOCACHE="${{sandbox_root}}/cache"
            /bin/mkdir -p "$GOPATH" "$GOCACHE"
            if [ -n "${GoSdkRunSetup.CHDIR_ENV}" ]; then
              cd "${GoSdkRunSetup.CHDIR_ENV}"
            fi
            if [ -n "${GoSdkRunSetup.SANDBOX_ROOT_ENV}" ]; then
              export __PANTS_SANDBOX_ROOT__="$sandbox_root"
              args=("${{@//__PANTS_SANDBOX_ROOT__/$sandbox_root}}")
              set -- "${{args[@]}}"
            fi
            exec "{goroot.path}/bin/go" "$@"
            """
        ).encode("utf-8"),
    )
    digest = await Get(Digest, CreateDigest([go_run_script]))
    return GoSdkRunSetup(digest, go_run_script)
@rule
async def setup_go_sdk_process(
    request: GoSdkProcess,
    go_sdk_run: GoSdkRunSetup,
    bash: BashBinary,
    golang_env_aware: GolangSubsystem.EnvironmentAware,
    goroot: GoRoot,
) -> Process:
    """Turn a GoSdkProcess request into a concrete sandboxed Process that
    runs the wrapper script with the requested `go` command."""
    # Merge the wrapper script into the request's inputs and fetch the
    # user-configured environment variables, concurrently.
    input_digest, env_vars = await MultiGet(
        Get(Digest, MergeDigests([go_sdk_run.digest, request.input_digest])),
        Get(
            EnvironmentVars,
            EnvironmentVarsRequest(golang_env_aware.env_vars_to_pass_to_subprocesses),
        ),
    )
    env = {
        **env_vars,
        **request.env,
        GoSdkRunSetup.CHDIR_ENV: request.working_dir or "",
        # Part of the cache key, so results are invalidated when the Go
        # version or target platform changes.
        "__PANTS_GO_SDK_CACHE_KEY": f"{goroot.full_version}/{goroot.goos}/{goroot.goarch}",
    }
    if request.replace_sandbox_root_in_args:
        env[GoSdkRunSetup.SANDBOX_ROOT_ENV] = "1"
    # Disable the "coverage redesign" experiment on Go v1.20+ for now since Pants does not yet support it.
    if goroot.is_compatible_version("1.20"):
        exp_str = env.get("GOEXPERIMENT", "")
        exp_fields = exp_str.split(",") if exp_str != "" else []
        exp_fields = [exp for exp in exp_fields if exp != "coverageredesign"]
        if "nocoverageredesign" not in exp_fields:
            exp_fields.append("nocoverageredesign")
        env["GOEXPERIMENT"] = ",".join(exp_fields)
    return Process(
        argv=[bash.path, go_sdk_run.script.path, *request.command],
        env=env,
        input_digest=input_digest,
        description=request.description,
        output_files=request.output_files,
        output_directories=request.output_directories,
        level=LogLevel.DEBUG,
    )
@dataclass(frozen=True)
class GoSdkToolIDRequest:
    """Request the build ID of a named Go tool (e.g. `compile`, `link`)."""

    tool_name: str
@dataclass(frozen=True)
class GoSdkToolIDResult:
    """The build ID reported by a Go tool, used for cache invalidation."""

    tool_name: str
    tool_id: str
@rule
async def compute_go_tool_id(request: GoSdkToolIDRequest) -> GoSdkToolIDResult:
    """Run `go tool <name> -V=full` and capture the tool's full version/ID."""
    result = await Get(
        ProcessResult,
        GoSdkProcess(
            ["tool", request.tool_name, "-V=full"],
            description=f"Obtain tool ID for Go tool `{request.tool_name}`.",
        ),
    )
    return GoSdkToolIDResult(tool_name=request.tool_name, tool_id=result.stdout.decode().strip())
def rules():
    """Export this module's rules plus the goroot rules they depend on."""
    return (*collect_rules(), *goroot.rules())
|
from PyPDF2 import PdfFileWriter, PdfFileReader, PdfFileMerger
from reportlab.pdfgen import canvas
import random
import string
import time
import os
import configparser
class water:
    """Stamp image and text watermarks onto every page of a PDF.

    Parameters
    ----------
    inputFile : path of the source PDF.
    watermarkDir : path of the watermark image.
    config : path of an .ini file with a [Default] section
        (alpha, Font, FontSize, FontColorR/G/B, PageWidth, PageHeight).
    info : dict with 'name', 'title', 'id' and 'inst' keys used for the
        text stamps in do().
    """

    def __init__(self, inputFile="", watermarkDir="", config="", info=None):
        # Watermarks are drawn onto a temporary single-page PDF that is
        # later merged over every page of the source document.
        self.canvas = canvas.Canvas('temp.pdf')
        self.srcDir = inputFile
        self.wmDir = watermarkDir
        self.width = 300
        self.height = 500
        # BUGFIX: the original default `info={}` is a mutable default
        # argument shared across calls; use a None sentinel instead.
        self.info = {} if info is None else info
        self.out = ""
        try:
            self.config = configparser.ConfigParser()
            self.config.read(config)
            self.readDefaultConfig()
        except Exception:
            # Narrowed from a bare `except:`; message kept as-is.
            print('Error: init')

    def readDefaultConfig(self):
        """Load alpha/font/page settings from the [Default] config section."""
        errPlace = ""
        try:
            c = self.config['Default']
            errPlace = 'alpha'
            # NOTE(review): reportlab canvases have no `globalAlpha`
            # attribute — this line only stores the value; confirm intent
            # (setFillAlpha may have been meant).
            self.canvas.globalAlpha = c['alpha']
            errPlace = 'font'
            self.canvas.setFont(c['Font'], int(c['FontSize']))
            errPlace = 'fontRGB'
            self.canvas.setFillColorRGB(float(c['FontColorR']), float(c['FontColorG']), float(c['FontColorB']))
            errPlace = 'Page'
            self.width = int(c['PageWidth'])
            self.height = int(c['PageHeight'])
        except Exception:
            # errPlace pinpoints which setting failed to parse.
            print('Error: readDefaultConfig - ', errPlace)

    def imgWM(self, x=-1, y=-1, w=300, h=300):
        """Draw the watermark image at (x, y); -1 means a random position."""
        if x == -1:
            x = random.randint(0, self.width)
        if y == -1:
            # BUGFIX: the vertical coordinate was drawn from the page
            # *width*; use the page height so the stamp can land anywhere
            # on the page.
            y = random.randint(0, self.height)
        self.canvas.drawImage(self.wmDir, x, y, width=w, height=h, mask='auto')

    def strWM(self, x=0, y=0, src=""):
        """Draw the string `src` at (x, y) on the watermark canvas."""
        self.canvas.drawString(x, y, src)

    def merge(self):
        """Merge the watermark page over every source page and write the
        result next to the source file with an `_out` suffix."""
        self.canvas.save()
        watermark_obj = PdfFileReader('temp.pdf')
        watermark_page = watermark_obj.getPage(0)
        pdf_reader = PdfFileReader(self.srcDir)
        pdf_writer = PdfFileWriter()
        for page in range(pdf_reader.getNumPages()):
            page = pdf_reader.getPage(page)
            page.mergePage(watermark_page)
            pdf_writer.addPage(page)
        filepath, fullflname = os.path.split(self.srcDir)
        fname, ext = os.path.splitext(fullflname)
        if filepath == ".":
            filepath = "./"
        output = filepath + "/" + fname + "_out" + ext
        self.out = fname + "_out" + ext
        with open(output, 'wb') as out:
            pdf_writer.write(out)

    def do(self):
        """Stamp the image watermark plus an md5 tag and the metadata
        strings, then merge; returns the output file name."""
        self.imgWM()
        n = "oregonstate" + str(random.randint(0, 1000000))
        import hashlib
        md5 = hashlib.md5()
        # Tag derived from the user's name and the current timestamp.
        s = self.info['name'] + str(int(time.time()))
        md5.update(s.encode('utf-8'))
        self.strWM(30, 30, md5.hexdigest())
        self.strWM(250, 750, self.info['title'] + "_" + self.info['id'])
        self.strWM(250, 30, self.info['inst'])
        self.strWM(30, 750, str(int(time.time())))
        self.merge()
        return self.out
|
from . import api, utils, exceptions
BRACKET_URL = '/phase_group/'
VALID_BRACKET_PARAMS = ['sets', 'entrants']
def players(bracket_id, filter_response=True):
    """Fetch a bracket's entrants; simplified player dicts unless raw is asked."""
    raw = api.get(BRACKET_URL + str(bracket_id), VALID_BRACKET_PARAMS)
    return _filter_player_response(raw) if filter_response else raw
def sets(bracket_id, filter_response=True):
    """Fetch a bracket's sets; simplified set dicts unless raw is asked."""
    raw = api.get(BRACKET_URL + str(bracket_id), VALID_BRACKET_PARAMS)
    return _filter_set_response(raw) if filter_response else raw
def sets_played_by_player(bracket_id, tag):
    """Return the sets in the given bracket that the player `tag` played.

    Raises exceptions.ValidationError if `tag` cannot be converted to a
    lowercase string.
    """
    try:
        tag = str(tag)
        tag = tag.lower()
    except Exception:
        # BUGFIX: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit are not converted into a ValidationError.
        msg = "Given player tag is not and cannot be converted into a string"
        raise exceptions.ValidationError(msg)
    uri = BRACKET_URL + str(bracket_id)
    response = api.get(uri, VALID_BRACKET_PARAMS)
    return _filter_sets_given_player(response, tag)
def _filter_sets_given_player(response, tag):
    """Pair the player whose tag matches `tag` (case-insensitive) with
    every set they appeared in; [] when no player matches."""
    all_players = _filter_player_response(response)
    result_player = None
    # Last match wins, as in the original scan.
    for candidate in all_players:
        if candidate['tag'].lower() == tag:
            result_player = candidate
    if result_player is None:
        return []
    entrant_id = str(result_player['entrant_id'])
    player_sets = []
    # Keep only sets the player was involved in, annotating each one.
    for _set in _filter_set_response(response):
        as_entrant1 = entrant_id == _set['entrant_1_id']
        as_entrant2 = entrant_id == _set['entrant_2_id']
        if not (as_entrant1 or as_entrant2):
            continue
        _set['player_id'] = result_player['entrant_id']
        if as_entrant1:
            _set['opponent_id'] = int(_set['entrant_2_id'])
        else:
            _set['opponent_id'] = int(_set['entrant_1_id'])
        player_sets.append(_set)
    # Attach full info about each opponent.
    for played in player_sets:
        for opponent in all_players:
            if played['opponent_id'] == opponent['entrant_id']:
                played['opponent_info'] = opponent
    return {
        'player': result_player,
        'sets': player_sets
    }
def _filter_player_response(response):
    """Extract simplified player dicts from the raw entrants payload."""
    entrants = response['entities']['entrants']
    return [_get_player_from_entrant(entrant) for entrant in entrants]
def _filter_set_response(response):
    """Extract completed, non-preview sets from the raw bracket payload.

    Returns [] when the payload lacks entities, sets or groups.
    """
    entities = response.get('entities', None)
    if entities is None:
        return []
    bracket_sets = response['entities'].get('sets', None)
    if bracket_sets is None:
        return []
    groups = entities.get('groups', None)
    if groups is None:
        return []
    is_final_bracket = _is_final_bracket(groups)
    results_sets = []
    for bracket_set in bracket_sets:
        # don't return `projected` brackets
        # BUGFIX: the original used `break` here, which aborted the whole
        # loop at the first preview set and silently dropped every later
        # real set; `continue` skips only the preview entry.
        if 'preview' in str((bracket_set['id'])):
            continue
        _set, success = _get_set_from_bracket(bracket_set, is_final_bracket)
        if success:
            results_sets.append(_set)
    return results_sets
def _is_final_bracket(groups):
is_final_bracket = False
w_id = groups.get('winnersTargetPhaseId', None)
if w_id == 'None' or w_id is None:
is_final_bracket = True
return is_final_bracket
def _get_set_from_bracket(bracket_set, is_final_bracket):
# ignore bye sets
if bracket_set['entrant1Id'] is None or bracket_set['entrant2Id'] is None:
return None, False
# winner's id of `None` or loser's id of `None` means the set was not played
if bracket_set['winnerId'] is None or bracket_set['loserId'] is None:
return None, False
_set = {
'id': str(bracket_set['id']), # make all IDS ints?
'entrant_1_id': str(bracket_set['entrant1Id']),
'entrant_2_id': str(bracket_set['entrant2Id']),
'entrant_1_score': bracket_set['entrant1Score'],
'entrant_2_score': bracket_set['entrant2Score'],
'winner_id': str(bracket_set['winnerId']),
'loser_id': str(bracket_set['loserId']),
'full_round_text': bracket_set['fullRoundText'] if is_final_bracket else 'pools',
'medium_round_text': bracket_set['midRoundText'] if is_final_bracket else 'pools',
'short_round_text': bracket_set['shortRoundText'] if is_final_bracket else 'pools',
'bracket_id': str(bracket_set['phaseGroupId'])
}
return _set, True
def _get_player_from_entrant(entrant):
participant_id = str(entrant['participantIds'][0])
player_id = str(entrant['playerIds'][participant_id])
player_dict = entrant['mutations']['players'][player_id]
participant_dict = entrant['mutations']['participants'][participant_id]
return {
'entrant_id': entrant['id'],
'tag': player_dict['gamerTag'],
#'fname': utils.get_subfield(participant_dict, 'contactInfo', 'nameFirst'),
#'lname': utils.get_subfield(participant_dict, 'contactInfo', 'nameLast'),
'state': player_dict['state'],
'country': player_dict['country'],
'final_placement': entrant['finalPlacement'],
'seed': entrant['initialSeedNum']
}
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
def laugh():
    """Print a laugh to stdout."""
    message = 'hahaha'
    print(message)
|
# -*- coding: utf-8 -*-
# # 前処理・後処理の機能をまとめたファイル
"""
# dataset作成 #
pklデータの連結等
"""
import os
import re
import glob
import cv2
import pickle
import matplotlib as plt
import numpy as np
import random
from scipy import ndimage as ndi
from mlc.io import read_raw, vtk_to_numpy, shiftscale_uchar, numpy_to_vtk,write_raw
from PIL import Image
from mlc.function import make_pickle,stop,check_existfile,concatenate_pkl,Read_Parameter
## メインの関数
def save_sizedata(size,dataname):
    """
    Save the original image sizes to a text file (one "name:size" line
    per image).

    Parameters
    ---------
    size : list
        One size entry per image, parallel to `dataname`.
    dataname : list
        Image names, parallel to `size`.

    Result
    -------
    Writes "sizedata.txt" under `root_path`.

    NOTE(review): `root_path` and `task` are not defined in this function
    or at module scope — calling it as-is raises NameError. They appear to
    have been meant as parameters or module globals (cf. make_dataset);
    confirm intent before use.
    """
    # Save the dataset details as text.
    with open(root_path + "sizedata.txt",mode='w') as f_dataset:
        for t in range(len(task)):
            f_dataset.write(dataname[t]+ ":" +size[t] )
def make_dataset(root,task,size):
    """
    Build the training dataset from //aka/share/amed/amed_liver_label.

    For every task folder: load each image (jpg, falling back to png),
    convert to 8-bit grayscale, resize to `size`, write a resized copy to
    disk, and pickle the per-folder image/label arrays. Finally the
    per-folder pickles are concatenated into image.pkl / label.pkl plus a
    dataset.txt summary under a hard-coded output directory.

    Parameters
    -----------
    root : str
        Path to the data used to build the dataset.
    task : list
        Directory names (under `root`) to include in the dataset.
    size : tuple
        Target (width, height) of every image.

    Result
    --------
    image.pkl / label.pkl / dataset.txt written under `root_path` below.
    """
    x,y = size
    num_list = []
    image_size,image_name = [],[]
    mode = "image"
    for s in range(len(task)):
        # Path() builds (and creates) the image/label directory layout.
        path = Path(root + task[s])
        print("\n\n"+task[s])
        for k in range(2):#image,label
            # NOTE(review): `list` shadows the builtin within this scope.
            list = []
            z = 0
            paths = path[k]+ '/*.jpg'
            files = glob.glob(paths)
            if not files:
                # Fall back to png when the folder holds no jpg files.
                paths = path[k]+ '/*.png'
                files = glob.glob(paths)
            # Decide image/label mode from the resize-directory path parts.
            a = path[k + 2].split('/')
            for p in range(len(a)):
                if a[p] in ["image"]:
                    mode = "image"
                if a[p] in ["label"]:
                    mode = "label"
                else:
                    continue
            for f in files:
                z = z + 1
                image = Image.open(f)
                # 8-bit grayscale.
                image = image.convert('L')
                image_name.append(os.path.basename(f))
                image_size.append(image.size)
                image_resize = image.resize((x, y))
                image = np.array(image_resize)
                #check image write
                if mode == "image":
                    cv2.imwrite(path[k+2]+"/{0:04d}.jpg".format(z) , image)
                if mode == "label":
                    #image = np.where(image > 0,255,0)# 条件満たす 255 それ以外 0
                    #image = np.where(image == 255,0,255)# 条件満たす 255 それ以外 0
                    # Binarise labels: drop near-white (>200), everything
                    # else that is non-zero becomes 255.
                    image[image > 200] = 0
                    image[image > 0] = 255
                    cv2.imwrite(path[k+2]+"/{0:04d}.jpg".format(z) , image)
                # Add a trailing channel axis: (H, W) -> (H, W, 1).
                image = np.expand_dims(image,axis=-1)
                #data type convert (image => float : label => int)
                if mode == "image":
                    image = np.asarray(image, dtype=np.uint8)
                    addPath = "/image.pkl"
                if mode == "label":
                    image = np.asarray(image, dtype=np.uint8)
                    addPath = "/label.pkl"
                list.append(image)
                print("\r{0:d}".format(z),end="")
            #save pickle data
            list = np.asarray(list)
            save_path = path[4] + "/pkl"
            check_existfile(save_path)
            save_path = save_path + addPath
            # NOTE(review): this handle is never closed — consider `with`.
            f = open(save_path,'wb')
            pickle.dump(list,f)
            print(" ")
            num_list.append(z)
        # Save the per-folder image size information.
        size_path = os.path.dirname(path[0])
        print("size_path",size_path)
        with open(size_path + "/sizedata.txt",mode='w') as f_sizedata:
            for sn in range(len(image_name)):
                f_sizedata.write(image_name[sn]+ ":" +str(image_size[sn])+"\n" )
        image_size,image_name = [],[]
    dataset_image,dataset_label = [],[]
    for i in range(len(task)):
        dataset_image.append(root + task[i] + "/pkl/image.pkl")
        dataset_label.append(root + task[i] + "/pkl/label.pkl")
    # Concatenate the per-folder pickles into one image/label pair.
    ori_image = load_pkl(dataset_image[0])
    ori_label = load_pkl(dataset_label[0])
    for i in range(len(dataset_image) - 1):
        #image
        add_image = load_pkl(dataset_image[i + 1])
        ori_image = concatenate_pkl(ori_image,add_image)
        #label
        add_label = load_pkl(dataset_label[i + 1])
        ori_label = concatenate_pkl(ori_label,add_label)
    root_path = "D:/Users/takami.h/Desktop/AMED_proiect/U-net/AmedSegmentation/image/"
    # NOTE(review): these two handles are immediately shadowed by the
    # `with` statements below and never closed — likely leftovers.
    f_image = open(root_path + "image.pkl",'wb')
    f_label = open(root_path + "label.pkl",'wb')
    with open(root_path + "image.pkl",mode='wb') as f_image:
        pickle.dump(ori_image,f_image)
    with open(root_path + "label.pkl",mode='wb') as f_label:
        pickle.dump(ori_label,f_label)
    # Save the dataset details as text.
    with open(root_path + "dataset.txt",mode='w') as f_dataset:
        f_dataset.write("data Path :" + root)
        f_dataset.write("\n画像枚数 :" + str(len(ori_image)))
        f_dataset.write("\n解像度 :" + str(ori_image[0].shape))
        f_dataset.write("\n各フォルダの画像枚数")
        for t in range(len(task)):
            f_dataset.write("\n -"+task[t] + " :" + str(num_list[t]))
## Helper functions
def load_pkl(filename):
    """Unpickle and return the object stored in `filename`."""
    with open(filename, mode='rb') as handle:
        return pickle.load(handle)
def Path(path):
    """
    # # Build the image/label directory layout under `path` # #

    Creates any of the four directories that do not yet exist.

    Parameters
    --------------
    path : string
        Dataset root directory.

    return
    --------------
    tuple
        (image_path, label_path, resized_image_path, resized_label_path, path)
    """
    image_dir = path + "/image"
    label_dir = path + "/label"
    resized_image_dir = image_dir + "/resize"
    resized_label_dir = label_dir + "/resize"
    # Create every directory that is missing.
    for directory in (image_dir, label_dir, resized_image_dir, resized_label_dir):
        check_existfile(directory)
    return (image_dir, label_dir, resized_image_dir, resized_label_dir, path)
if __name__ == "__main__":
"""
※注意事項※
1.rootを指定する事
-dataまでのroot path
2.taskを指定する事
-どのファイルからdataset作るか指定する事
"""
root = "//aka/share/amed/amed_liver_label/"
task = ["Image06",
"Image07",
"Image09",
"Image10",
"Image11",
"Image17",
"Image15_with_tumor",
"Amed_train_image"]
size = (560,560)
make_dataset(root,task,size)
|
import csv
import numpy as np
import matplotlib.pyplot as plt
import skimage
from scipy.io import loadmat
from skimage.util import random_noise
from sklearn.datasets import fetch_mldata
from sklearn.decomposition import PCA, KernelPCA
def pca_reduce(train_set, test_set, component_count):
    """Reconstruct *test_set* from its projection onto the top
    *component_count* principal components fitted on *train_set*."""
    model = PCA(n_components=component_count)
    model.fit(train_set)
    projected = model.transform(test_set)
    return model.inverse_transform(projected)
def kpca_reduce(train_set, test_set, component_count):
    """Reconstruct *test_set* via RBF kernel PCA with *component_count*
    components fitted on *train_set* (uses the learned pre-image map)."""
    model = KernelPCA(kernel="rbf", n_components=component_count, fit_inverse_transform=True)
    model.fit(train_set)
    projected = model.transform(test_set)
    return model.inverse_transform(projected)
# Linear pca version works, rest doesn't
def figure2():
    """Fit linear PCA and RBF kernel PCA on 300 random samples of one USPS
    digit; prints the first kernel-PCA dual coefficient (alpha) vector.

    The eigenvector plotting code was left disabled by the original author.
    """
    usps = fetch_mldata('usps')
    target_digit = 0
    # usps.target is 1-based; shift so digit labels are 0-9.
    digit_indices = np.where((usps.target - 1) == target_digit)[0]
    sample = np.random.choice(digit_indices, size=300, replace=False)
    samples = usps.data[sample]

    linear_model = PCA(n_components=256)
    linear_model.fit(samples)
    eigenvecs = linear_model.components_  # kept for (disabled) eigenimage plots

    kernel_model = KernelPCA(kernel="rbf", n_components=256, fit_inverse_transform=True)
    kernel_model.fit(samples)
    print(kernel_model.alphas_[0])
# Does not work at all, the code here makes no sense atm!!
# NOTE(review): acknowledged broken by the author. Visible inconsistencies:
# `fractions` is built but never appended to or used, and the error term
# compares `denoised[components-1]` against a single fixed test image
# (`testing_set[6]`) while the plot shows `linear_denoised[0]` — presumably
# these should refer to the same sample; the intended pairing cannot be
# recovered from this file alone.
def figure3():
    """Plot linear-PCA denoised digits and print a squared-error figure
    per component count, comparing against precomputed images loaded
    from the 'pre_three' .mat file."""
    usps = fetch_mldata('usps')
    digit = 3
    # 350 random samples of the chosen digit (labels in usps.target are 1-based).
    data = np.random.choice(np.where((usps.target-1) == digit)[0], size=350, replace=False)
    training_set, testing_set = data[:300], data[-50:]
    fractions = []  # never used — see NOTE above
    # Precomputed denoised images; assumes 'pre_three.mat' exists with key
    # 'pre_images' — TODO confirm the expected shape (>=20 images of 256 px).
    denoised = loadmat('pre_three')['pre_images']
    for components in range(1,21):
        linear_denoised = pca_reduce(usps.data[training_set], usps.data[testing_set], components)
        # Squared reconstruction error vs. one fixed test image — see NOTE above.
        linear_fraction = np.sum(np.power((denoised[components-1] - usps.data[testing_set[6]]), 2))
        plt.imshow(np.reshape(linear_denoised[0], (16, 16)), cmap=plt.cm.Greys, interpolation='none')
        plt.show()
        print(linear_fraction)
def _plot_denoising_grid(usps, training_set, add_noise):
    """Render one 13x10 figure of denoising results.

    For each digit 0-9 (one column each) the grid shows: the clean test
    image (row 1), its noisy version produced by *add_noise* (row 2),
    linear-PCA reconstructions for 1/4/16/64/256 components (rows 3-7),
    and kernel-PCA reconstructions for the same counts (rows 8-12).

    Parameters
    ----------
    usps : Bunch
        The fetched USPS dataset (``.data`` and 1-based ``.target``).
    training_set : ndarray
        Images used to fit both PCA models.
    add_noise : callable
        Maps a batch of clean test images to their noisy versions.
    """
    for digit in range(0, 10):
        data = np.random.choice(np.where((usps.target - 1) == digit)[0], size=350, replace=False)
        _, testing_set = data[:300], data[-50:]
        noisy_set = add_noise(usps.data[testing_set])
        # Row 1: original testing image.
        plt.subplot(13, 10, digit + 1)
        plt.imshow(np.reshape(usps.data[testing_set[2]], (16, 16)), cmap=plt.cm.Greys, interpolation='none')
        plt.axis('off')
        # Row 2: testing image with noise applied.
        plt.subplot(13, 10, digit + 11)
        plt.imshow(np.reshape(noisy_set[2], (16, 16)), cmap=plt.cm.Greys, interpolation='none')
        plt.axis('off')
        for i, components in enumerate([1, 4, 16, 64, 256]):
            print(components)
            # Rows 3-7: linear PCA reconstruction.
            linear_digit = pca_reduce(training_set, noisy_set, components)
            plt.subplot(13, 10, digit + 21 + 10 * i)
            plt.imshow(np.reshape(linear_digit[2], (16, 16)), cmap=plt.cm.Greys, interpolation='none')
            plt.axis('off')
            # Rows 8-12: kernel PCA reconstruction.
            kernel_digit = kpca_reduce(training_set, noisy_set, components)
            plt.subplot(13, 10, digit + 71 + 10 * i)
            plt.imshow(np.reshape(kernel_digit[2], (16, 16)), cmap=plt.cm.Greys, interpolation='none')
            plt.axis('off')
    plt.show()


def figure4():
    """Compare linear vs. kernel PCA denoising on USPS digits under two
    noise models: additive Gaussian noise and salt-and-pepper noise.

    The two original copy-pasted loops differed only in the noise call,
    so the shared plotting logic lives in _plot_denoising_grid.
    """
    usps = fetch_mldata('usps')
    idx = np.random.randint(9298, size=3000)
    training_set = usps.data[idx, :]
    # Gaussian noise version
    _plot_denoising_grid(
        usps, training_set,
        lambda images: skimage.util.random_noise(images, mode='gaussian', var=0.5 ** 2))
    # Salt-and-pepper (speckle) noise version
    _plot_denoising_grid(
        usps, training_set,
        lambda images: skimage.util.random_noise(images, mode='s&p', amount=0.4, salt_vs_pepper=0.5))
def main():
    """Entry point: only figure 4 is currently generated.

    figure2() and figure3() exist but are disabled.
    """
    figure4()


if __name__ == "__main__":
    main()
|
# -*- coding: utf-8 -*-
# Task (Stepik 2.4.6): you are given an archive with a file structure of
# directories and files:
# https://stepic.org/media/attachments/lesson/24465/main.zip
# Unpack it, then find every directory in the tree that contains at least one
# file with the ".py" extension. The answer is a file listing those
# directories sorted in lexicographic order.
# Sample archive: https://stepic.org/media/attachments/lesson/24465/sample.zip
# Sample answer:  https://stepic.org/media/attachments/lesson/24465/sample_ans.txt
# Shell equivalent:
# unzip main.zip
# find main/ -type f -name "*.py" | xargs -I{} dirname {}|sort|uniq
import os.path

matching_dirs = []
for current_dir, _, filenames in os.walk('./main'):
    # Keep the directory if any entry has a .py extension; normalize the
    # path to forward slashes and drop the leading './'.
    if any(os.path.splitext(name)[1] == '.py' for name in filenames):
        matching_dirs.append(current_dir.replace('\\', '/').replace('./main', 'main'))

with open('2-4-6.list', 'w') as out:
    out.write("\n".join(sorted(matching_dirs)))
# Declares the package's public API: the submodules re-exported by
# `from <package> import *`.
__all__ = ["mongo", "log"] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.