#!/usr/bin/python
from load import LoadState, LoadHandler
from sys import argv, stdout
from base64 import b64encode
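# Helper: quote and join names for embedding in generated JavaScript,
# e.g. arr_join(["a", "b"]) -> '"a", "b"' (illustrative).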
def arr_join(arr):
return ", ".join(map(lambda x: "\"{}\"".format(x), arr))
class Handler(LoadHandler):
def load_file(self, state, file, packs, type):
super(Handler, self).load_file(state, file, packs, type)
stdout.write("/* Including packs: {} */\n".format(", ".join(packs)))
if type == LoadState.TYPE_PACK:
deps = state.getDependencies(packs[0])
if "load" not in packs:
# function(file, provided, required, size, type)
stdout.write('load.addDependency("about:blank", [{}], [{}], 0, {});\n\n'.format(
arr_join(packs), arr_join(deps), LoadState.TYPE_PACK
))
with open(file) as f:
for l in f.readlines():
stdout.write(l)
stdout.write("\n")
if type == LoadState.TYPE_RES:
with open(file, "rb") as f:
buff = f.read()
for p in packs:
stdout.write('load.provideResource("{}", atob("{}"));\n\n'.format(p, b64encode(buff).decode("ascii")))
if type == LoadState.TYPE_BINRES:
with open(file, "rb") as f:
buff = f.read()
for p in packs:
stdout.write('load.provideBinaryResource("{}", "{}");\n\n'.format(p, b64encode(buff).decode("ascii")))
if type == LoadState.TYPE_EXT:
for p in packs:
deps = state.getDependencies(p)
# function(file, provided, required, size, type)
stdout.write('load.addDependency("{}", [\"{}\"], [{}], 0, {});\n\n'.format(
file, p, arr_join(deps), LoadState.TYPE_EXT
))
""" Evaluate a single package """
def evaluate(self, state, pack, type):
super(Handler, self).evaluate(state, pack, type)
if len(argv) != 3:
print("Usage: catter.py deps.json pack")
exit(1)
ls = LoadState(handler=Handler())
ls.loadDeps(argv[1])
ls.importAndEvaluate("load")
ls.importAndEvaluate(argv[2])
stdout.write("load.importAndEvaluate(\"{}\");\n".format(argv[2]))
|
class OpplastException(Exception):
"""General exception class for opplast"""
class VideoIDError(OpplastException):
"""Raised when the video ID cannot be found"""
class ExceedsCharactersAllowed(OpplastException):
"""Raised when a given string exceeds the allowed character limit"""
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RRappdirs(RPackage):
"""An easy way to determine which directories on the users computer
you should use to save data, caches and logs. A port of Python's
'Appdirs' to R."""
homepage = "https://cloud.r-project.org/package=rappdirs"
url = "https://cloud.r-project.org/src/contrib/rappdirs_0.3.1.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/rappdirs"
version('0.3.1', sha256='2fd891ec16d28862f65bb57e4a78f77a597930abb59380e757afd8b6c6d3264a')
depends_on('r@2.14:', type=('build', 'run'))
|
#!/usr/bin/env python
from __future__ import unicode_literals
import argparse
import io
import json
import xml.etree.ElementTree
SubElement = xml.etree.ElementTree.SubElement
XFACTOR = 20
YFACTOR = 20
XMARGIN = 5
YMARGIN = 5
FONT_SIZE = 15
BITMARKER_HEIGHT = 10
def layout(fields, width):
"""
Adds virtual_x1 / virtual_x2 and virtual_y1 / virtual_y2 (in virtual coordinates) to all fields
Returns height in virtual coordinates
"""
# Calculate virtual coordinates
x = 0
y = 0
for field in fields:
size = field['size']
assert size >= 1
assert (x + size <= width) or (x == 0 and size % width == 0)
field['virtual_x1'] = x
field['virtual_y1'] = y
field['virtual_x2'] = x + ((size - 1) % width) + 1
field['virtual_y2'] = y + max(size // width, 1)
y = y + ((x + size) // width)
x = (x + size) % width
return field['virtual_y2']
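# Illustrative sketch (hypothetical values): with width=32, a field list such as
#   [{'label': 'type', 'size': 8}, {'label': 'length', 'size': 24}, {'label': 'payload', 'size': 64}]
# puts 'type' at virtual_x1/x2 0..8 and 'length' at 8..32 on row 0, while the
# 64-bit 'payload' occupies the two full rows below; layout() would return 3 here.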
def plot_file(fn):
with io.open(fn, 'r', encoding='utf-8') as f:
proto = json.load(f)
doc = xml.etree.ElementTree.Element('svg')
doc.attrib['xmlns'] = 'http://www.w3.org/2000/svg'
width = proto.get('width', 32)
large_mark_every = proto.get('large_mark_every', 8)
medium_mark_every = proto.get('medium_mark_every', 4)
fields = proto['fields']
height = layout(fields, width)
# Translate into physical coordinates
document_width = XMARGIN + width * XFACTOR + XMARGIN
document_height = YMARGIN + BITMARKER_HEIGHT + height * YFACTOR + YMARGIN
xcoord = lambda v: XMARGIN + v * XFACTOR
ycoord = lambda v: YMARGIN + BITMARKER_HEIGHT + YFACTOR * v
doc.attrib['viewBox'] = '0 0 %d %d' % (document_width, document_height)
bitmarkers = SubElement(doc, 'g')
# Bit markers
for i in range(0, width + 1):
bm = SubElement(bitmarkers, 'line')
factor = (
1 if (i % large_mark_every) == 0 else
(0.56 if (i % medium_mark_every) == 0 else 0.3)
)
bmheight = factor * BITMARKER_HEIGHT
bm.attrib.update({
'x1': str(xcoord(i)),
'x2': str(xcoord(i)),
'y1': str(ycoord(0) - bmheight),
'y2': str(ycoord(0)),
'style': 'stroke:#000000;stroke-opacity:1;stroke-width:1',
})
for field in fields:
field['x1'] = xcoord(field['virtual_x1'])
field['y1'] = ycoord(field['virtual_y1'])
field['x2'] = xcoord(field['virtual_x2'])
field['y2'] = ycoord(field['virtual_y2'])
g = SubElement(doc, 'g')
g.attrib['id'] = field['label']
# Draw hint lines
vheight = field['virtual_y2'] - field['virtual_y1']
for i in range(1, vheight):
line = SubElement(g, 'line')
y = field['y1'] + i * YFACTOR
line.attrib.update({
'x1': str(field['x1']),
'x2': str(field['x2']),
'y1': str(y),
'y2': str(y),
'style': 'stroke:#000000;stroke-opacity:0.15;stroke-width:0.5',
})
t = SubElement(g, 'text')
t.attrib.update({
'x': str((field['x1'] + field['x2']) / 2),
'y': str((field['y1'] + field['y2']) / 2),
'text-anchor': 'middle',
'dy': '%s' % (.3 * FONT_SIZE),
'font-size': '%s' % FONT_SIZE,
})
t.text = field['label']
r = SubElement(g, 'rect')
r.attrib.update({
'x': str(field['x1']),
'y': str(field['y1']),
'width': str(abs(field['x2'] - field['x1'])),
'height': str(abs(field['y2'] - field['y1'])),
'style': 'fill:none;stroke:#000000;stroke-opacity:1',
})
xml.etree.ElementTree.dump(doc)
def main():
parser = argparse.ArgumentParser(description='Plot protocol formats')
parser.add_argument(
'file', metavar='FILE', help='Protocol description file')
opts = parser.parse_args()
plot_file(opts.file)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import time
import os
import re
import sys
import nltk
from nltk.tokenize import sent_tokenize
import string
from nltk.tokenize import RegexpTokenizer
from nltk.corpus import stopwords
from math import log10
file_input = None
path = 'test_books'
word_counts = {}
number_of_terms_in_document = {}
number_of_docs_with_term = {}
words_tfidf = {}
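# tf-idf computed below: for each "word#document" key,
#   tf  = (occurrences of word in document) / (total terms in document)
#   idf = log10(total documents / documents containing word)
# and the stored list is [tf, idf, tf * idf].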
def tfidf(word_counts, total_number_of_documents):
for word_and_doc in word_counts:
n_terms_appear = int(word_counts[word_and_doc])
terms_in_doc = int(number_of_terms_in_document[word_and_doc.split('#')[1]])
words_tfidf[word_and_doc] = [float(n_terms_appear) / float(terms_in_doc)]
word = word_and_doc.split('#')[0]
words_tfidf[word_and_doc].append(log10(float(total_number_of_documents) / float(number_of_docs_with_term[word])))
words_tfidf[word_and_doc].append(words_tfidf[word_and_doc][0] * words_tfidf[word_and_doc][1])
def preprocess(sentence):
#sentence = sentence.lower()
tokenizer = RegexpTokenizer(r'\w+')
tokens = tokenizer.tokenize(sentence)
filtered_words = [w for w in tokens if not w in stopwords.words('english')]
return filtered_words
def read_input(file):
for line in file:
# split the line into words
yield preprocess(line)
def word_count(word, file):
    key = word + "#" + file
    if key in word_counts:
        word_counts[key] += 1
    else:
        word_counts[key] = 1
        # first time this word is seen in this document: bump its document frequency
        if word in number_of_docs_with_term:
            number_of_docs_with_term[word] += 1
        else:
            number_of_docs_with_term[word] = 1
    if file in number_of_terms_in_document:
        number_of_terms_in_document[file] += 1
    else:
        number_of_terms_in_document[file] = 1
def main(separator='\t'):
start_time = time.time()
total_number_of_documents = 0
for file in os.listdir(path):
total_number_of_documents += 1
current = os.path.join(path, file)
if os.path.isfile(current):
current_file = open(current)
data = read_input(current_file)
for words in data:
for word in words:
word_count(word, file)
tfidf(word_counts, total_number_of_documents)
with open('out.txt', 'w') as f:
for word_and_doc in words_tfidf:
f.write( word_and_doc + "\t" + str(words_tfidf[word_and_doc][0]) + "\t" + str(words_tfidf[word_and_doc][1]) + "\t" + str(words_tfidf[word_and_doc][2]) + "\n")
f.write(str(time.time() - start_time))
print "%f seconds" % (time.time() - start_time)
# print(word_counts)
# print(number_of_terms_in_document)
# print(total_number_of_documents)
# print(number_of_docs_with_term)
# print(words_tfidf)
if __name__ == "__main__":
main()
|
from lin.exception import Success
from lin.redprint import Redprint
from app.libs.jwt_api import member_login_required, get_current_member
from app.models.comment import Comment
from app.validators.v1.comment_forms import CommentContent
comment_api = Redprint('comment')
@comment_api.route('/product/<int:pid>', methods=['GET'])
def get_comments_of_product(pid):
models = Comment.get_comments_of_product(pid)
return models
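# Creating a comment requires a logged-in member; the request body is validated
# by CommentContent and saved together with the current member's id.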
@comment_api.route('', methods=['POST'])
@member_login_required
def create_comment():
form = CommentContent().validate_for_api()
member = get_current_member()
data = {
'member_id': member.id,
**form.data
}
Comment.add(data, throw=True)
return Success(msg='Comment created successfully')
|
from tkinter import *
import tkinter
import first_face_dataset, registeruser, second_face_training, gallery
import mysql.connector
import tkinter.scrolledtext as scrolledtext
from fpdf import FPDF
from PIL import Image,ImageTk
from tkinter import filedialog
from tkinter import ttk
from datetime import datetime
#connector
mydb = mysql.connector.connect(
host="localhost",
user="root",
password="",
db="mynotebook"
)
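# Note: the queries below are built by string concatenation. mysql.connector also
# supports parameterized queries, e.g. (illustrative):
#   mycursor.execute("DELETE FROM images WHERE note_id = %s", (note_id,))
# which avoids SQL injection and quoting issues.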
#delete note
def delete(*values):
mycursor = mydb.cursor()
sql="Delete from user"+str(values[0])+" where id='"+str(values[1])+"'"
mycursor.execute(sql)
mydb.commit()
mycursor = mydb.cursor()
sql="Delete from images where note_id='"+str(values[1])+"'"
mycursor.execute(sql)
mydb.commit()
alert=Tk()
alert.title('Successful!')
alert.minsize(200, 50)
alert.maxsize(200, 50)
alert.configure(background='#456')
Label(alert, text = "Successfully Deleted!",font=('Impact', -20),bg='#456',fg="#42ba96").place(relx = 0.5,
rely = 0.5,
anchor = 'center')
#update note
def update(*values):
if len(values[0])>0 and len(values[1])>0:
mycursor = mydb.cursor()
sql="Update user"+str(values[2])+" set subject='"+str(values[1])+"',note='"+str(values[0])+"' where id='"+str(values[3])+"'"
mycursor.execute(sql)
mydb.commit()
alert=Tk()
alert.title('Successful!')
alert.minsize(400, 50)
alert.maxsize(400, 50)
alert.configure(background='#456')
Label(alert, text = "Successfully Updated!",font=('Impact', -20),bg='#456',fg="#42ba96").place(relx = 0.5,
rely = 0.5,
anchor = 'center')
else:
alert=Tk()
alert.title('Alert')
alert.minsize(800, 400)
alert.maxsize(800, 400)
alert.configure(background='#fff')
Label(alert, text = "Nothing to update due to empty!",font=('Impact', -20),bg='#fff',fg="#df4759").place(relx = 0.5,
rely = 0.5,
anchor = 'center')
#update note function call
def update_text(*values):
noteedit=Tk()
noteedit.title('Edit Notes')
mycursor = mydb.cursor()
sql="SELECT * FROM user"+str(values[2])+" where id="+str(values[3])
mycursor.execute(sql)
for student in mycursor:
for j in range(len(student)):
if j==0:
e = Label(noteedit,width=15, text=student[j],
borderwidth=2,relief='ridge', anchor="w",font=('Impact', -20), bg='#fff', fg='#000')
e.pack(side=TOP, anchor=NW)
e.config(anchor=CENTER)
if j==2:
date_time=datetime.strptime(str(student[j]), '%Y-%m-%d %H:%M:%S')
d = date_time.strftime("%d %B, %Y")
d+=", "
d+= date_time.strftime("%I:%M:%S %p")
e = Label(noteedit,width=30, text=str(d),
borderwidth=2,relief='ridge', anchor="w",font=('Impact', -20), bg='#fff', fg='#000')
e.pack(side=TOP, anchor=NE)
e.config(anchor=CENTER)
sub = Entry(noteedit,font = ('courier', 15, 'bold'), width=50,foreground = 'green',borderwidth=15, relief=tkinter.SUNKEN)
mycursor.execute(sql)
for student in mycursor:
for j in range(len(student)):
if j==1:
sub.insert(tkinter.INSERT,student[j])
sub.pack(side=TOP, anchor=NW,expand=True, fill='both')
txt = scrolledtext.ScrolledText(noteedit, undo=True)
txt['font'] = ('consolas', '12')
mycursor.execute(sql)
for student in mycursor:
for j in range(len(student)):
if j==3:
txt.insert(tkinter.INSERT,student[j])
txt.pack(expand=True, fill='both')
txt.config(font=("consolas", 12), undo=True, wrap='word')
txt.config(borderwidth=3, relief="sunken")
btn = tkinter.Button(noteedit,width=15, text="Update",font=('Impact', -20),fg='#fff', command= lambda:[update(txt.get('1.0', 'end-1c'),str(sub.get()),values[2], values[3])])
btn.configure(background='#5bc0de')
btn.pack()
#get text for inserting note
def get_text(*values):
if len(values[0])>0 and len(values[1])>0:
mycursor = mydb.cursor()
sql="Insert into user"+str(values[2])+" (subject,note) values('"+str(values[1])+"','"+str(values[0])+"')"
mycursor.execute(sql)
mydb.commit()
alert=Tk()
alert.title('Successful!')
alert.minsize(400, 50)
alert.maxsize(400, 50)
alert.configure(background='#456')
Label(alert, text = "Note Added!",font=('Impact', -20),bg='#456',fg="#42ba96").place(relx = 0.5,
rely = 0.5,
anchor = 'center')
else:
alert=Tk()
alert.title('Alert')
alert.minsize(400, 50)
alert.maxsize(400, 50)
alert.configure(background='#fff')
Label(alert, text = "Nothing to add due to empty!",bg="#fff",font=('Impact', -20),fg="#df4759").place(relx = 0.5,
rely = 0.5,
anchor = 'center')
#save as pdf
def pdf(*values):
# save FPDF() class into a
# variable pdf
pdf = FPDF()
# Add a page
pdf.add_page()
# set style and size of font
# that you want in the pdf
pdf.set_font("Arial", style='B', size = 15)
# create a cell
pdf.cell(200, 10, txt = values[1],
ln = 1, align = 'C')
pdf.set_font("Helvetica", size = 15)
#add images
mycursor4 = mydb.cursor()
sql4="SELECT * FROM images where note_id='"+str(values[3])+"' and user_id='"+str(values[2])+"'"
mycursor4.execute(sql4)
if mycursor4:
for images in mycursor4:
for j in range(len(images)):
if j==2:
im = Image.open(images[j])
width, height = im.size
pdf.image(name=images[j], x = None, y = None, w = 190, h = 100, type = '', link = '')
# add another cell
pdf.cell(200, 10, txt = values[0],
ln = 2, align = 'C')
# save the pdf with name .pdf
pdf.output(str(values[2])+"-"+str(values[3])+".pdf")
alert=Tk()
alert.title('Successful!')
alert.minsize(200, 50)
alert.maxsize(200, 50)
alert.configure(background='#456')
Label(alert, text = "Successfully Saved!",font=('Impact', -20),bg='#456',fg="#42ba96").place(relx = 0.5,
rely = 0.5,
anchor = 'center')
#view separate note images
def view_image(*values):
novi = Toplevel()
canvas = Canvas(novi, width = 600, height = 600)
canvas.pack(expand = YES, fill = BOTH)
gif1 = ImageTk.PhotoImage(file = values[0])
#image not visual
canvas.create_image(0, 0, image = gif1, anchor = NW)
#assigned the gif1 to the canvas object
canvas.gif1 = gif1
#view note
def view(*values):
noteview=Tk()
noteview.title('Notes')
mycursor = mydb.cursor()
sql="SELECT * FROM user"+str(values[0])+" where id="+str(values[1])
mycursor.execute(sql)
for student in mycursor:
for j in range(len(student)):
if j==0:
e = Label(noteview,width=15, text=student[j],
borderwidth=2,relief='ridge', anchor="w",font=('Impact', -20), bg='#fff', fg='#000')
e.pack(side=TOP, anchor=NW)
e.config(anchor=CENTER)
if j==2:
date_time=datetime.strptime(str(student[j]), '%Y-%m-%d %H:%M:%S')
d = date_time.strftime("%d %B, %Y")
d+=", "
d+= date_time.strftime("%I:%M:%S %p")
e = Label(noteview,width=30, text=str(d),
borderwidth=2,relief='ridge', anchor="w",font=('Impact', -20), bg='#fff', fg='#000')
e.pack(side=TOP, anchor=NE)
e.config(anchor=CENTER)
sub = Entry(noteview,font = ('courier', 15, 'bold'), width=50,foreground = 'green',borderwidth=15, relief=tkinter.SUNKEN)
mycursor.execute(sql)
for student in mycursor:
for j in range(len(student)):
if j==1:
sub.insert(tkinter.INSERT,student[j])
sub.pack(side=TOP, anchor=NW,expand=True, fill='both')
sub.config(state=DISABLED)
mycursor4 = mydb.cursor()
sql4="SELECT * FROM images where note_id='"+str(values[1])+"' and user_id='"+str(values[0])+"'"
mycursor4.execute(sql4)
txt = scrolledtext.ScrolledText(noteview, undo=True)
txt['font'] = ('consolas', '12')
txt.pack(expand=True, fill='both')
txt1=noteview
counter=0
if mycursor4:
for images in mycursor4:
for j in range(len(images)):
if j==2:
counter+=1
txt1.showoriginal = Button(txt1,width=10, text = "View Image "+str(counter),font=('Impact', -10),fg="#fff",command=lambda images=images: view_image(images[2]))
txt1.showoriginal.configure(background='#f0ad4e')
txt1.showoriginal.pack(side=tkinter.RIGHT)
mycursor.execute(sql)
for student in mycursor:
for j in range(len(student)):
if j==3:
txt.insert(tkinter.INSERT,student[j])
txt.config(font=("consolas", 12), undo=True, wrap='word')
txt.config(borderwidth=5, relief="sunken")
txt.config(state=DISABLED)
btn = tkinter.Button(noteview,width=15, text="Edit",font=('Impact', -20),fg='#fff', command= lambda:[update_text(txt.get('1.0', 'end-1c'),str(sub.get()),values[0], values[1])])
btn.configure(background='#5cb85c')
btn.pack()
btn1 = tkinter.Button(noteview,width=15, text="Save as PDF", font=('Impact', -20),fg='#fff', command= lambda:[pdf(txt.get('1.0', 'end-1c'),str(sub.get()),values[0], values[1])])
btn1.configure(background='#0275d8')
btn1.pack()
#inserting images
def add_images(*values):
mycursor1 = mydb.cursor()
sql="SELECT * FROM user"+str(values[0])+" order by date desc limit 0,1"
mycursor1.execute(sql)
myresult = mycursor1.fetchone()
if myresult:
yourImage=filedialog.askopenfilenames(title = "Select your image",filetypes = [("Image Files","*.png"),("Image Files","*.jpg")])
for i in yourImage:
mycursor2 = mydb.cursor()
id=myresult[0]+1
sql=sql="Insert into images (note_id, path, user_id) values('"+str(id)+"','"+str(i)+"','"+str(values[0])+"')"
mycursor2.execute(sql)
mydb.commit()
alert=Tk()
alert.title('Successful!')
alert.minsize(200, 50)
alert.maxsize(200, 50)
alert.configure(background='#456')
Label(alert, text = "Image Added!",font=('Impact', -20),bg='#456',fg="#42ba96").place(relx = 0.5,
rely = 0.5,
anchor = 'center')
else:
yourImage=filedialog.askopenfilenames(title = "Select your image",filetypes = [("Image Files","*.png"),("Image Files","*.jpg")])
for i in yourImage:
mycursor2 = mydb.cursor()
id=1
sql=sql="Insert into images (note_id, path) values('"+str(id)+"','"+str(i)+"')"
mycursor2.execute(sql)
mydb.commit()
alert=Tk()
alert.title('Successful!')
alert.minsize(200, 50)
alert.maxsize(200, 50)
alert.configure(background='#456')
Label(alert, text = "Image Added!",font=('Impact', -20),bg='#456',fg="#42ba96").place(relx = 0.5,
rely = 0.5,
anchor = 'center')
#add new note
def addnew(*values):
add=Tk()
add.title('Add a new note')
sub = Entry(add,font = ('courier', 15, 'bold'), width=50,foreground = 'green',borderwidth=15, relief=tkinter.SUNKEN)
sub.insert(0, "Subject:")
sub.pack(side=TOP, anchor=NW,expand=True, fill='both')
txt = scrolledtext.ScrolledText(add, undo=True)
txt['font'] = ('consolas', '12')
txt.pack(expand=True, fill='both')
txt.config(font=("consolas", 12), undo=True, wrap='word')
txt.config(borderwidth=5, relief="sunken")
add.showoriginal = tkinter.Button(add,width=15, text="Insert",font=('Impact', -20),fg='#fff', command= lambda:[get_text(txt.get('1.0', 'end-1c'),str(sub.get()),values[0])])
add.showoriginal.configure(background='#5cb85c')
add.showoriginal.pack()
add.showoriginal1 = tkinter.Button(add,width=15, text="Add Images",font=('Impact', -20),fg='#fff', command= lambda:[add_images(values[0])])
add.showoriginal1.configure(background='#0275d8')
add.showoriginal1.pack()
#Gallery
def gallerygo(values):
gallery.galleryview(values)
#profile function
def myprofile(id):
profile=Tk()
profile.title("Profile")
main_frame=Frame(profile)
main_frame.pack(fill=BOTH, expand=1)
profile2=Canvas(main_frame,width = 920, height = 400)
profile2.pack(side=LEFT, fill=BOTH, expand=1)
sb = ttk.Scrollbar(main_frame, orient=VERTICAL, command=profile2.yview)
sb.pack(side = RIGHT, fill = Y)
profile2.configure(yscrollcommand = sb.set )
profile2.bind('<Configure>', lambda e: profile2.configure(scrollregion = profile2.bbox("all")))
profile1=Frame(profile2)
profile2.create_window((0,0), window=profile1, anchor='nw')
Label(profile1, text = "Name: "+str(id), font=('Impact', -15),borderwidth=1, relief="raised", fg='#000').grid(column= 0, row = 1)
mycursor = mydb.cursor()
sql="CREATE TABLE if not exists user"+str(id)+" (id INT(6) UNSIGNED AUTO_INCREMENT PRIMARY KEY, subject VARCHAR(255) NULL, date TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP NULL, note TEXT NULL)"
mycursor.execute(sql)
sql="SELECT * FROM user"+str(id)+" order by date desc"
mycursor.execute(sql)
profile1.showoriginal = Button(profile1, text = "Add a new note",font=('Impact', -15), fg='#fff', command=lambda:[addnew(id)])
profile1.showoriginal.configure(background='#5bc0de')
profile1.showoriginal.grid(column= 1, row = 1)
profile1.showoriginal = Button(profile1, text = "Refresh",font=('Impact', -15), fg='#fff', command=lambda:[myprofile(id)])
profile1.showoriginal.configure(background='#ffff00')
profile1.showoriginal.grid(column= 3, row = 1)
profile1.showoriginal = Button(profile1, text = "Gallery",font=('Impact', -15), fg='#fff', command=lambda:[gallerygo(id)])
profile1.showoriginal.configure(background='#16ca60')
profile1.showoriginal.grid(column= 2, row = 1)
e=Label(profile1,width=15,text='Id',borderwidth=3, relief='ridge',anchor='w',bg='yellow',font=('Impact', -15), fg='#000')
e.config(anchor=CENTER)
e.grid(row=4,column=0)
e=Label(profile1,width=50,text='Subject',borderwidth=3, relief='ridge',anchor='w',bg='yellow',font=('Impact', -15), fg='#000')
e.config(anchor=CENTER)
e.grid(row=4,column=1)
e=Label(profile1,width=30,text='Date',borderwidth=3, relief='ridge',anchor='w',bg='yellow',font=('Impact', -15), fg='#000')
e.config(anchor=CENTER)
e.grid(row=4,column=2)
i=5
for student in mycursor:
for j in range(len(student)):
if j==0:
e = Label(profile1,width=15, text=student[j],fg='#000',bg='#fff',
borderwidth=3,relief='ridge', anchor="w",font=('Impact', -15))
e.config(anchor=CENTER)
e.grid(row=i, column=j)
if j==1:
e = Label(profile1,width=50, text=student[j],fg='#000',bg='#fff',
borderwidth=3,relief='ridge', anchor="w",font=('Impact', -15))
e.config(anchor=CENTER)
e.grid(row=i, column=j)
if j==2:
date_time=datetime.strptime(str(student[j]), '%Y-%m-%d %H:%M:%S')
d = date_time.strftime("%d %B, %Y")
d+=", "
d+= date_time.strftime("%I:%M:%S %p")
e = Label(profile1,width=30, text=str(d),fg='#000',bg='#fff',
borderwidth=3,relief='ridge', anchor="w",font=('Impact', -15))
e.config(anchor=CENTER)
e.grid(row=i, column=j)
if j==3:
profile1.showoriginal = Button(profile1,width=15, text = "View",font=('Impact', -15),fg='#fff',command=lambda student=student: view(id,student[0]),cursor="mouse")
profile1.showoriginal.configure(background='#5cb85c')
profile1.showoriginal.grid(column= 3, row = i)
profile1.showoriginal = Button(profile1,width=15, text = "Delete",font=('Impact', -15),fg='#fff',command=lambda student=student: delete(id,student[0]),cursor="pirate")
profile1.showoriginal.configure(background='#d9534f')
profile1.showoriginal.grid(column= 4, row = i)
i=i+1
if __name__ == '__main__':
# test1.py executed as script: open the profile view; `id` must be a real user id
myprofile(id)
|
import argparse
def init_argparser(step):
parser = argparse.ArgumentParser(
description=f'CLI for the {step} starbucks')
parser.add_argument(
'load_path', type=str, help=f'Loading files for {step}')
parser.add_argument(
'save_path', type=str, help=f'Saving files from {step}')
parser.add_argument(
'--save', action='store_true', help='Should the files be saved?')
args = parser.parse_args()
load_path = args.load_path
save_path = args.save_path
save = args.save
return (load_path, save_path, save)
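# Illustrative invocation (hypothetical paths):
#   python cli.py data/raw data/processed --save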
if __name__ == '__main__':
init_argparser('test')
|
#!/usr/bin/env python
#
# test_colourbutton.py -
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#
import wx
import mock
import pytest
from . import run_with_wx, simclick
import fsleyes_widgets.colourbutton as cb
def test_Create():
run_with_wx(_test_Create)
def _test_Create():
frame = wx.GetApp().GetTopWindow()
btn = cb.ColourButton(frame)
assert list(btn.GetValue()) == [0, 0, 0, 255]
btn.SetValue((20, 30, 40))
assert list(btn.GetValue()) == [20, 30, 40, 255]
btn.SetValue((20, 30, 40, 50))
assert list(btn.GetValue()) == [20, 30, 40, 50]
with pytest.raises(ValueError): btn.SetValue([0])
with pytest.raises(ValueError): btn.SetValue([0, 1])
with pytest.raises(ValueError): btn.SetValue([0, 1, 2, 3, 4])
with pytest.raises(ValueError): btn.SetValue([-1, 0, 2])
with pytest.raises(ValueError): btn.SetValue([256, 20, 6])
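# The colour dialog is mocked so the click test below can run without user
# interaction: the patched wx.ColourDialog immediately reports
# MockColourDialog.colour and returns MockColourDialog.retval.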
class MockColourDialog(object):
retval = wx.ID_OK
colour = [0, 0, 0, 255]
def __init__(self, parent, colourData):
self.colourData = colourData
def ShowModal(self):
self.colourData.SetColour(MockColourDialog.colour)
return MockColourDialog.retval
def GetColourData(self):
return self.colourData
def test_Event():
run_with_wx(_test_Event)
def _test_Event():
sim = wx.UIActionSimulator()
frame = wx.GetApp().GetTopWindow()
btn = cb.ColourButton(frame)
colours = [(50, 60, 70),
(90, 100, 150, 200)]
result = [None]
def handler(ev):
result[0] = ev.colour
btn.Bind(cb.EVT_COLOUR_BUTTON_EVENT, handler)
for colour in colours:
MockColourDialog.colour = colour
with mock.patch('fsleyes_widgets.colourbutton.wx.ColourDialog',
side_effect=MockColourDialog):
simclick(sim, btn)
if len(colour) == 3:
colour = list(colour) + [255]
assert list(result[0]) == list(colour)
assert list(btn.GetValue()) == list(colour)
# Test dialog cancel
MockColourDialog.retval = wx.ID_CANCEL
MockColourDialog.colour = (20, 20, 20, 10)
colour = (150, 160, 170)
result = [None]
btn.SetValue(colour)
with mock.patch('fsleyes_widgets.colourbutton.wx.ColourDialog',
side_effect=MockColourDialog):
simclick(sim, btn)
assert result[0] is None
assert list(btn.GetValue()) == list(colour) + [255]
|
from tests import TEST_ROLE, client
def test_role_base():
response = client.get(f'/role/{TEST_ROLE}')
assert response.status_code == 200
assert response.text.startswith("<html>\n <head>\n")
assert response.text.count(TEST_ROLE) == 17
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Update a rotation schedule."""
from googlecloudsdk.api_lib.cloudkms import base as cloudkms_base
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.kms import flags
class SetRotationSchedule(base.UpdateCommand):
r"""Update the rotation schedule for a CryptoKey.
Updates the rotation schedule for the given CryptoKey. The schedule
automatically creates a new primary CryptoKeyVersion for the CryptoKey
according to the `--next-rotation-time` and `--rotation-period` flags.
The flag `--next-rotation-time` must be in ISO or RFC3339 format,
and `--rotation-period` must be in the form INTEGER[UNIT], where units
can be one of seconds (s), minutes (m), hours (h) or days (d).
Key rotations performed manually via `update-primary-version` and the
CryptoKeyVersion `create` do not affect the stored `--next-rotation-time`.
## EXAMPLES
The following command sets a 30 day rotation period for the CryptoKey
named `frodo` within the KeyRing `fellowship` and location `global`
starting at the specified time:
$ {command} frodo \
--location global \
--keyring fellowship \
--rotation-period 30d \
--next-rotation-time 2017-10-12T12:34:56.1234Z
"""
@staticmethod
def Args(parser):
flags.AddCryptoKeyArgument(parser, 'to update the schedule of')
flags.AddRotationPeriodFlag(parser)
flags.AddNextRotationTimeFlag(parser)
def Run(self, args):
client = cloudkms_base.GetClientInstance()
messages = cloudkms_base.GetMessagesModule()
crypto_key_ref = flags.ParseCryptoKeyName(args)
req = messages.CloudkmsProjectsLocationsKeyRingsCryptoKeysPatchRequest(
projectsId=crypto_key_ref.projectsId,
locationsId=crypto_key_ref.locationsId,
keyRingsId=crypto_key_ref.keyRingsId,
cryptoKeysId=crypto_key_ref.cryptoKeysId,
cryptoKey=messages.CryptoKey())
flags.SetNextRotationTime(args, req.cryptoKey)
flags.SetRotationPeriod(args, req.cryptoKey)
fields_to_update = []
if args.rotation_period is not None:
fields_to_update.append('rotationPeriod')
if args.next_rotation_time is not None:
fields_to_update.append('nextRotationTime')
if not fields_to_update:
raise exceptions.ToolException(
'At least one of --next-rotation-time or --rotation-period must be '
'specified.')
req.updateMask = ','.join(fields_to_update)
return client.projects_locations_keyRings_cryptoKeys.Patch(req)
|
# Cryomagnetics_CS4, Cryomagnetics CS4 magnet power supply driver
# Reinier Heeres <reinier@heeres.eu>, 2008
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from instrument import Instrument
import visa
import types
import logging
import re
import math
import time
class Cryomagnetics_4G(Instrument):
UNITS = ['A', 'G']
MARGIN = 0.001 # 1 Gauss
RE_ANS = re.compile(r'(-?\d*\.?\d*)([a-zA-Z]+)')
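# RE_ANS splits an instrument reply into value and unit,
# e.g. (illustrative) '52.345kG' -> ('52.345', 'kG').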
def __init__(self, name, address, reset=False, axes=('Z',)):
Instrument.__init__(self, name)
self._axes = {}
for i in range(len(axes)):
self._axes[i+1] = axes[i]
self._address = address
#>>>>>>>>>>>>>>
assert False, "pyvisa syntax has changed, tweak the line below according to the instructions in qtlab/instrument_plugins/README_PYVISA_API_CHANGES"
#self._visa = visa.instrument(self._address)
#<<<<<<<<<<<<<<
self.add_parameter('identification',
flags=Instrument.FLAG_GET)
self.add_parameter('units',
flags=Instrument.FLAG_GETSET,
channels=axes,
option_list=self.UNITS,
type=types.StringType)
self.add_parameter('rate0',
flags=Instrument.FLAG_GETSET,
type=types.FloatType,
channels=axes,
minval=0,
units='A/s')
self.add_parameter('rate1',
flags=Instrument.FLAG_GETSET,
type=types.FloatType,
channels=axes,
minval=0,
units='A/s')
self.add_parameter('heater',
flags=Instrument.FLAG_GETSET,
channels=axes,
type=types.BooleanType,
doc='''Persistent switch heater on?''')
self.add_parameter('magnetout',
flags=Instrument.FLAG_GET | Instrument.FLAG_SET,
channels=axes,
type=types.FloatType,
units='kG', format='%.05f',
doc='''Magnet current (or field in kG)''')
self.add_parameter('supplyout',
flags=Instrument.FLAG_GET,
channels=axes,
type=types.FloatType,
units='kG', format='%.05f',
doc='''Power supply current (or field in kG)''')
self.add_parameter('sweep',
flags=Instrument.FLAG_GETSET,
channels=axes,
option_list=['UP', 'UP FAST', 'DOWN', 'DOWN FAST', 'PAUSE', 'ZERO'],
type=types.StringType)
self.add_parameter('lowlim',
flags=Instrument.FLAG_GETSET,
channels=axes,
type=types.FloatType,
minval=-90.0, maxval=90.0,
units='kG', format='%.05f')
self.add_parameter('uplim',
flags=Instrument.FLAG_GETSET,
channels=axes,
type=types.FloatType,
minval=-90.0, maxval=90.0,
units='kG', format='%.05f')
self.add_parameter('field',
flags=Instrument.FLAG_GETSET,
channels=axes,
type=types.FloatType,
minval=-90, maxval=90.0,
units='kG', format='%.02f',
tags=['sweep'],
doc='''Field in Gauss (or Amperes)''')
self.add_function('local')
self.add_function('remote')
self.add_function('sweep_up')
self.add_function('sweep_down')
self.add_function('pause')
self.add_function('zero')
if reset:
self.reset()
else:
self.get_all()
def reset(self):
self._visa.write('*RST')
def get_all(self):
self.get_identification()
for ax in self._axes.values():
self.get('units%s' % ax)
self.get('rate0%s' % ax)
self.get('rate1%s' % ax)
self.get('heater%s' % ax)
self.get('magnetout%s' % ax)
self.get('supplyout%s' % ax)
self.get('lowlim%s' % ax)
self.get('uplim%s' % ax)
self.get('field%s' % ax)
self.get('sweep%s' % ax)
def do_get_identification(self):
return self._visa.ask('*IDN?')
def _update_units(self, unit, channel):
if unit == 'G':
unit = 'kG'
self.set_parameter_options('magnetout%s' % channel, units=unit)
self.set_parameter_options('supplyout%s' % channel, units=unit)
self.set_parameter_options('lowlim%s' % channel, units=unit)
self.set_parameter_options('uplim%s' % channel, units=unit)
def do_get_nchannels(self):
ans = self._visa.ask('CHAN?')
if ans not in ('1', '2'):
return 2
else:
return 1
def _select_channel(self, channel):
for i, v in self._axes.iteritems():
if v == channel:
self._visa.write('CHAN %d' % i)
return True
raise ValueError('Unknown axis %s' % channel)
def do_get_units(self, channel):
self._select_channel(channel)
ans = self._visa.ask('UNITS?')
self._update_units(ans, channel)
return ans
def do_set_units(self, unit, channel):
if unit not in self.UNITS:
logging.error('Trying to set invalid unit: %s', unit)
return False
self._select_channel(channel)
self._visa.write('UNITS %s' % unit)
self._update_units(unit, channel)
def _check_ans_unit(self, ans, channel):
m = self.RE_ANS.match(ans)
if not m:
logging.warning('Unable to parse answer: %s', ans)
return False
val, unit = m.groups()
try:
val = float(val)
except ValueError:
val = None
set_unit = self.get('units%s' % channel, query=False)
if set_unit == 'G':
set_unit = 'kG'
if unit != set_unit:
logging.warning('Returned units (%s) differ from set units (%s)!',
unit, set_unit)
return None
return val
def do_get_rate0(self, channel):
self._select_channel(channel)
ans = self._visa.ask('RATE? 0')
return float(ans)
def do_get_rate1(self, channel):
self._select_channel(channel)
ans = self._visa.ask('RATE? 1')
return float(ans)
def do_set_rate0(self, rate, channel):
self._select_channel(channel)
self._visa.write('RATE 0 %.03f\n' % rate)
def do_set_rate1(self, rate, channel):
self._select_channel(channel)
self._visa.write('RATE 1 %.03f\n' % rate)
def do_get_heater(self, channel):
self._select_channel(channel)
ans = self._visa.ask('PSHTR?')
if len(ans) > 0 and ans[0] == '1':
return True
else:
return False
def do_set_heater(self, on, channel):
if on:
text = 'ON'
else:
text = 'OFF'
self._select_channel(channel)
self._visa.write('PSHTR %s' % text)
def local(self):
self._visa.write('LOCAL')
def remote(self):
self._visa.write('REMOTE')
def do_get_magnetout(self, channel):
self._select_channel(channel)
ans = self._visa.ask('IMAG?')
return self._check_ans_unit(ans, channel)
def do_set_magnetout(self, val, channel):
self._select_channel(channel)
ans = self._visa.write('IMAG %f\n' % val)
return True
def do_get_supplyout(self, channel):
self._select_channel(channel)
ans = self._visa.ask('IOUT?')
return self._check_ans_unit(ans, channel)
def do_get_sweep(self, channel):
self._select_channel(channel)
ans = self._visa.ask('SWEEP?')
return ans
def do_set_sweep(self, val, channel):
self._select_channel(channel)
val = val.upper()
if val not in ['UP', 'UP FAST', 'DOWN', 'DOWN FAST', 'PAUSE', 'ZERO']:
logging.warning('Invalid sweep mode selected')
return False
self._visa.write('SWEEP %s' % val)
def sweep_up(self, channel, fast=False):
cmd = 'UP'
if fast:
cmd += ' FAST'
return self.set('sweep%s' % channel, cmd)
def sweep_down(self, channel, fast=False):
cmd = 'DOWN'
if fast:
cmd += ' FAST'
return self.set('sweep%s' % channel, cmd)
def do_get_lowlim(self, channel):
self._select_channel(channel)
ans = self._visa.ask('LLIM?')
return self._check_ans_unit(ans, channel)
def do_set_lowlim(self, val, channel):
self._select_channel(channel)
self._visa.write('LLIM %f\n' % val)
def do_get_uplim(self, channel):
self._select_channel(channel)
ans = self._visa.ask('ULIM?')
return self._check_ans_unit(ans, channel)
def do_set_uplim(self, val, channel):
self._select_channel(channel)
self._visa.write('ULIM %f\n' % val)
def do_set_field(self, val, channel, wait=False):
self._select_channel(channel)
units = self.get('units%s' % channel, query=False)
if units != 'G':
logging.warning('Unable to set field when units not in Gauss!')
return False
if not self.get('heater%s' % channel, query=False):
logging.warning('Unable to sweep field when heater off')
return False
cur_magnet = self.get('magnetout%s' % channel)
cur_supply = self.get('supplyout%s' % channel)
if math.fabs(cur_magnet - cur_supply) > self.MARGIN:
logging.warning('Unable to set field when magnet (%f) and supply (%f) not equal!', cur_magnet, cur_supply)
return False
if val > cur_magnet:
self.set('uplim%s' % channel, val)
self.sweep_up(channel)
else:
self.set('lowlim%s' % channel, val)
self.sweep_down(channel)
if wait:
while math.fabs(val - self.get('magnetout%s' % channel)) > self.MARGIN:
time.sleep(0.050)
return True
def do_get_field(self, channel):
self._select_channel(channel)
unit = self.get('units%s' % channel, query=False)
if unit != 'G':
logging.warning('Unable to determine field if units are not Gauss')
return None
magnet_field = self.get('magnetout%s' % channel)
return magnet_field
def pause(self):
for ax in self._axes.values():
self.set('sweep%s' % ax, 'PAUSE')
def zero(self):
for ax in self._axes.values():
self.set('sweep%s' % ax, 'ZERO')
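# Hypothetical qtlab usage sketch (instrument name and address are assumptions):
#   magnet = qt.instruments.create('magnet', 'Cryomagnetics_4G',
#                                  address='GPIB::5', axes=('Z',))
#   magnet.set('fieldZ', 10.0)   # sweep to 10 kG, assuming units are set to 'G'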
|
# Language Coverage
languageDict = {
'Afrikaans':['Egrave','egrave','Eacute','eacute','Ecircumflex','ecircumflex','Edieresis','edieresis','Icircumflex','icircumflex','Idieresis','idieresis','napostrophe','Ocircumflex','ocircumflex','Ucircumflex','ucircumflex'],
'Albanian':['Ccedilla','ccedilla','Edieresis','edieresis'],
'Basque':['Ntilde','ntilde','Udieresis','udieresis'],
'Belarusian':['Cacute','cacute','Ccaron','ccaron','Lslash','lslash','Nacute','nacute','Sacute','sacute','Scaron','scaron','Ubreve','ubreve','Zacute','zacute','Zcaron','zcaron'],
'Bosnian':['Cacute','cacute','Ccaron','ccaron','Dcroat','dcroat','Scaron','scaron','Zcaron','zcaron'],
'Catalan':['Agrave','agrave','Ccedilla','ccedilla','Egrave','egrave','Eacute','eacute','Iacute','iacute','Idieresis','idieresis','Ldot','ldot','Ntilde','ntilde','Ograve','ograve','Oacute','oacute','Uacute','uacute','Udieresis','udieresis'],
'Croatian':['Cacute','cacute','Ccaron','ccaron','Dcroat','dcroat','Scaron','scaron','Zcaron','zcaron'],
'Czech':['Aacute','aacute','Ccaron','ccaron','Dcaron','dcaron','Eacute','eacute','Ecaron','ecaron','Iacute','iacute','Ncaron','ncaron','Oacute','oacute','Rcaron','rcaron','Scaron','scaron','Tcaron','tcaron','Uacute','uacute','Uring','uring','Yacute','yacute','Zcaron','zcaron'],
'Danish':['Aacute','aacute','Aring','aring','Aringacute','aringacute','AE','ae','AEacute','aeacute','Eacute','eacute','Iacute','iacute','Oacute','oacute','Oslash','oslash','Oslashacute','oslashacute','Uacute','uacute','Yacute','yacute'],
'Dutch':['Aacute','aacute','Acircumflex','acircumflex','Adieresis','adieresis','Egrave','egrave','Eacute','eacute','Ecircumflex','ecircumflex','Edieresis','edieresis','Iacute','iacute','Idieresis','idieresis','Oacute','oacute','Ocircumflex','ocircumflex','Odieresis','odieresis','Uacute','uacute','Ucircumflex','ucircumflex','Udieresis','udieresis','IJ','ij','IJacute','ijacute'],
'English':['AE','ae','Ccedilla','ccedilla','Idieresis','idieresis','Ocircumflex','ocircumflex','OE','oe','Aacute','aacute','Egrave','egrave','Eacute','eacute','Edieresis','edieresis','Ecircumflex','ecircumflex','Ntilde','ntilde','Odieresis','odieresis'],
'Esperanto':['Ccircumflex','ccircumflex','Gcircumflex','gcircumflex','Hcircumflex','hcircumflex','Jcircumflex','jcircumflex','Scircumflex','scircumflex','Ubreve','ubreve'],
'Estonian':['Adieresis','adieresis','Otilde','otilde','Odieresis','odieresis','Scaron','scaron','Udieresis','udieresis','Zcaron','zcaron'],
'Filipino':['Ntilde','ntilde'],
'Finnish':['Adieresis','adieresis','Aring','aring','AE','ae','Odieresis','odieresis','Otilde','otilde','Oslash','oslash','Scaron','scaron','Udieresis','udieresis','Zcaron','zcaron'],
'French':['Agrave','agrave','Acircumflex','acircumflex','Ccedilla','ccedilla','Egrave','egrave','Eacute','eacute','Ecircumflex','ecircumflex','Edieresis','edieresis','Icircumflex','icircumflex','Idieresis','idieresis','Ocircumflex','ocircumflex','OE','oe','Ugrave','ugrave','Ucircumflex','ucircumflex','Udieresis','udieresis','Ydieresis','ydieresis','AE','ae'],
'Galician':['Aacute','aacute','Eacute','eacute','Iacute','iacute','Ntilde','ntilde','Oacute','oacute','Uacute','uacute','Udieresis','udieresis'],
'German':['Adieresis','adieresis','Odieresis','odieresis','uni1E9E','germandbls','Udieresis','udieresis','Agrave','agrave','Eacute','eacute'],
'Hawaiian':['Amacron','amacron','Emacron','emacron','Imacron','imacron','Omacron','omacron','Umacron','umacron'],
'Hungarian':['Aacute','aacute','Eacute','eacute','Iacute','iacute','Oacute','oacute','Odieresis','odieresis','Ohungarumlaut','ohungarumlaut','Uacute','uacute','Udieresis','udieresis','Uhungarumlaut','uhungarumlaut'],
'Icelandic':['Aacute','aacute','AE','ae','Eth','eth','Eacute','eacute','Iacute','iacute','Oacute','oacute','Odieresis','odieresis','Thorn','thorn','Uacute','uacute','Yacute','yacute','AEacute','aeacute','Oslashacute','oslashacute'],
'Indonesian':['Eacute','eacute'],
'Irish':['Agrave','agrave','Aacute','aacute','Egrave','egrave','Eacute','eacute','Igrave','igrave','Iacute','iacute','Ograve','ograve','Oacute','oacute','Uacute','uacute','Ugrave','ugrave'],
'Italian':['Agrave','agrave','Egrave','egrave','Eacute','eacute','Igrave','igrave','Ograve','ograve','Oacute','oacute','Ugrave','ugrave','Aacute','aacute','Iacute','iacute','Icircumflex','icircumflex','Idieresis','idieresis','Uacute','uacute'],
'Kurdish':['Amacron','amacron','Abreve','abreve','Ccedilla','ccedilla','Eacute','eacute','Ecircumflex','ecircumflex','Emacron','emacron','Ebreve','ebreve','Iacute','iacute','Icircumflex','icircumflex','Imacron','imacron','Ibreve','ibreve','Omacron','omacron','Obreve','obreve','Scommaaccent','scommaaccent','Uacute','uacute','Ucircumflex','ucircumflex','Umacron','umacron','Ubreve','ubreve'],
'Latvian':['Amacron','amacron','Ccaron','ccaron','Emacron','emacron','Gcommaaccent','gcommaaccent','Imacron','imacron','Kcommaaccent','kcommaaccent','Lcommaaccent','lcommaaccent','Ncommaaccent','ncommaaccent','Scaron','scaron','Umacron','umacron','Zcaron','zcaron','Omacron','omacron','Rcommaaccent','rcommaaccent'],
'Lithuanian':['Aogonek','aogonek','Ccaron','ccaron','Eogonek','eogonek','Edotaccent','edotaccent','Iogonek','iogonek','Scaron','scaron','Uogonek','uogonek','Umacron','umacron','Zcaron','zcaron' ],
'Maltese':['Agrave','agrave','Aacute','aacute','Acircumflex','acircumflex','Cdotaccent','cdotaccent','Egrave','egrave','Eacute','eacute','Ecircumflex','ecircumflex','Gdotaccent','gdotaccent','Hbar','hbar','Igrave','igrave','Iacute','iacute','Icircumflex','icircumflex','Ograve','ograve','Oacute','oacute','Ocircumflex','ocircumflex','Ugrave','ugrave','Uacute','uacute','Ucircumflex','ucircumflex','Zdotaccent','zdotaccent'],
'Nahuatl':['Amacron','amacron','Emacron','emacron','Imacron','imacron','Omacron','omacron','Umacron','umacron','Udieresis','udieresis','Ymacron','ymacron'],
'Norwegian':['AE','ae','Oslash','oslash','Aring','aring','Agrave','agrave','Eacute','eacute','Ecircumflex','ecircumflex','Oacute','oacute','Ograve','ograve','Ocircumflex','ocircumflex','Adieresis','adieresis','Odieresis','odieresis','Udieresis','udieresis'],
'Polish': ['Aogonek','aogonek','Cacute','cacute','Eogonek','eogonek','Lslash','lslash','Nacute','nacute','Oacute','oacute','Sacute','sacute','Zacute','zacute','Zdotaccent','zdotaccent'],
'Portuguese':['Agrave','agrave','Aacute','aacute','Acircumflex','acircumflex','Atilde','atilde','Ccedilla','ccedilla','Eacute','eacute','Ecircumflex','ecircumflex','Iacute','iacute','Oacute','oacute','Ocircumflex','ocircumflex','Otilde','otilde','Uacute','uacute','Udieresis','udieresis','Egrave','egrave','Ograve','ograve'],
'Romanian':['Acircumflex','acircumflex','Abreve','abreve','Icircumflex','icircumflex','Scommaaccent','scommaaccent','uni021A','uni021B'],
'Serbian':['Cacute','cacute','Ccaron','ccaron','Dcroat','dcroat','Scaron','scaron','Zcaron','zcaron'],
'Slovak':['Aacute','aacute','Adieresis','adieresis','Ccaron','ccaron','Dcaron','dcaron','Eacute','eacute','Iacute','iacute','Lacute','lacute','Lcaron','lcaron','Ncaron','ncaron','Oacute','oacute','Ocircumflex','ocircumflex','Racute','racute','Scaron','scaron','Tcaron','tcaron','Uacute','uacute','Yacute','yacute','Zcaron','zcaron'],
'Slovenian':['Ccaron','ccaron','Scaron','scaron','Zcaron','zcaron','Cacute','cacute','Dcroat','dcroat','Adieresis','adieresis','Odieresis','odieresis','Udieresis','udieresis'],
'Spanish':['Aacute','aacute','Eacute','eacute','Iacute','iacute','Ntilde','ntilde','Oacute','oacute','Uacute','uacute','Udieresis','udieresis'],
'Swedish':['Adieresis','adieresis','Aring','aring','Eacute','eacute','Odieresis','odieresis','Aacute','aacute','Agrave','agrave','Edieresis','edieresis','Udieresis','udieresis'],
'Tahitian':['Amacron','amacron','Emacron','emacron','Imacron','imacron','Omacron','omacron','Umacron','umacron'],
'Turkish':['Acircumflex','acircumflex','Ccedilla','ccedilla','Gbreve','gbreve','Icircumflex','icircumflex','Idotaccent','dotlessi','Odieresis','odieresis','Scommaaccent','scommaaccent','Ucircumflex','ucircumflex','Udieresis','udieresis'],
'Welsh':['Aacute','aacute','Agrave','agrave','Acircumflex','acircumflex','Adieresis','adieresis','Eacute','eacute','Egrave','egrave','Ecircumflex','ecircumflex','Edieresis','edieresis','Iacute','iacute','Igrave','igrave','Icircumflex','icircumflex','Idieresis','idieresis','Oacute','oacute','Ograve','ograve','Ocircumflex','ocircumflex','Odieresis','odieresis','Uacute','uacute','Ugrave','ugrave','Ucircumflex','ucircumflex','Udieresis','udieresis','Yacute','yacute','Ygrave','ygrave','Ycircumflex','ycircumflex','Ydieresis','ydieresis','Wacute','wacute','Wgrave','wgrave','Wcircumflex','wcircumflex','Wdieresis','wdieresis']
}
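# Minimal usage sketch (hypothetical helper, not part of the original mapping):
# report which glyphs required for a language are missing from a font's glyph set.
def missing_glyphs(font_glyph_names, language):
    required = set(languageDict[language])
    return sorted(required - set(font_glyph_names))
# e.g. missing_glyphs({'Aacute', 'aacute'}, 'Filipino') -> ['Ntilde', 'ntilde']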
|
from django_object_actions import BaseDjangoObjectActions as ObjectActions
from django.contrib import admin
from django.contrib.admin import ModelAdmin, RelatedOnlyFieldListFilter
from core.admin.utils import (
get_change_view_link,
get_changelist_view_link,
get_html_preview,
get_image_preview
)
from ..models import (
SemaBaseVehicle,
SemaBrand,
SemaCategory,
SemaDataset,
SemaDescriptionPiesAttribute,
SemaDigitalAssetsPiesAttribute,
SemaEngine,
SemaMake,
SemaMakeYear,
SemaModel,
SemaProduct,
SemaSubmodel,
SemaVehicle,
SemaYear
)
from .actions import (
SemaBaseVehicleActions,
SemaBrandActions,
SemaCategoryActions,
SemaDatasetActions,
SemaDescriptionPiesAttributeActions,
SemaDigitalAssetsPiesAttributeActions,
SemaEngineActions,
SemaMakeActions,
SemaMakeYearActions,
SemaModelActions,
SemaProductActions,
SemaSubmodelActions,
SemaVehicleActions,
SemaYearActions
)
from .filters import (
SemaYearByDecade,
ByCategoryLevel,
HasCategory,
HasCategoryPath,
HasHtml,
HasItem,
HasPremierManufacturer,
HasPremierProduct,
HasPrimaryImage,
HasShopifyCollection,
HasShopifyProduct,
HasShopifyVendor,
HasVehicle,
HasVendor,
SemaBaseVehicleByDecade,
SemaBaseVehicleMayBeRelevant,
SemaCategoryMayBeRelevant,
SemaBrandMayBeRelevant,
SemaDescriptionPiesAttributeMayBeRelevant,
SemaDigitalAssetsPiesAttributeMayBeRelevant,
SemaEngineByDecade,
SemaEngineMayBeRelevant,
SemaMakeYearByDecade,
SemaMakeYearMayBeRelevant,
SemaModelMayBeRelevant,
SemaProductMayBeRelevant,
SemaSubmodelMayBeRelevant,
SemaYearMayBeRelevant,
SemaVehicleByDecade,
SemaVehicleMayBeRelevant
)
from .inlines import (
SemaBaseVehicleVehiclesTabularInline,
SemaBrandDatasetsTabularInline,
SemaCategoryDatasetsTabularInline,
SemaCategoryChildCategoriesTabularInline,
SemaCategoryParentCategoriesTabularInline,
SemaCategoryProductsTabularInline,
SemaDatasetCategoriesTabularInline,
# SemaDatasetProductsTabularInline,
SemaDatasetVehiclesTabularInline,
SemaMakeMakeYearsTabularInline,
SemaMakeYearBaseVehiclesTabularInline,
SemaModelBaseVehiclesTabularInline,
SemaProductCategoriesTabularInline,
SemaProductDescriptionPiesAttributeTabularInline,
SemaProductDigitalAssetsPiesAttributeTabularInline,
SemaProductVehiclesTabularInline,
SemaSubmodelVehiclesTabularInline,
SemaVehicleDatasetsTabularInline,
# SemaVehicleEnginesTabularInline,
# SemaVehicleProductsTabularInline,
SemaYearMakeYearsTabularInline
)
@admin.register(SemaBrand)
class SemaBrandModelAdmin(ObjectActions, ModelAdmin, SemaBrandActions):
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action'
)
changelist_actions = (
'import_new_class_action',
# 'import_class_action',
# 'unauthorize_class_action',
# 'sync_class_action'
)
search_fields = (
'brand_id',
'name'
)
list_display = (
'detail_link',
'brand_id',
'name',
'dataset_count',
'primary_image_preview',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
'relevancy_exception'
)
list_filter = (
'is_authorized',
'is_relevant',
SemaBrandMayBeRelevant,
HasVendor,
HasPremierManufacturer,
HasShopifyVendor,
HasPrimaryImage
)
fieldsets = (
(
None, {
'fields': (
'vendor_link',
'premier_manufacturer_link',
'shopify_vendor_link',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
}
),
(
'Brand', {
'fields': (
'brand_id',
'name'
)
}
),
(
'Images', {
'fields': (
('primary_image_url', 'primary_image_preview'),
)
}
)
)
inlines = (
SemaBrandDatasetsTabularInline,
)
readonly_fields = (
'relevancy_warnings',
'relevancy_errors',
'may_be_relevant_flag',
'primary_image_preview',
'vendor_link',
'premier_manufacturer_link',
'shopify_vendor_link',
'dataset_count'
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def vendor_link(self, obj):
if not obj or not obj.pk or not hasattr(obj, 'vendor'):
return None
return get_change_view_link(obj.vendor, 'See Vendor')
vendor_link.short_description = ''
def premier_manufacturer_link(self, obj):
if (not obj or not obj.pk or not hasattr(obj, 'vendor')
or not obj.vendor.premier_manufacturer):
return None
return get_change_view_link(
obj.vendor.premier_manufacturer,
'See Premier Manufacturer',
)
premier_manufacturer_link.short_description = ''
def shopify_vendor_link(self, obj):
if (not obj or not obj.pk or not hasattr(obj, 'vendor')
or not obj.vendor.shopify_vendor):
return None
return get_change_view_link(
obj.vendor.shopify_vendor,
'See Shopify Vendor',
)
shopify_vendor_link.short_description = ''
def dataset_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj._dataset_relevant_count}/{obj._dataset_count}'
dataset_count.admin_order_field = '_dataset_relevant_count'
dataset_count.short_description = 'dataset count'
def primary_image_preview(self, obj):
if not obj or not obj.pk or not obj.primary_image_url:
return None
return get_image_preview(obj.primary_image_url)
primary_image_preview.short_description = ''
def may_be_relevant_flag(self, obj):
if not obj or not obj.pk:
return None
if obj.is_relevant != obj.may_be_relevant:
return '~'
else:
return ''
may_be_relevant_flag.short_description = ''
def get_queryset(self, request):
return super().get_queryset(request).with_admin_data()
def get_readonly_fields(self, request, obj=None):
readonly_fields = super().get_readonly_fields(request, obj)
if obj and not request.user.is_superuser:
readonly_fields += (
'brand_id',
)
return readonly_fields
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'brand_id',
'name'
)
}
),
)
return super().get_fieldsets(request, obj)
def get_inline_instances(self, request, obj=None):
if not obj:
return []
return super().get_inline_instances(request, obj)
@admin.register(SemaDataset)
class SemaDatasetModelAdmin(ObjectActions, ModelAdmin, SemaDatasetActions):
list_select_related = (
'brand',
)
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action',
'update_dataset_categories_queryset_action', # TO NOTE: too long
'update_dataset_vehicles_queryset_action' # TO NOTE: too long
)
changelist_actions = (
'import_new_class_action',
# 'import_class_action',
# 'unauthorize_class_action',
# 'sync_class_action'
)
change_actions = (
'update_dataset_categories_object_action',
'update_dataset_vehicles_object_action' # TO NOTE: too long
)
search_fields = (
'dataset_id',
'name',
'brand__brand_id',
'brand__name'
)
list_display = (
'detail_link',
'dataset_id',
'name',
'brand',
'category_count',
'vehicle_count',
'product_count',
'is_authorized',
'is_relevant',
'relevancy_warnings',
'relevancy_errors'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
)
list_filter = (
'is_authorized',
'is_relevant',
'brand'
)
fieldsets = (
(
None, {
'fields': (
'is_authorized',
'is_relevant',
'relevancy_warnings',
'relevancy_errors'
)
}
),
(
'Brand', {
'fields': (
'brand_link',
'brand'
),
'classes': (
'collapse',
)
}
),
(
'Dataset', {
'fields': (
'dataset_id',
'name'
)
}
)
)
autocomplete_fields = (
'brand',
)
readonly_fields = (
'relevancy_warnings',
'relevancy_errors',
'detail_link',
'brand_link',
'category_count',
'vehicle_count',
'product_count'
)
inlines = (
SemaDatasetCategoriesTabularInline,
SemaDatasetVehiclesTabularInline,
# SemaDatasetProductsTabularInline # TO NOTE: too long
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def brand_link(self, obj):
if not obj or not obj.pk or not obj.brand:
return None
return get_change_view_link(
obj.brand, 'See Full Brand')
brand_link.short_description = ''
def category_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj.category_relevant_count}/{obj.category_count}'
# category_count.admin_order_field = '_category_relevant_count'
category_count.short_description = 'category count'
def vehicle_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj.vehicle_relevant_count}/{obj.vehicle_count}'
# vehicle_count.admin_order_field = '_vehicle_relevant_count'
vehicle_count.short_description = 'vehicle count'
def product_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj.product_relevant_count}/{obj.product_count}'
# product_count.admin_order_field = '_product_relevant_count'
product_count.short_description = 'product count'
# def get_queryset(self, request): # FIXME
# return super().get_queryset(request).with_admin_data()
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'dataset_id',
'brand',
'name'
)
}
),
)
return super().get_fieldsets(request, obj)
def get_readonly_fields(self, request, obj=None):
readonly_fields = super().get_readonly_fields(request, obj)
if obj and not request.user.is_superuser:
readonly_fields += (
'dataset_id',
)
return readonly_fields
def get_inline_instances(self, request, obj=None):
if not obj:
return []
return super().get_inline_instances(request, obj)
@admin.register(SemaYear)
class SemaYearModelAdmin(ObjectActions, ModelAdmin, SemaYearActions):
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action'
)
changelist_actions = (
'import_new_class_action',
# 'import_class_action',
# 'unauthorize_class_action',
# 'sync_class_action'
)
search_fields = (
'year',
)
list_display = (
'detail_link',
'year',
'make_year_count',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
'relevancy_exception'
)
list_filter = (
'is_authorized',
'is_relevant',
SemaYearMayBeRelevant,
SemaYearByDecade
)
fieldsets = (
(
None, {
'fields': (
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
}
),
(
'Year', {
'fields': (
'year',
)
}
)
)
readonly_fields = (
'relevancy_warnings',
'relevancy_errors',
'may_be_relevant_flag',
'detail_link',
'make_year_count'
)
inlines = (
SemaYearMakeYearsTabularInline,
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def make_year_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj._make_year_relevant_count}/{obj._make_year_count}'
make_year_count.admin_order_field = '_make_year_relevant_count'
make_year_count.short_description = 'make year count'
def may_be_relevant_flag(self, obj):
if not obj or not obj.pk:
return None
if obj.is_relevant != obj.may_be_relevant:
return '~'
else:
return ''
may_be_relevant_flag.short_description = ''
def get_queryset(self, request):
return super().get_queryset(request).with_admin_data()
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'year',
)
}
),
)
return super().get_fieldsets(request, obj)
def get_readonly_fields(self, request, obj=None):
readonly_fields = super().get_readonly_fields(request, obj)
if obj and not request.user.is_superuser:
readonly_fields += (
'year',
)
return readonly_fields
def get_inline_instances(self, request, obj=None):
if not obj:
return []
return super().get_inline_instances(request, obj)
@admin.register(SemaMake)
class SemaMakeModelAdmin(ObjectActions, ModelAdmin, SemaMakeActions):
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action'
)
changelist_actions = (
'import_new_class_action',
# 'import_class_action',
# 'unauthorize_class_action',
# 'sync_class_action'
)
search_fields = (
'make_id',
'name',
)
list_display = (
'detail_link',
'make_id',
'name',
'make_year_count',
'is_authorized',
'is_relevant',
'relevancy_warnings',
'relevancy_errors'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
)
list_filter = (
'is_authorized',
'is_relevant'
)
fieldsets = (
(
None, {
'fields': (
'is_authorized',
'is_relevant',
'relevancy_warnings',
'relevancy_errors'
)
}
),
(
'Make', {
'fields': (
'make_id',
'name'
)
}
)
)
readonly_fields = (
'relevancy_warnings',
'relevancy_errors',
'detail_link',
'make_year_count'
)
inlines = (
SemaMakeMakeYearsTabularInline,
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def make_year_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj._make_year_relevant_count}/{obj._make_year_count}'
make_year_count.admin_order_field = '_make_year_relevant_count'
make_year_count.short_description = 'make year count'
def get_queryset(self, request):
return super().get_queryset(request).with_admin_data()
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'make_id',
'name'
)
}
),
)
return super().get_fieldsets(request, obj)
def get_readonly_fields(self, request, obj=None):
readonly_fields = super().get_readonly_fields(request, obj)
if obj and not request.user.is_superuser:
readonly_fields += (
'make_id',
)
return readonly_fields
def get_inline_instances(self, request, obj=None):
if not obj:
return []
return super().get_inline_instances(request, obj)
@admin.register(SemaModel)
class SemaModelModelAdmin(ObjectActions, ModelAdmin, SemaModelActions):
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action'
)
changelist_actions = (
'import_new_class_action',
# 'import_class_action',
# 'unauthorize_class_action',
# 'sync_class_action',
)
search_fields = (
'model_id',
'name'
)
list_display = (
'detail_link',
'model_id',
'name',
'base_vehicle_count',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
'relevancy_exception'
)
list_filter = (
'is_authorized',
'is_relevant',
SemaModelMayBeRelevant
)
fieldsets = (
(
None, {
'fields': (
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
}
),
(
'Model', {
'fields': (
'model_id',
'name'
)
}
)
)
readonly_fields = (
'relevancy_warnings',
'relevancy_errors',
'may_be_relevant_flag',
'detail_link',
'base_vehicle_count'
)
inlines = (
SemaModelBaseVehiclesTabularInline,
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def base_vehicle_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj._base_vehicle_relevant_count}/{obj._base_vehicle_count}'
base_vehicle_count.admin_order_field = '_base_vehicle_relevant_count'
base_vehicle_count.short_description = 'base vehicle count'
def may_be_relevant_flag(self, obj):
if not obj or not obj.pk:
return None
if obj.is_relevant != obj.may_be_relevant:
return '~'
else:
return ''
may_be_relevant_flag.short_description = ''
def get_queryset(self, request):
return super().get_queryset(request).with_admin_data()
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'model_id',
'name'
)
}
),
)
return super().get_fieldsets(request, obj)
def get_readonly_fields(self, request, obj=None):
readonly_fields = super().get_readonly_fields(request, obj)
if obj and not request.user.is_superuser:
readonly_fields += (
'model_id',
)
return readonly_fields
def get_inline_instances(self, request, obj=None):
if not obj:
return []
return super().get_inline_instances(request, obj)
@admin.register(SemaSubmodel)
class SemaSubmodelModelAdmin(ObjectActions, ModelAdmin, SemaSubmodelActions):
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action'
)
changelist_actions = (
'import_new_class_action',
# 'import_class_action',
# 'unauthorize_class_action',
# 'sync_class_action'
)
search_fields = (
'submodel_id',
'name'
)
list_display = (
'detail_link',
'submodel_id',
'name',
'vehicle_count',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
'relevancy_exception'
)
list_filter = (
'is_authorized',
'is_relevant',
SemaSubmodelMayBeRelevant
)
fieldsets = (
(
None, {
'fields': (
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
}
),
(
'Submodel', {
'fields': (
'submodel_id',
'name'
)
}
)
)
readonly_fields = (
'relevancy_warnings',
'relevancy_errors',
'may_be_relevant_flag',
'detail_link',
'vehicle_count'
)
inlines = (
SemaSubmodelVehiclesTabularInline,
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def vehicle_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj._vehicle_relevant_count}/{obj._vehicle_count}'
vehicle_count.admin_order_field = '_vehicle_relevant_count'
vehicle_count.short_description = 'vehicle count'
def may_be_relevant_flag(self, obj):
if not obj or not obj.pk:
return None
if obj.is_relevant != obj.may_be_relevant:
return '~'
else:
return ''
may_be_relevant_flag.short_description = ''
def get_queryset(self, request):
return super().get_queryset(request).with_admin_data()
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'submodel_id',
'name'
)
}
),
)
return super().get_fieldsets(request, obj)
def get_readonly_fields(self, request, obj=None):
readonly_fields = super().get_readonly_fields(request, obj)
if obj and not request.user.is_superuser:
readonly_fields += (
'submodel_id',
)
return readonly_fields
def get_inline_instances(self, request, obj=None):
if not obj:
return []
return super().get_inline_instances(request, obj)
@admin.register(SemaMakeYear)
class SemaMakeYearModelAdmin(ObjectActions, ModelAdmin, SemaMakeYearActions):
list_select_related = (
'year',
'make'
)
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action'
)
changelist_actions = (
'import_new_class_action',
# 'import_class_action',
# 'unauthorize_class_action',
# 'sync_class_action'
)
search_fields = (
'id',
'year__year',
'make__make_id',
'make__name'
)
list_display = (
'detail_link',
'id',
'year',
'make',
'base_vehicle_count',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
'relevancy_exception'
)
list_filter = (
'is_authorized',
'is_relevant',
SemaMakeYearMayBeRelevant,
SemaMakeYearByDecade,
('make', RelatedOnlyFieldListFilter)
)
fieldsets = (
(
None, {
'fields': (
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception',
'id'
)
}
),
(
'Year', {
'fields': (
'year_link',
'year'
),
'classes': (
'collapse',
)
}
),
(
'Make', {
'fields': (
'make_link',
'make'
),
'classes': (
'collapse',
)
}
)
)
readonly_fields = (
'id',
'relevancy_warnings',
'relevancy_errors',
'may_be_relevant_flag',
'detail_link',
'year_link',
'make_link',
'base_vehicle_count'
)
autocomplete_fields = (
'year',
'make'
)
inlines = (
SemaMakeYearBaseVehiclesTabularInline,
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def year_link(self, obj):
if not obj or not obj.pk or not obj.year:
return None
return get_change_view_link(obj.year, 'See Full Year')
year_link.short_description = ''
def make_link(self, obj):
if not obj or not obj.pk or not obj.make:
return None
return get_change_view_link(obj.make, 'See Full Make')
make_link.short_description = ''
def base_vehicle_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj._base_vehicle_relevant_count}/{obj._base_vehicle_count}'
base_vehicle_count.admin_order_field = '_base_vehicle_relevant_count'
base_vehicle_count.short_description = 'base vehicle count'
def may_be_relevant_flag(self, obj):
if not obj or not obj.pk:
return None
if obj.is_relevant != obj.may_be_relevant:
return '~'
else:
return ''
may_be_relevant_flag.short_description = ''
def get_queryset(self, request):
return super().get_queryset(request).with_admin_data()
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'year',
'make'
)
}
),
)
return super().get_fieldsets(request, obj)
def get_inline_instances(self, request, obj=None):
if not obj:
return []
return super().get_inline_instances(request, obj)
@admin.register(SemaBaseVehicle)
class SemaBaseVehicleModelAdmin(ObjectActions, ModelAdmin,
SemaBaseVehicleActions):
list_select_related = (
'make_year',
'model'
)
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action'
)
changelist_actions = (
'import_new_class_action', # TO NOTE: too long
# 'import_class_action', # TO NOTE: too long
# 'unauthorize_class_action', # TO NOTE: too long
# 'sync_class_action' # TO NOTE: too long
)
search_fields = (
'base_vehicle_id',
'make_year__year__year',
'make_year__make__make_id',
'make_year__make__name',
'model__model_id',
'model__name'
)
list_display = (
'detail_link',
'base_vehicle_id',
'make_year',
'model',
'vehicle_count',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
'relevancy_exception'
)
list_filter = (
'is_authorized',
'is_relevant',
SemaBaseVehicleMayBeRelevant,
SemaBaseVehicleByDecade,
('make_year__make', RelatedOnlyFieldListFilter),
('model', RelatedOnlyFieldListFilter)
)
fieldsets = (
(
None, {
'fields': (
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
}
),
(
'Base Vehicle', {
'fields': (
'base_vehicle_id',
)
}
),
(
'Make Year', {
'fields': (
'make_year_link',
'make_year'
),
'classes': (
'collapse',
)
}
),
(
'Model', {
'fields': (
'model_link',
'model'
),
'classes': (
'collapse',
)
}
)
)
readonly_fields = (
'relevancy_warnings',
'relevancy_errors',
'may_be_relevant_flag',
'detail_link',
'make_year_link',
'model_link',
'vehicle_count'
)
autocomplete_fields = (
'make_year',
'model'
)
inlines = (
SemaBaseVehicleVehiclesTabularInline,
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def make_year_link(self, obj):
if not obj or not obj.pk or not obj.make_year:
return None
return get_change_view_link(obj.make_year, 'See Full Make Year')
make_year_link.short_description = ''
def model_link(self, obj):
if not obj or not obj.pk or not obj.model:
return None
return get_change_view_link(obj.model, 'See Full Model')
model_link.short_description = ''
def vehicle_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj._vehicle_relevant_count}/{obj._vehicle_count}'
vehicle_count.admin_order_field = '_vehicle_relevant_count'
vehicle_count.short_description = 'vehicle count'
def may_be_relevant_flag(self, obj):
if not obj or not obj.pk:
return None
if obj.is_relevant != obj.may_be_relevant:
return '~'
else:
return ''
may_be_relevant_flag.short_description = ''
def get_queryset(self, request):
return super().get_queryset(request).with_admin_data()
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'base_vehicle_id',
'make_year',
'model'
)
}
),
)
return super().get_fieldsets(request, obj)
def get_readonly_fields(self, request, obj=None):
readonly_fields = super().get_readonly_fields(request, obj)
if obj and not request.user.is_superuser:
readonly_fields += (
'base_vehicle_id',
)
return readonly_fields
def get_inline_instances(self, request, obj=None):
if not obj:
return []
return super().get_inline_instances(request, obj)
@admin.register(SemaVehicle)
class SemaVehicleModelAdmin(ObjectActions, ModelAdmin, SemaVehicleActions):
list_select_related = (
'base_vehicle',
'submodel'
)
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action'
)
changelist_actions = (
'import_new_class_action', # TO NOTE: too long
# 'import_class_action', # TO NOTE: too long
# 'unauthorize_class_action', # TO NOTE: too long
# 'sync_class_action' # TO NOTE: too long
)
search_fields = (
'vehicle_id',
'base_vehicle__base_vehicle_id',
'base_vehicle__make_year__year__year',
'base_vehicle__make_year__make__make_id',
'base_vehicle__make_year__make__name',
'base_vehicle__model__model_id',
'base_vehicle__model__name',
'submodel__submodel_id',
'submodel__name'
)
list_display = (
'detail_link',
'vehicle_id',
'base_vehicle',
'submodel',
'engine_count',
'dataset_count',
'product_count',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
'relevancy_exception'
)
list_filter = (
'is_authorized',
'is_relevant',
SemaVehicleMayBeRelevant,
SemaVehicleByDecade,
('base_vehicle__make_year__make', RelatedOnlyFieldListFilter),
('base_vehicle__model', RelatedOnlyFieldListFilter),
('submodel', RelatedOnlyFieldListFilter)
)
fieldsets = (
(
None, {
'fields': (
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
}
),
(
'Vehicle', {
'fields': (
'vehicle_id',
)
}
),
(
'Base Vehicle', {
'fields': (
'base_vehicle_link',
'base_vehicle'
),
'classes': (
'collapse',
)
}
),
(
'Submodel', {
'fields': (
'submodel_link',
'submodel'
),
'classes': (
'collapse',
)
}
)
)
readonly_fields = (
'relevancy_warnings',
'relevancy_errors',
'may_be_relevant_flag',
'detail_link',
'base_vehicle_link',
'submodel_link',
'engine_count',
'dataset_count',
'product_count'
)
autocomplete_fields = (
'base_vehicle',
'submodel'
)
inlines = (
# SemaVehicleEnginesTabularInline, # TO NOTE: too long
SemaVehicleDatasetsTabularInline,
# SemaVehicleProductsTabularInline # TO NOTE: too long
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def base_vehicle_link(self, obj):
if not obj or not obj.pk or not obj.base_vehicle:
return None
return get_change_view_link(
obj.base_vehicle,
'See Full Base Vehicle'
)
base_vehicle_link.short_description = ''
def submodel_link(self, obj):
if not obj or not obj.pk or not obj.submodel:
return None
return get_change_view_link(obj.submodel, 'See Full Submodel')
submodel_link.short_description = ''
def engine_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj._engine_relevant_count}/{obj._engine_count}'
engine_count.admin_order_field = '_engine_relevant_count'
engine_count.short_description = 'engine count'
def dataset_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj._dataset_relevant_count}/{obj._dataset_count}'
dataset_count.admin_order_field = '_dataset_relevant_count'
dataset_count.short_description = 'dataset count'
def product_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj._product_relevant_count}/{obj._product_count}'
product_count.admin_order_field = '_product_relevant_count'
product_count.short_description = 'product count'
def may_be_relevant_flag(self, obj):
if not obj or not obj.pk:
return None
if obj.is_relevant != obj.may_be_relevant:
return '~'
else:
return ''
may_be_relevant_flag.short_description = ''
def get_queryset(self, request):
return super().get_queryset(request).with_admin_data()
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'vehicle_id',
'base_vehicle',
'submodel'
)
}
),
)
return super().get_fieldsets(request, obj)
def get_readonly_fields(self, request, obj=None):
readonly_fields = super().get_readonly_fields(request, obj)
if obj and not request.user.is_superuser:
readonly_fields += (
'vehicle_id',
)
return readonly_fields
def get_inline_instances(self, request, obj=None):
if not obj:
return []
return super().get_inline_instances(request, obj)
@admin.register(SemaEngine)
class SemaEngineModelAdmin(ObjectActions, ModelAdmin, SemaEngineActions):
list_select_related = (
'vehicle',
)
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action'
)
changelist_actions = (
'import_new_class_action', # TO NOTE: too long
# 'import_class_action', # TO NOTE: too long
# 'unauthorize_class_action', # TO NOTE: too long
# 'sync_class_action' # TO NOTE: too long
)
search_fields = (
'id',
'vehicle__vehicle_id',
'vehicle__base_vehicle__base_vehicle_id',
'vehicle__base_vehicle__make_year__year__year',
'vehicle__base_vehicle__make_year__make__make_id',
'vehicle__base_vehicle__make_year__make__name',
'vehicle__base_vehicle__model__model_id',
'vehicle__base_vehicle__model__name',
'vehicle__submodel__submodel_id',
'vehicle__submodel__name'
)
list_display = (
'detail_link',
'id',
'vehicle',
'litre',
'block_type',
'cylinders',
'cylinder_head_type',
'fuel_type',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
'relevancy_exception'
)
list_filter = (
'is_authorized',
'is_relevant',
SemaEngineMayBeRelevant,
'fuel_type',
'manufacturer',
'litre',
'block_type',
'cylinders',
'valves_per_engine',
'cylinder_head_type',
'ignition_system_type',
SemaEngineByDecade,
('vehicle__base_vehicle__make_year__make', RelatedOnlyFieldListFilter),
('vehicle__base_vehicle__model', RelatedOnlyFieldListFilter),
('vehicle__submodel', RelatedOnlyFieldListFilter)
)
fieldsets = (
(
None, {
'fields': (
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception',
'id'
)
}
),
(
'Vehicle', {
'fields': (
'vehicle_link',
'vehicle'
),
'classes': (
'collapse',
)
}
),
(
'Engine', {
'fields': (
'litre',
'cc',
'cid',
'cylinders',
'block_type',
'engine_bore_in',
'engine_bore_metric',
'engine_stroke_in',
'engine_stroke_metric',
'valves_per_engine',
'aspiration',
'cylinder_head_type',
'fuel_type',
'ignition_system_type',
'manufacturer',
'horse_power',
'kilowatt_power',
'engine_designation'
)
}
)
)
readonly_fields = (
'id',
'relevancy_warnings',
'relevancy_errors',
'may_be_relevant_flag',
'detail_link',
'vehicle_link'
)
autocomplete_fields = (
'vehicle',
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def vehicle_link(self, obj):
if not obj or not obj.pk or not obj.vehicle:
return None
return get_change_view_link(
obj.vehicle,
'See Full Vehicle'
)
vehicle_link.short_description = ''
def may_be_relevant_flag(self, obj):
if not obj or not obj.pk:
return None
if obj.is_relevant != obj.may_be_relevant:
return '~'
else:
return ''
may_be_relevant_flag.short_description = ''
def get_queryset(self, request):
return super().get_queryset(request).with_admin_data()
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'vehicle',
'litre',
'cc',
'cid',
'cylinders',
'block_type',
'engine_bore_in',
'engine_bore_metric',
'engine_stroke_in',
'engine_stroke_metric',
'valves_per_engine',
'aspiration',
'cylinder_head_type',
'fuel_type',
'ignition_system_type',
'manufacturer',
'horse_power',
'kilowatt_power',
'engine_designation'
)
}
),
)
return super().get_fieldsets(request, obj)
def get_inline_instances(self, request, obj=None):
if not obj:
return []
return super().get_inline_instances(request, obj)
@admin.register(SemaCategory)
class SemaCategoryModelAdmin(ObjectActions, ModelAdmin, SemaCategoryActions):
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action',
'update_category_products_queryset_action'
)
changelist_actions = (
# 'import_new_class_action', # TO NOTE: does not add m2m
'import_class_action',
# 'unauthorize_class_action',
# 'sync_class_action'
)
change_actions = (
'update_category_products_object_action',
)
search_fields = (
'category_id',
'name'
)
list_display = (
'detail_link',
'category_id',
'name',
'level',
'parent_category_count',
'child_category_count',
'dataset_count',
'product_count',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
'relevancy_exception'
)
list_filter = (
'is_authorized',
'is_relevant',
SemaCategoryMayBeRelevant,
ByCategoryLevel,
HasCategoryPath,
HasShopifyCollection
)
fieldsets = (
(
None, {
'fields': (
'category_paths_link',
'shopify_collections_link',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
}
),
(
'Category', {
'fields': (
'category_id',
'name',
'level'
)
}
)
)
readonly_fields = (
'relevancy_warnings',
'relevancy_errors',
'may_be_relevant_flag',
'level',
'detail_link',
'category_paths_link',
'shopify_collections_link',
'parent_category_count',
'child_category_count',
'dataset_count',
'product_count'
)
inlines = (
SemaCategoryParentCategoriesTabularInline,
SemaCategoryChildCategoriesTabularInline,
SemaCategoryDatasetsTabularInline,
SemaCategoryProductsTabularInline
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def category_paths_link(self, obj):
if not obj or not obj.pk:
return None
if obj.level == '1':
category_path_model = obj.root_category_paths.first()._meta.model
query = f'sema_root_category={obj.pk}'
elif obj.level == '2':
category_path_model = obj.branch_category_paths.first()._meta.model
query = f'sema_branch_category={obj.pk}'
else:
category_path_model = obj.leaf_category_paths.first()._meta.model
query = f'sema_leaf_category={obj.pk}'
return get_changelist_view_link(
category_path_model,
'See Category Paths',
query=query
)
category_paths_link.short_description = ''
def shopify_collections_link(self, obj):
if not obj or not obj.pk:
return None
if obj.level == '1':
o = obj.root_category_paths.first().shopify_root_collection
elif obj.level == '2':
o = obj.branch_category_paths.first().shopify_branch_collection
else:
o = obj.leaf_category_paths.first().shopify_leaf_collection
return get_change_view_link(
o,
'See Shopify Collection'
)
shopify_collections_link.short_description = ''
def parent_category_count(self, obj):
if not obj or not obj.pk:
return None
return (
f'{obj._parent_category_relevant_count}'
f'/{obj._parent_category_count}'
)
parent_category_count.admin_order_field = '_parent_category_relevant_count'
parent_category_count.short_description = 'parent count'
def child_category_count(self, obj):
if not obj or not obj.pk:
return None
return (
f'{obj._child_category_relevant_count}'
f'/{obj._child_category_count}'
)
child_category_count.admin_order_field = '_child_category_relevant_count'
child_category_count.short_description = 'child count'
def dataset_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj._dataset_relevant_count}/{obj._dataset_count}'
dataset_count.admin_order_field = '_dataset_relevant_count'
dataset_count.short_description = 'dataset count'
def product_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj._product_relevant_count}/{obj._product_count}'
product_count.admin_order_field = '_product_relevant_count'
product_count.short_description = 'product count'
def may_be_relevant_flag(self, obj):
if not obj or not obj.pk:
return None
if obj.is_relevant != obj.may_be_relevant:
return '~'
else:
return ''
may_be_relevant_flag.short_description = ''
def get_queryset(self, request):
return super().get_queryset(request).with_admin_data()
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'category_id',
'name',
'parent_categories'
)
}
),
)
return super().get_fieldsets(request, obj)
def get_readonly_fields(self, request, obj=None):
readonly_fields = super().get_readonly_fields(request, obj)
if obj and not request.user.is_superuser:
readonly_fields += (
'category_id',
)
return readonly_fields
def get_inline_instances(self, request, obj=None):
if not obj:
return []
return super().get_inline_instances(request, obj)
@admin.register(SemaProduct)
class SemaProductModelAdmin(ObjectActions, ModelAdmin, SemaProductActions):
list_select_related = (
'dataset',
)
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action',
'update_html_queryset_action',
'update_product_vehicles_queryset_action',
'update_description_pies_queryset_action',
'update_digital_assets_pies_queryset_action'
)
changelist_actions = (
'import_new_class_action',
# 'import_class_action',
# 'unauthorize_class_action',
# 'sync_class_action'
)
change_actions = (
'update_html_object_action',
'update_product_vehicles_object_action',
'update_description_pies_object_action',
'update_digital_assets_pies_object_action'
)
search_fields = (
'product_id',
'part_number',
'dataset__dataset_id',
'dataset__name',
'dataset__brand__brand_id',
'dataset__brand__name'
)
list_display = (
'detail_link',
'product_id',
'part_number',
'dataset',
'description_pies_attribute_count',
'digital_assets_pies_attribute_count',
'category_count',
'vehicle_count',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
'relevancy_exception'
)
list_filter = (
'is_authorized',
'is_relevant',
SemaProductMayBeRelevant,
('dataset__brand', RelatedOnlyFieldListFilter),
HasItem,
HasPremierProduct,
HasShopifyProduct,
HasCategory,
HasVehicle,
HasHtml
)
fieldsets = (
(
None, {
'fields': (
'item_link',
'premier_product_link',
'shopify_product_link',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
}
),
(
'Dataset', {
'fields': (
'dataset_link',
'dataset'
),
'classes': (
'collapse',
)
}
),
(
'Product', {
'fields': (
'product_id',
'part_number'
)
}
),
(
'HTML', {
'fields': (
'html',
),
'classes': (
'collapse',
)
}
),
(
None, {
'fields': (
'html_preview',
)
}
)
)
autocomplete_fields = (
'dataset',
)
readonly_fields = (
'relevancy_warnings',
'relevancy_errors',
'may_be_relevant_flag',
'html_preview',
'detail_link',
'dataset_link',
'item_link',
'premier_product_link',
'shopify_product_link',
'description_pies_attribute_count',
'digital_assets_pies_attribute_count',
'category_count',
'vehicle_count'
)
inlines = (
SemaProductDescriptionPiesAttributeTabularInline,
SemaProductDigitalAssetsPiesAttributeTabularInline,
SemaProductCategoriesTabularInline,
SemaProductVehiclesTabularInline
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def item_link(self, obj):
if not obj or not obj.pk or not hasattr(obj, 'item'):
return None
return get_change_view_link(obj.item, 'See Item')
item_link.short_description = ''
def premier_product_link(self, obj):
if (not obj or not obj.pk
or not hasattr(obj, 'item') or not obj.item.premier_product):
return None
return get_change_view_link(
obj.item.premier_product,
'See Premier Product',
)
premier_product_link.short_description = ''
def shopify_product_link(self, obj):
if (not obj or not obj.pk
or not hasattr(obj, 'item') or not obj.item.shopify_product):
return None
return get_change_view_link(
obj.item.shopify_product,
'See Shopify Product',
)
shopify_product_link.short_description = ''
def dataset_link(self, obj):
if not obj or not obj.pk or not obj.dataset:
return None
return get_change_view_link(obj.dataset, 'See Full Dataset')
dataset_link.short_description = ''
def description_pies_attribute_count(self, obj):
if not obj or not obj.pk:
return None
return obj.description_pies_attribute_count
# description_pies_attribute_count.admin_order_field = (
# '_description_pies_attribute_count'
# )
description_pies_attribute_count.short_description = 'description count'
def digital_assets_pies_attribute_count(self, obj):
if not obj or not obj.pk:
return None
return obj.digital_assets_pies_attribute_count
# digital_assets_pies_attribute_count.admin_order_field = (
# '_digital_assets_pies_attribute_count'
# )
digital_assets_pies_attribute_count.short_description = (
'digital assets count'
)
def category_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj.category_relevant_count}/{obj.category_count}'
# category_count.admin_order_field = '_category_relevant_count'
category_count.short_description = 'category count'
def vehicle_count(self, obj):
if not obj or not obj.pk:
return None
return f'{obj.vehicle_relevant_count}/{obj.vehicle_count}'
# vehicle_count.admin_order_field = '_vehicle_relevant_count'
vehicle_count.short_description = 'vehicle count'
def html_preview(self, obj):
if not obj or not obj.pk:
return None
return get_html_preview(obj.clean_html)
html_preview.short_description = ''
def may_be_relevant_flag(self, obj):
if not obj or not obj.pk:
return None
if obj.is_relevant != obj.may_be_relevant:
return '~'
else:
return ''
may_be_relevant_flag.short_description = ''
# def get_queryset(self, request): # FIXME
# return super().get_queryset(request).with_admin_data()
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'product_id',
'part_number',
'dataset',
'html',
'categories',
'vehicles'
)
}
),
)
return super().get_fieldsets(request, obj)
def get_readonly_fields(self, request, obj=None):
readonly_fields = super().get_readonly_fields(request, obj)
if obj and not request.user.is_superuser:
readonly_fields += (
'product_id',
)
return readonly_fields
def get_inline_instances(self, request, obj=None):
if not obj:
return []
return super().get_inline_instances(request, obj)
class SemaPiesAttributeBaseModelAdmin(ModelAdmin):
list_select_related = (
'product',
)
actions = (
'mark_as_relevant_queryset_action',
'mark_as_irrelevant_queryset_action'
)
search_fields = (
'id',
'segment',
'value',
'product__product_id',
'product__part_number',
'product__dataset__dataset_id',
'product__dataset__name',
'product__dataset__brand__brand_id',
'product__dataset__brand__name'
)
list_display = (
'detail_link',
'id',
'product',
'segment',
'value',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
list_display_links = (
'detail_link',
)
list_editable = (
'is_relevant',
'relevancy_exception'
)
list_filter = (
'is_authorized',
'is_relevant'
)
fieldsets = (
(
None, {
'fields': (
'id',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
}
),
(
'Product', {
'fields': (
'product_link',
'product'
),
'classes': (
'collapse',
)
}
),
(
'PIES Attribute', {
'fields': (
'segment',
'value'
)
}
)
)
autocomplete_fields = (
'product',
)
readonly_fields = (
'id',
'relevancy_warnings',
'relevancy_errors',
'may_be_relevant_flag',
'detail_link',
'product_link'
)
def detail_link(self, obj):
if not obj or not obj.pk:
return None
return get_change_view_link(obj, 'Details')
detail_link.short_description = ''
def product_link(self, obj):
if not obj or not obj.pk or not obj.product:
return None
return get_change_view_link(obj.product, 'See Full Product')
product_link.short_description = ''
def may_be_relevant_flag(self, obj):
if not obj or not obj.pk:
return None
if obj.is_relevant != obj.may_be_relevant:
return '~'
else:
return ''
may_be_relevant_flag.short_description = ''
def get_fieldsets(self, request, obj=None):
if not obj:
return (
(
None, {
'fields': (
'segment',
'value'
)
}
),
)
return super().get_fieldsets(request, obj)
@admin.register(SemaDescriptionPiesAttribute)
class SemaDescriptionPiesAttributeModelAdmin(ObjectActions,
SemaPiesAttributeBaseModelAdmin,
SemaDescriptionPiesAttributeActions):
def get_list_filter(self, request):
return super().get_list_filter(request) + (
SemaDescriptionPiesAttributeMayBeRelevant,
)
@admin.register(SemaDigitalAssetsPiesAttribute)
class SemaSemaDigitalAssetsPiesAttributeModelAdmin(ObjectActions,
SemaPiesAttributeBaseModelAdmin,
SemaDigitalAssetsPiesAttributeActions):
fieldsets = (
(
None, {
'fields': (
'id',
'is_authorized',
'may_be_relevant_flag',
'is_relevant',
'relevancy_warnings',
'relevancy_errors',
'relevancy_exception'
)
}
),
(
'Product', {
'fields': (
'product_link',
'product'
),
'classes': (
'collapse',
)
}
),
(
'PIES Attribute', {
'fields': (
'segment',
('value', 'image_preview')
)
}
)
)
def image_preview(self, obj):
if not obj or not obj.pk or not obj.value:
return None
try:
return get_image_preview(obj.value)
except Exception as err:
return str(err)
image_preview.short_description = ''
    def get_actions(self, request):
        # Add the action only once; ModelAdmin instances persist across requests,
        # so avoid re-appending it on every call.
        if 'update_relevancy_queryset_action' not in self.actions:
            self.actions = self.actions + (
                'update_relevancy_queryset_action',
            )
        return super().get_actions(request)
def get_list_display(self, request):
return super().get_list_display(request) + (
'image_preview',
)
def get_list_filter(self, request):
return super().get_list_filter(request) + (
SemaDigitalAssetsPiesAttributeMayBeRelevant,
)
    def get_change_actions(self, request, object_id, form_url):
        # Same guard as get_actions: only append the object action once.
        if 'update_relevancy_object_action' not in self.change_actions:
            self.change_actions = self.change_actions + (
                'update_relevancy_object_action',
            )
        return super().get_change_actions(request, object_id, form_url)
def get_readonly_fields(self, request, obj=None):
return super().get_readonly_fields(request, obj) + (
'image_preview',
)
|
"""Create the Flask app"""
from flask import Flask, Request, abort, jsonify, make_response
from denseedia.config import CONFIG
from denseedia.routes import register_routes
app = Flask(__name__)
@app.errorhandler(404)
def on_route_not_found(error):
"""Override the HTML 404 default."""
content = {"msg": "Page not found"}
return jsonify(content), 404
@app.errorhandler(405)
def on_method_not_allowed(error):
"""Override the HTML 405 default."""
content = {"msg": "Method not allowed"}
return jsonify(content), 405
def on_json_loading_failed(req, error):
"""Abort with a custom JSON message."""
content = {"msg": "JSON not valid", "error": error.args[0]}
abort(make_response(jsonify(content), 400))
Request.on_json_loading_failed = on_json_loading_failed
@app.after_request
def fix_cors(response):
"""Allow the React App to access the API."""
response.headers["Access-Control-Allow-Origin"] = "http://localhost:59131"
return response
register_routes(app)
def run_server():
flask_config = CONFIG["flask"]
app.run(debug=flask_config["debug"], port=flask_config["port"])
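# A minimal CONFIG sketch (hypothetical values) matching the keys read in run_server above;
# the actual structure is defined in denseedia.config:
#     CONFIG = {"flask": {"debug": True, "port": 5000}}
# With such a config, run_server() serves the API on http://localhost:5000.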
|
# -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2020-12-22 13:16
from hanlp_common.constant import HANLP_URL
OPEN_TOK_POS_NER_SRL_DEP_SDP_CON_ELECTRA_SMALL_ZH = HANLP_URL + 'mtl/open_tok_pos_ner_srl_dep_sdp_con_electra_small_20201223_035557.zip'
"Electra (:cite:`clark2020electra`) small version of joint tok, pos, ner, srl, dep, sdp and con model trained on open-source Chinese corpus."
OPEN_TOK_POS_NER_SRL_DEP_SDP_CON_ELECTRA_BASE_ZH = HANLP_URL + 'mtl/open_tok_pos_ner_srl_dep_sdp_con_electra_base_20201223_201906.zip'
"Electra (:cite:`clark2020electra`) base version of joint tok, pos, ner, srl, dep, sdp and con model trained on open-source Chinese corpus."
CLOSE_TOK_POS_NER_SRL_DEP_SDP_CON_ELECTRA_SMALL_ZH = HANLP_URL + 'mtl/close_tok_pos_ner_srl_dep_sdp_con_electra_small_20210111_124159.zip'
"Electra (:cite:`clark2020electra`) small version of joint tok, pos, ner, srl, dep, sdp and con model trained on close-source Chinese corpus."
CLOSE_TOK_POS_NER_SRL_DEP_SDP_CON_ELECTRA_BASE_ZH = HANLP_URL + 'mtl/close_tok_pos_ner_srl_dep_sdp_con_electra_base_20210111_124519.zip'
"Electra (:cite:`clark2020electra`) base version of joint tok, pos, ner, srl, dep, sdp and con model trained on close-source Chinese corpus."
CLOSE_TOK_POS_NER_SRL_DEP_SDP_CON_ERNIE_GRAM_ZH = HANLP_URL + 'mtl/close_tok_pos_ner_srl_dep_sdp_con_ernie_gram_base_aug_20210904_145403.zip'
"ERNIE (:cite:`xiao-etal-2021-ernie`) base version of joint tok, pos, ner, srl, dep, sdp and con model trained on close-source Chinese corpus."
UD_ONTONOTES_TOK_POS_LEM_FEA_NER_SRL_DEP_SDP_CON_MT5_SMALL = HANLP_URL + 'mtl/ud_ontonotes_tok_pos_lem_fea_ner_srl_dep_sdp_con_mt5_small_20210228_123458.zip'
'mT5 (:cite:`xue-etal-2021-mt5`) small version of joint tok, pos, lem, fea, ner, srl, dep, sdp and con model trained on UD and OntoNotes5 corpus.'
UD_ONTONOTES_TOK_POS_LEM_FEA_NER_SRL_DEP_SDP_CON_XLMR_BASE = HANLP_URL + 'mtl/ud_ontonotes_tok_pos_lem_fea_ner_srl_dep_sdp_con_xlm_base_20220608_003435.zip'
'''
XLM-R (:cite:`conneau-etal-2020-unsupervised`) base version of joint tok, pos, lem, fea, ner, srl, dep, sdp and con model trained on UD 2.10 and OntoNotes5 corpus.
The following 130 languages are supported: ``Afrikaans, Akkadian, Akuntsu, Albanian, Amharic, AncientGreek (to 1453), Ancient Hebrew, Apurinã, Arabic, Armenian, AssyrianNeo-Aramaic, Bambara, Basque, Beja, Belarusian, Bengali, Bhojpuri, Breton, Bulgarian, Catalan, Cebuano, Central Siberian Yupik, Chinese, Chukot, ChurchSlavic, Coptic, Croatian, Czech, Danish, Dutch, Emerillon, English, Erzya, Estonian, Faroese, Finnish, French, Galician, German, Gothic, Guajajára, Guarani, Hebrew, Hindi, Hittite, Hungarian, Icelandic, Indonesian, Irish, Italian, Japanese, Javanese, K\'iche\', Kangri, Karelian, Karo(Brazil), Kazakh, Khunsari, Komi-Permyak, Komi-Zyrian, Korean, Latin, Latvian, Ligurian, LiteraryChinese, Lithuanian, Livvi, LowGerman, Madi, Makuráp, Maltese, Manx, Marathi, MbyáGuaraní, Modern Greek (1453-), Moksha, Mundurukú, Nayini, Neapolitan, Nigerian Pidgin, NorthernKurdish, Northern Sami, Norwegian, OldFrench (842-ca. 1400), OldRussian, Old Turkish, Persian, Polish, Portuguese, Romanian, Russia Buriat, Russian, Sanskrit, ScottishGaelic, Serbian, SkoltSami, Slovak, Slovenian, Soi, South Levantine Arabic, Spanish, Swedish, SwedishSign Language, SwissGerman, Tagalog, Tamil, Tatar, Telugu, Thai, Tupinambá, Turkish, Uighur, Ukrainian, Umbrian, UpperSorbian, Urdu, Urubú-Kaapor, Vietnamese, Warlpiri, Welsh, Western Armenian, WesternFrisian, Wolof, Xibe, Yakut, Yoruba, YueChinese``.
Performance: ``{con UCM: 20.31% LCM: 16.82% UP: 77.50% UR: 76.63% UF: 77.06% LP: 71.25% LR: 70.46% LF: 70.85%}{ner P: 79.93% R: 80.76% F1: 80.34%}{sdp/dm UF: 93.71% LF: 93.00%}{sdp/pas UF: 97.63% LF: 96.37%}{sdp/psd UF: 93.08% LF: 80.95%}{srl [predicate P: 90.95% R: 84.25% F1: 87.47%][e2e P: 78.89% R: 67.32% F1: 72.65%]}{tok P: 98.50% R: 98.70% F1: 98.60%}{ud [lemmas Accuracy:85.95%][upos Accuracy:89.95%][deps UAS: 85.78% LAS: 78.51%][feats Accuracy:82.18%]}``.
'''
NPCMJ_UD_KYOTO_TOK_POS_CON_BERT_BASE_CHAR_JA = HANLP_URL + 'mtl/npcmj_ud_kyoto_tok_pos_ner_dep_con_srl_bert_base_char_ja_20210914_133742.zip'
'BERT (:cite:`devlin-etal-2019-bert`) base char encoder trained on NPCMJ/UD/Kyoto corpora with decoders including tok, pos, ner, dep, con, srl.'
# Will be filled up during runtime
ALL = {}
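# Usage sketch (assuming the hanlp package is installed and can download models): any of the
# identifiers above can be passed to hanlp.load(), e.g.
#     import hanlp
#     mtl = hanlp.load(CLOSE_TOK_POS_NER_SRL_DEP_SDP_CON_ELECTRA_SMALL_ZH)
#     doc = mtl('商品和服务')  # multi-task annotation: tok, pos, ner, srl, dep, sdp, con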
|
"""Common code for the omics ingest."""
import base64
from contextlib import contextmanager
import datetime
import os
import os.path
import pathlib
import subprocess # nosec
import tempfile
import typing
import dateutil.parser
from irods_capability_automated_ingest.sync_irods import irods_session
from irods.meta import iRODSMeta
from .settings import (
RODEOS_HASHDEEP_ALGO as HASHDEEP_ALGO,
RODEOS_HASHDEEP_THREADS as HASHDEEP_THREADS,
RODEOS_MANIFEST_LOCAL as MANIFEST_LOCAL,
RODEOS_MANIFEST_IRODS as MANIFEST_IRODS,
RODEOS_MOVE_AFTER_INGEST as _MOVE_AFTER_INGEST,
)
#: AVU key to use for ``last_update`` attribute.
KEY_LAST_UPDATE = "rodeos::ingest::last_update"
#: AVU key to use for destination run folder ingestion status.
KEY_STATUS = "rodeos::ingest::status"
#: AVU key to use for ``first_seen`` attribute.
KEY_FIRST_SEEN = "rodeos::ingest::first_seen"
#: AVU key for manifest status
KEY_MANIFEST_STATUS = "rodeos::ingest::manifest_status"
#: AVU key with manifest detailed message
KEY_MANIFEST_MESSAGE = "rodeos::ingest::manifest_message"
MOVE_AFTER_INGEST = _MOVE_AFTER_INGEST
@contextmanager
def cleanuping(thing):
try:
yield thing
finally:
thing.cleanup()
def to_ingested_path(orig_path: typing.Union[str, pathlib.Path]) -> pathlib.Path:
"""Convert a run folder path to an "ingested" path."""
orig_path = pathlib.Path(orig_path)
ingested_base = orig_path.parent.parent / (orig_path.parent.name + "-INGESTED")
return ingested_base / orig_path.name
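# Example mapping (hypothetical paths): "/data/runs/run_0001" becomes
# "/data/runs-INGESTED/run_0001", i.e. the "-INGESTED" suffix is appended to the parent folder.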
def _compare_manifests(path_local, path_irods, logger):
"""Compare manifests at paths ``path_local`` and ``path_irods``."""
# Load file sizes and checksums.
info_local = {}
with open(path_local, "rt") as inputf:
for line in inputf:
if line.startswith("#") or line.startswith("%"):
continue
line = line.strip()
size, chksum, path = line.split(",", 2)
info_local[path] = (size, chksum)
info_irods = {}
with open(path_irods, "rt") as inputf:
for line in inputf:
line = line.strip()
size, chksum, path = line.split(",", 2)
if chksum.startswith("sha2:"):
chksum = base64.b64decode(chksum[5:]).hex()
info_irods[path] = (size, chksum)
# Compare file sizes and checksums.
problem = None
for path in sorted(info_local.keys() & info_irods.keys()):
size_local, chksum_local = info_local[path]
size_irods, chksum_irods = info_irods[path]
if size_local != size_irods:
problem = "file size mismatch %s vs %s for %s" % (size_local, size_irods, path)
logger.error(
"file size does not match %s vs %s for %s" % (size_local, size_irods, path)
)
if chksum_local != chksum_irods:
problem = "file checksum mismatch %s vs %s for %s" % (chksum_local, chksum_irods, path)
logger.error(
"file checksum does not match %s vs %s for %s" % (chksum_local, chksum_irods, path)
)
# Find extra items on either side.
extra_local = info_local.keys() - info_irods.keys()
if sorted(extra_local):
problem = "extra file in local: %s" % list(sorted(extra_local))[0]
logger.error(
"%d items locally that are not in irods, up to 10 shown:\n %s"
% (len(extra_local), " \n".join(list(sorted(extra_local))[:10]))
)
extra_irods = info_irods.keys() - info_local.keys()
if sorted(extra_irods):
problem = "extra file in irods : %s" % list(sorted(extra_irods))[0]
logger.error(
"%d items in irods that are not present locally, up to 10 shown:\n %s"
% (len(extra_irods), " \n".join(list(sorted(extra_irods))[:10]))
)
if problem:
raise RuntimeError("Difference in manifests: %s" % problem)
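# Both manifests compared above are expected to hold hashdeep-style CSV rows of the form
# "size,checksum,relative_path", e.g. (hypothetical): 1048576,9f86d081...,./sub/dir/file.bin
# Comment lines starting with '#' or '%' are only present (and skipped) in the local manifest.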
def _post_job_run_folder_done(
logger,
session,
src_folder,
dst_collection,
is_folder_done: typing.Callable[[typing.Union[pathlib.Path, str]], bool],
delay_until_at_rest,
):
"""Handle run folder being done:
- Move into ingested folder on source.
- Update status meta data in destination collection.
"""
src_folder = pathlib.Path(src_folder)
# Get "last updated" time from meta data.
last_update = None
for meta in dst_collection.metadata.get_all(KEY_LAST_UPDATE):
value = dateutil.parser.parse(meta.value)
if last_update is None or value > last_update:
last_update = value
now = datetime.datetime.now()
last_update_age = now - (last_update or now)
# Do not proceed if not marked as done.
if not is_folder_done(src_folder): # pragma: no cover
logger.info("folder %s is not marked as done" % src_folder)
return
# Compute and check manifest and move if data is considered at rest.
if last_update_age >= delay_until_at_rest:
logger.info(
"age of last update of %s is %s (<%s) -- will finalize (manifest+move)"
% (dst_collection.path, last_update_age, delay_until_at_rest)
)
run_ichksum(dst_collection.path, recurse=True)
local_path = compute_local_manifest(logger, src_folder)
irods_path = compute_irods_manifest(dst_collection, logger, src_folder)
# Compare the manifest files.
try:
_compare_manifests(local_path, irods_path, logger)
except RuntimeError as e: # pragma: no cover
dst_collection.metadata[KEY_MANIFEST_STATUS] = iRODSMeta(
KEY_MANIFEST_STATUS, "failed", ""
)
dst_collection.metadata[KEY_MANIFEST_MESSAGE] = iRODSMeta(
KEY_MANIFEST_MESSAGE, str(e), ""
)
raise
else:
dst_collection.metadata[KEY_MANIFEST_STATUS] = iRODSMeta(
KEY_MANIFEST_STATUS, "success", ""
)
dst_collection.metadata[KEY_MANIFEST_MESSAGE] = iRODSMeta(
KEY_MANIFEST_MESSAGE, "all good", ""
)
# Put local hashdeep manifest.
local_manifest_dest = os.path.join(dst_collection.path, MANIFEST_LOCAL)
session.data_objects.put(local_path, local_manifest_dest)
run_ichksum(local_manifest_dest)
# Put manifest built from irods.
irods_manifest_dest = os.path.join(dst_collection.path, MANIFEST_IRODS)
session.data_objects.put(irods_path, irods_manifest_dest)
run_ichksum(irods_manifest_dest)
# Move folder.
if MOVE_AFTER_INGEST:
new_src_folder = to_ingested_path(src_folder)
logger.info("attempting move %s => %s" % (src_folder, new_src_folder))
try:
new_src_folder.parent.mkdir(exist_ok=True)
src_folder.rename(new_src_folder)
except OSError as e: # pragma: no cover
logger.error("could not move to ingested: %s" % e)
else:
logger.info("configured to not move %s" % src_folder)
# Update ``status`` meta data.
dst_collection.metadata[KEY_STATUS] = iRODSMeta(KEY_STATUS, "complete", "")
else:
logger.info(
"age of last update of %s is %s (<%s) -- not moving to ingested"
% (dst_collection.path, last_update_age, delay_until_at_rest)
)
def compute_irods_manifest(dst_collection, logger, src_folder):
"""Compute manifest from irods checksums."""
logger.info("pull irods checksums into manifest")
irods_path = os.path.join(src_folder, MANIFEST_IRODS)
try:
with tempfile.TemporaryFile("w+t") as tmp_f:
# Obtain information for files directly in destination collection.
cmd = [
"iquest",
"%d,%s,%s/%s",
(
"SELECT DATA_SIZE, DATA_CHECKSUM, COLL_NAME, DATA_NAME "
"WHERE COLL_NAME = '%s' AND DATA_NAME != '%s' AND DATA_NAME != '%s'"
)
% (dst_collection.path, MANIFEST_LOCAL, MANIFEST_IRODS),
]
subprocess.run(cmd, stdout=tmp_f, encoding="utf-8", check=True) # nosec
# Obtain information for files destination subcollections.
cmd_sub = [
"iquest",
"%d,%s,%s/%s",
(
"SELECT DATA_SIZE, DATA_CHECKSUM, COLL_NAME, DATA_NAME "
"WHERE COLL_NAME like '%s/%%'"
)
% dst_collection.path,
]
subprocess.run(cmd_sub, stdout=tmp_f, encoding="utf-8", check=True) # nosec
# Copy to final output file.
tmp_f.flush()
tmp_f.seek(0)
with open(irods_path, "wt") as chk_f:
for line in tmp_f:
line = line.strip()
if line.startswith("CAT_NO_ROWS_FOUND"):
continue
size, chksum, path = line.split(",", 2)
path = ".%s" % path[len(dst_collection.path) :]
print(",".join([size, chksum, path]), file=chk_f)
except subprocess.CalledProcessError as e: # pragma: no cover
        logger.warning("Creation of iRODS manifest failed, aborting: %s" % e)
os.remove(irods_path)
raise
return irods_path
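# Example of the path rewrite above (hypothetical values): with dst_collection.path set to
# "/tempZone/home/ingest/run_0001", an iquest row
#     1048576,sha2:AbCd...,/tempZone/home/ingest/run_0001/sub/file.bin
# is written to the manifest as "1048576,sha2:AbCd...,./sub/file.bin" so that its path lines up
# with the local hashdeep manifest.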
def compute_local_manifest(logger, src_folder):
"""Compute local hashdeep manifest."""
local_path = os.path.join(src_folder, MANIFEST_LOCAL)
logger.info("compute checksums and store to %s" % local_path)
try:
with open(local_path, "wt") as chk_f:
cmd_find = [
"find",
".",
"-type",
"f",
"-and",
"-not",
"-path",
"./%s" % MANIFEST_LOCAL,
"-and",
"-not",
"-path",
"./%s" % MANIFEST_IRODS,
]
p_find = subprocess.Popen(cmd_find, cwd=src_folder, stdout=subprocess.PIPE,) # nosec
subprocess.run( # nosec
["hashdeep", "-c", HASHDEEP_ALGO, "-f", "/dev/stdin", "-j", str(HASHDEEP_THREADS),],
cwd=src_folder,
stdin=p_find.stdout,
stdout=chk_f,
encoding="utf-8",
check=True,
)
if p_find.wait() != 0: # pragma: no cover
                # subprocess.CalledProcessError expects (returncode, cmd) in that order.
                raise subprocess.CalledProcessError(
                    p_find.returncode, cmd_find
                )
except subprocess.CalledProcessError as e: # pragma: no cover
        logger.warning("Computing checksums failed, aborting: %s" % e)
os.remove(local_path)
raise
return local_path
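# Roughly equivalent shell pipeline (sketch, with the settings substituted):
#     find . -type f -and -not -path ./<MANIFEST_LOCAL> -and -not -path ./<MANIFEST_IRODS> \
#         | hashdeep -c <HASHDEEP_ALGO> -f /dev/stdin -j <HASHDEEP_THREADS> > <MANIFEST_LOCAL>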
def pre_job(hdlr_mod, logger, meta):
"""Set the ``first_seen`` meta data value."""
src_root = pathlib.Path(meta["root"])
with cleanuping(irods_session(hdlr_mod=hdlr_mod, meta=meta, logger=logger)) as session:
dst_root = session.collections.get(meta["target"])
dst_collections = {c.name: c for c in dst_root.subcollections}
for src_folder in sorted([f.name for f in sorted(src_root.iterdir())]):
if src_folder in dst_collections:
coll = dst_collections[src_folder]
if not coll.metadata.get_all(KEY_FIRST_SEEN):
coll.metadata[KEY_FIRST_SEEN] = iRODSMeta(
KEY_FIRST_SEEN, datetime.datetime.now().isoformat(), ""
)
else:
logger.info("Skipping %s as it corresponds to no destination collection")
def post_job(
hdlr_mod,
logger,
meta,
is_folder_done: typing.Callable[[typing.Union[pathlib.Path, str]], bool],
delay_until_at_rest,
):
"""Move completed run folders into the "ingested" area."""
src_root = pathlib.Path(meta["root"])
with cleanuping(irods_session(hdlr_mod=hdlr_mod, meta=meta, logger=logger)) as session:
dst_root = session.collections.get(meta["target"])
dst_collections = {c.name: c for c in dst_root.subcollections}
for src_folder in sorted([f.name for f in sorted(src_root.iterdir())]):
if src_folder in dst_collections:
_post_job_run_folder_done(
logger,
session,
src_root / src_folder,
dst_collections[src_folder],
is_folder_done,
delay_until_at_rest,
)
else:
logger.info("Skipping %s as it corresponds to no destination collection")
def refresh_last_update_metadata(logger, session, meta):
"""Update the ``last_update`` and ``status`` meta data value."""
    # Get the path in irods that corresponds to root and update the meta data there.
    path = pathlib.Path(meta["path"])
    root = pathlib.Path(meta["root"])
    target = pathlib.Path(meta["target"])
    logger.info("meta = %s" % meta)
    rel_root_path = path.relative_to(root)  # relative to root
    logger.debug("rel_root_path = %s" % rel_root_path)
    rel_folder_path = "/".join(str(rel_root_path).split("/")[1:])  # relative to run folder
    logger.debug("rel_folder_path = %s" % rel_folder_path)
    root_target = str(target)[: -(len(str(rel_folder_path)) + 1)]
    logger.debug("root_target = %s" % root_target)
with cleanuping(session) as wrapped_session:
coll = wrapped_session.collections.get(root_target)
# Replace ``last_update`` and ``status`` meta data.
coll.metadata[KEY_LAST_UPDATE] = iRODSMeta(
KEY_LAST_UPDATE, datetime.datetime.now().isoformat(), ""
)
coll.metadata[KEY_STATUS] = iRODSMeta(KEY_STATUS, "running", "")
def run_ichksum(irods_path: str, recurse: bool = False) -> None:
"""Run ``ichksum $irods_path``."""
args = ["ichksum", irods_path]
if recurse:
args.insert(1, "-r")
subprocess.run(args, check=True)
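# Example invocation (hypothetical path): run_ichksum("/tempZone/home/ingest/run_0001", recurse=True)
# runs the iRODS command line "ichksum -r /tempZone/home/ingest/run_0001".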
|
from src.feature_extractor.interface import IBoWFeatureExtractor, IW2VFeatureExtractor
from src.feature_extractor.impl import (
TFIDFFeatureExtractor,
CountFeatureExtractor,
FastTextFeatureExtractor,
BERTFeatureExtractor,
RobertaFeatureExtractor,
)
|
from flask import Flask, request, jsonify
from flask_restful import Resource, Api
app = Flask(__name__)
api = Api(app)
def bubbleSort(arr):
n = len(arr)
# Traverse through all array elements
for i in range(n):
# Last i elements are already in place
for j in range(0, n-i-1):
# traverse the array from 0 to n-i-1
# Swap if the element found is greater
# than the next element
if arr[j] > arr[j+1] :
arr[j], arr[j+1] = arr[j+1], arr[j]
return arr
class BubbleSort(Resource):
def post(self):
args = [int(i) for i in str(request.data,'utf-8').split(",")]
result = bubbleSort(args)
return result
api.add_resource(BubbleSort, '/')
if __name__ == '__main__':
app.run(host='0.0.0.0', port="8081")
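# Example request (hypothetical): POST a comma-separated list of integers as the raw body, e.g.
#     curl -X POST http://localhost:8081/ -d "5,3,1"
# The resource parses it to [5, 3, 1] and returns the sorted JSON array [1, 3, 5].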
|
# -*- coding: utf-8 -*-
from numpy import pi
def comp_torque(self, out_dict, N0):
"""Compute the electrical average torque
Parameters
----------
self : Electrical
an Electrical object
out_dict : dict
        Dict containing all magnetic quantities that have been calculated in comp_parameters of EEC
    N0 : float
        Rotation speed [rpm]
    """
omega = 2 * pi * N0 / 60
P = out_dict["Pem_av_ref"]
Tem_av_ref = (P - out_dict["Pj_losses"]) / omega
out_dict["Tem_av_ref"] = Tem_av_ref
return out_dict
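# Worked example (hypothetical values): for N0 = 3000 rpm, omega = 2 * pi * 3000 / 60 ≈ 314.16 rad/s;
# with out_dict = {"Pem_av_ref": 1500.0, "Pj_losses": 100.0} the function stores
# out_dict["Tem_av_ref"] = (1500.0 - 100.0) / 314.16 ≈ 4.46 N.m.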
|
# __author__ = "Mio"
# __email__: "liurusi.101@gmail.com"
# created: 5/20/21 4:55 PM
from pathlib import Path
from docx import Document
Path('docx').mkdir(exist_ok=True)
for file in Path('output_docx').glob('*.docx'):
doc = Document(file)
for i in doc.paragraphs:
# print(i.text)
cn_index = i.text.find('化学品中文名')
en_index = i.text.find('化学品英文名')
if cn_index >= 0:
cn_name = i.text[cn_index:].split(' ')[0].split(': ')[1]
# if en_index >= 0:
# ene_name = i.text[en_index:].split(' ')[0].split(': ')[1]
for t in doc.tables:
if t.rows[0].cells[0].text == '组分':
en_name = t.rows[1].cells[0].text
cas_no = t.rows[1].cells[2].text
print(cn_name, en_name, cas_no)
name = f'{file.stem}__{en_name}__{cas_no}{file.suffix}'
doc.save(Path('docx') / name)
# for i in doc.paragraphs:
# print(i.text)
# if i.text.lower().find('xixisys') >= 0:
# i.text = i.text.replace("XiXisys.com 免费提供,仅供参考。", " ")
# i.text = i.text.replace(" 如有疑问,请联系 sds@xixisys.com 咨询。", " ")
# for i in doc.paragraphs:
# # print(i.text)
# cn_index = i.text.find('化学品中文名')
# en_index = i.text.find('化学品英文名')
# if cn_index >= 0:
# cn_name = i.text[cn_index:].split(' ')[0].split(': ')[1]
# if en_index >= 0:
# ene_name = i.text[en_index:].split(' ')[0].split(': ')[1]
# break
# doc.save(f"{Path('output_docx')/file.stem}.docx")
# doc = Document('乙二胺四乙酸.docx')
# for i in doc.paragraphs:
# cn_index = i.text.find('化学品中文名')
# en_index = i.text.find('化学品英文名')
# if cn_index >= 0:
# print(i.text[cn_index:].split(' ')[0].split(': ')[1])
# if en_index >= 0:
# print(i.text[en_index:].split(' ')[0].split(': ')[1])
|
import builtins
from unittest import TestCase
import mock
from engine.game import InputHandler
from engine.game.BattleEngine import BattleEngine
from engine.pkmn.types.ClassicTypesRuleSet import ClassicTypesRuleSet
from models.game.battle.BattleGameState import BattleGameState
from models.game.trainer.PokemonTrainer import PokemonTrainer
from models.game.trainer.utils.ArenaBadge import ArenaBadge
from models.pkmn.PokemonModel import PokemonModel
from models.pkmn.moves.PokemonMove import PokemonMove, MoveCategory
from models.pkmn.natures.PokemonNature import PokemonNature
from models.pkmn.stats.StatsDict import StatsDict
from models.pkmn.types.PokemonType import PokemonType
class TestBattleEngine(TestCase):
def setUp(self) -> None:
self.Pikachu = PokemonModel(
name="Pikachu",
types=(PokemonType.Electric, None),
level=100,
nature=PokemonNature.Jolly,
moves=[
PokemonMove(
name="Volt Tackle",
move_type=PokemonType.Electric,
category=MoveCategory.Physical,
pp=24,
power=120
),
PokemonMove(
name="Iron Tail",
move_type=PokemonType.Steel,
category=MoveCategory.Physical,
pp=24,
power=100,
accuracy=75
),
PokemonMove(
name="Thunderbolt",
move_type=PokemonType.Electric,
category=MoveCategory.Special,
pp=24,
power=90
)
],
base_stats=StatsDict(hp=35, atk=55, phys_def=40, spe_atk=50, spe_def=50, spd=90),
evs=StatsDict(hp=0, atk=252, phys_def=0, spe_atk=4, spe_def=0, spd=252),
ivs=StatsDict(hp=31, atk=31, phys_def=31, spe_atk=31, spe_def=31, spd=31)
)
self.Pidgeot = PokemonModel(
name="Pidgeot",
types=(PokemonType.Flying, PokemonType.Normal),
level=100,
nature=PokemonNature.Jolly,
moves=[
PokemonMove(
name="Double Edge",
move_type=PokemonType.Normal,
category=MoveCategory.Physical,
pp=24,
power=120
),
PokemonMove(
name="Brave Bird",
move_type=PokemonType.Flying,
category=MoveCategory.Physical,
pp=24,
power=120
)
],
base_stats=StatsDict(hp=83, atk=80, phys_def=75, spe_atk=70, spe_def=70, spd=101),
evs=StatsDict(hp=0, atk=252, phys_def=0, spe_atk=0, spe_def=4, spd=252),
ivs=StatsDict(hp=31, atk=31, phys_def=31, spe_atk=31, spe_def=31, spd=31)
)
self.Blastoise = PokemonModel(
name="Blastoise",
types=(PokemonType.Water, None),
level=100,
nature=PokemonNature.Modest,
moves=[
PokemonMove(
name="Hydro Pump",
move_type=PokemonType.Water,
category=MoveCategory.Special,
pp=8,
power=110,
accuracy=80
),
PokemonMove(
name="Ice Beam",
move_type=PokemonType.Ice,
category=MoveCategory.Special,
pp=16,
power=90
)
],
base_stats=StatsDict(hp=79, atk=83, phys_def=100, spe_atk=85, spe_def=105, spd=78),
evs=StatsDict(hp=252, atk=0, phys_def=0, spe_atk=252, spe_def=4, spd=0),
ivs=StatsDict(hp=31, atk=31, phys_def=31, spe_atk=31, spe_def=31, spd=31)
)
self.Red = PokemonTrainer(
name="Red",
team=[self.Pikachu],
badges=[ArenaBadge.Boulder, ArenaBadge.Cascade, ArenaBadge.Thunder, ArenaBadge.Rainbow,
ArenaBadge.Soul, ArenaBadge.Marsh, ArenaBadge.Marsh, ArenaBadge.Earth]
)
self.Blue = PokemonTrainer(
name="Blue",
team=[self.Pidgeot, self.Blastoise],
badges=[ArenaBadge.Boulder, ArenaBadge.Cascade, ArenaBadge.Thunder, ArenaBadge.Rainbow,
ArenaBadge.Soul, ArenaBadge.Marsh, ArenaBadge.Marsh, ArenaBadge.Earth]
)
self.BattleGameState = BattleGameState(player=self.Red, opponent=self.Blue)
def test_red_blue_battle(self):
with mock.patch.object(InputHandler, 'getCancelableNumberInput', return_value=0):
with mock.patch.object(InputHandler, 'getNumberInput', return_value=0):
with mock.patch.object(InputHandler, 'getDecisionType', return_value='m'):
with mock.patch.object(builtins, 'input', return_value=''):
engine = BattleEngine(battleGameState=self.BattleGameState, typesRuleSet=ClassicTypesRuleSet())
assert not engine.startGame()
|
from faker.generator import Generator
class MGenerator(Generator):
def add_provider(self, provider, *args, **kwargs):
"""
        Allow add_provider to pass extra positional/keyword arguments through to the provider constructor.
"""
if isinstance(provider, type):
provider = provider(self, *args, **kwargs)
self.providers.insert(0, provider)
for method_name in dir(provider):
# skip 'private' method
if method_name.startswith('_'):
continue
faker_function = getattr(provider, method_name)
if callable(faker_function):
# add all faker method to generator
self.set_formatter(method_name, faker_function)
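# --- Hedged usage sketch (added for illustration; not part of the original file) ---
# Shows how MGenerator.add_provider forwards extra constructor arguments to a
# provider; "PrefixProvider" and its "prefix" argument are hypothetical and
# exist only for this example.
from faker.providers import BaseProvider
class PrefixProvider(BaseProvider):
    def __init__(self, generator, prefix):
        super(PrefixProvider, self).__init__(generator)
        self.prefix = prefix
    def prefixed_word(self):
        # registered on the generator as a formatter by add_provider above
        return '{}-word'.format(self.prefix)
if __name__ == '__main__':
    gen = MGenerator()
    gen.add_provider(PrefixProvider, prefix='demo')
    print(gen.prefixed_word())  # -> 'demo-word'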
|
import logging
import copy
import yfinance as yf
import pandas as pd
import numpy as np
from pypfopt import black_litterman
from pypfopt.expected_returns import mean_historical_return
from pypfopt.black_litterman import BlackLittermanModel
from pypfopt.risk_models import CovarianceShrinkage
from sklearn.linear_model import LinearRegression
from typing import Dict, List
logging.basicConfig(filename='output.log', filemode='a',
format='%(asctime)s - %(levelname)-4s [%(filename)s:%(lineno)d] %(message)s', level=logging.INFO)
class MarketModels:
def __init__(self, historical_prices: pd.DataFrame, model: str, views_dict: Dict[str, float] = {},
confidences: List[float] = [], mcaps: pd.DataFrame = pd.DataFrame(),
ff_factors_df: pd.DataFrame = pd.DataFrame()) -> None:
self.historical_prices_df = historical_prices # they have to be backfilled
self.tickers = list(historical_prices.columns) # tickers lst
        self.model_summary = {} # dictionary containing the summary
# data validation for views and confidences
assert len(views_dict) == len(confidences), "Views and confidences need to be of the same size"
self.views_dict = views_dict
self.confidences = confidences
self.S = None # covar matrix historical
self.mu = None # mean historical returns
# get the market prices for the sp500 -> main index asset
logging.info(f"Initiating download of the main index: 'sp500'")
self.sp500 = yf.download("SPY", period="max")["Adj Close"]
# bl params
self.delta = None # market implied risk aversion
self.market_prior = None # compute the market priors -> this needs to be done according to a parameter
self.mcaps = mcaps
#self.market_prior = self.market_priors(self.mkt_data_reader.mcaps, self.delta, self.S)
# ff params
self.ff_factors = ff_factors_df # ff factors df (can get them form the dr class)
self.df_stocks_ff = None
self.risk_factors = list(self.ff_factors.columns)
self.er_fama_df = None # df of expected returns of the ff model
self.ff_betas = None # ff-betas dict
self.ff_scores = None # ff-R^2 of the stocks
self.ret_ff = None # annualized expected (mean) normal (not log) returns of the ff-model pd.Series
if model == "bl":
self.prepare_black_litterman(include_ff = False) # call the prepare bl method
elif model == "bl-ff":
self.prepare_black_litterman(include_ff = True) # prepare bl method with fama-french as views
elif model == "vanilla-ff":
self.prepare_ff()
def prepare_ff(self):
logging.info(f"Computing the expected returns and covar matrix given the FF model")
# compute log returns
ln_rt = (np.log(self.historical_prices_df / self.historical_prices_df.shift(1)))[1:] # log returns
ln_rt.index = pd.to_datetime(ln_rt.index, format= '%Y%m%d') # format date
self.df_stocks_ff = ln_rt.merge(self.ff_factors, left_index = True, right_index = True) # join with the ff factors to expand the dataset
ff_factors_cols = list(self.ff_factors.columns) # columns of the FF factors -> Here we could remove/add
betas={}
scores={}
er_fama = pd.DataFrame()
for ticker in self.tickers:
ff_factors_ticker_cols = ff_factors_cols + [ticker]
ff_factors_ticker_df = pd.DataFrame()
ff_factors_ticker_df = copy.deepcopy(self.df_stocks_ff[ff_factors_ticker_cols])
ff_factors_ticker_df[ticker + "-RF"] = ff_factors_ticker_df[ticker] - ff_factors_ticker_df["RF"]
# set up the linear regression problem
x_columns = list(filter(lambda ff_factor: ff_factor != "RF", ff_factors_cols))
Y = ff_factors_ticker_df.iloc[:,-1] # dependent var ticker -rf
X = (ff_factors_ticker_df)[x_columns] # indep vars (risk factors)
            reg = LinearRegression(fit_intercept = True).fit(X, Y) # regression
score = reg.score(X, Y) # R^2 of the lin reg
coefs = reg.coef_ # betas
betas[ticker] = coefs
scores[ticker] = score
er_fama[ticker] = np.sum(X * coefs, axis = 1) + ff_factors_ticker_df['RF'] # fama-french expected returns df
# save the model output
self.er_fama_df = er_fama
self.ff_betas = betas
self.ff_scores = scores
        # get the mean of the df, convert to normal returns and annualize with 252 trading days
self.ret_ff = (np.exp(self.er_fama_df.mean()) - 1) * 252
def prepare_black_litterman(self, include_ff = False):
logging.info(f"Computing the expected returns and covar matrices given the Black-Litterman model")
#mu_prior = self.compute_estimated_returns(self.historical_prices_df)
S_prior = self.covar_matrix(self.historical_prices_df)
self.S = S_prior
# delta compute the market implied risk aversion parameter
self.delta = black_litterman.market_implied_risk_aversion(self.sp500)
self.market_prior = self.market_priors(self.mcaps, self.delta, S_prior)
bl, ret_bl, S_bl = self.black_litterman(S_prior, self.market_prior, self.delta, self.views_dict, self.confidences)
self.model_summary = {
"name": "Black-litterman",
"model": bl
}
self.ret_bl = ret_bl # mean matrix for black litterman, pd.Series
self.S_bl = S_bl # co-variance matrix for black litterman, pd.DataFrame
@staticmethod
def black_litterman(S, market_prior, delta, viewdict, confidences):
# bl model
bl = BlackLittermanModel(
S,
pi = market_prior,
absolute_views = viewdict,
risk_aversion = delta,
omega = "idzorek",
view_confidences = confidences
)
ret_bl = bl.bl_returns() # mean matrix for black litterman
S_bl = bl.bl_cov() # co-variance matrix for black litterman
return bl, ret_bl, S_bl
@staticmethod
def compute_estimated_returns(mkt_data):
logging.info(f"Computing estimated returns using: 'mean_historical_return'")
return mean_historical_return(mkt_data)
@staticmethod
def covar_matrix(mkt_data):
logging.info(f"Computing the sample covariance matrix using: Ledoit-Wolf shrinkage")
return CovarianceShrinkage(mkt_data).ledoit_wolf()
@staticmethod
def market_priors(mcaps, delta, S):
logging.info(f"Computing the market implied prior returns using: mcaps, (delta) market implied risk aversion, (S) sample covar matrix")
return black_litterman.market_implied_prior_returns(mcaps, delta, S)
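# --- Hedged illustrative sketch (added; not part of the original module) ---
# A self-contained, synthetic-data version of the regression step performed in
# MarketModels.prepare_ff: regress a ticker's excess log returns on Fama-French
# style factors, rebuild expected returns from the fitted betas, then annualize.
# All numbers and factor names below are made up purely for illustration.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    n_days = 250
    factors = pd.DataFrame({
        'Mkt-RF': rng.normal(0.0004, 0.01, n_days),
        'SMB': rng.normal(0.0001, 0.005, n_days),
        'HML': rng.normal(0.0001, 0.005, n_days),
        'RF': np.full(n_days, 0.00005),
    })
    # synthetic ticker log returns loosely driven by the market factor
    ticker_rt = 1.2 * factors['Mkt-RF'] + rng.normal(0, 0.005, n_days)
    excess = ticker_rt - factors['RF']
    X = factors[['Mkt-RF', 'SMB', 'HML']]
    reg = LinearRegression(fit_intercept=True).fit(X, excess)
    expected = np.sum(X * reg.coef_, axis=1) + factors['RF']
    annualized = (np.exp(expected.mean()) - 1) * 252
    print('betas:', reg.coef_, 'R^2:', reg.score(X, excess), 'annualized return:', annualized)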
|
import os
import importlib
import numpy as np
from .helpers import util
from .helpers.data import WSGenerator, WSRandGenerator
from sim.helpers.util import get_path as get_network_path
from sim.helpers.data import DataInfo
def compute(args):
network = args.NETWORK
epoch = args.epoch
anon = not args.include_uid
repo = args.datarepo
dataset = args.DATASET
randc = args.randcomp
dinfo = DataInfo(repo)
wpath = get_network_path(repo, network)+str(epoch)+'.h5'
nmod = importlib.import_module('sim.networks.'+network)
model = nmod.model(dinfo)
model.load_weights(wpath)
if randc:
numSamples = 2000000
print("Creating WSRand-generator for "+str(dataset))
gen = WSRandGenerator(dinfo, dataset, numSamples)
tss = []
sims = np.empty((0,2))
print("Generating random similarities for "+str(dataset)+" with "+str(numSamples)+" authors.")
per = 0
for i, (ts, X) in enumerate(gen):
if i >= per*len(gen):
print(str(round(per*100))+"%")
per += max(0.01, 1.0/len(gen))
if per > 1.0:
break
sims = np.vstack([sims, model.predict(X)])
tss += ts
simOut = util.get_path(repo, dataset, network)+'data-random.csv'
with open(simOut, 'w') as fsim:
for (ts,sim) in zip(tss,sims):
fsim.write(str(ts[0])+';'+str(ts[1])+';'+str(sim[1])+'\n')
else:
print("Creating WS-generator for "+str(dataset))
gen = WSGenerator(dinfo, dataset)
res = []
print("Generating similarities for "+str(dataset)+" with "+str(len(gen))+" authors.")
per = 0
for i, (uid, ts, ls, Xs) in enumerate(gen):
if i >= per*len(gen):
print(str(round(per*100))+"%")
per += max(0.01, 1.0/len(gen))
sims = np.empty((0,2))
for x in Xs:
sims = np.vstack([sims, model.predict(x)])
res.append((uid, ts, ls, sims))
simOut = util.get_path(repo, dataset, network)+'data-sim.csv'
metaOut = util.get_path(repo, dataset, network)+'data-meta.csv'
with open(simOut, 'w') as fsim, open(metaOut, 'w') as fmeta:
for (uid,ts,ls,sims) in res:
if anon:
uid = 'author'
fsim.write(str(uid)+';'+';'.join([str(sim[1]) for sim in sims])+'\n')
fmeta.write(str(uid)+';'+';'.join([str(l)+','+str(t) for l,t in zip(ls,ts)])+'\n')
|
# -*- encoding: utf-8 -*-
"""
@File Name : verification.py
@Create Time : 2021/7/14 21:00
@Description :
@Version :
@License :
@Author : diklios
@Contact Email : diklios5768@gmail.com
@Github : https://github.com/diklios5768
@Blog :
@Motto : All our science, measured against reality, is primitive and childlike - and yet it is the most precious thing we have.
"""
__auth__ = 'diklios'
from sqlalchemy import Column, String, Integer, Boolean
from app.models.base import Base
class EmailVerification(Base):
email = Column(String(255), nullable=False)
    # verification code
verification_code = Column(String(10), nullable=False)
    # expiration time
expiration = Column(Integer, nullable=False)
    # purpose
use = Column(String(20), nullable=False)
    # whether the code is still valid (it may have been used, or manually expired)
valid = Column(Boolean, default=True)
class PhoneVerification(Base):
# phone_code=Column(String(4),nullable=False)
phone = Column(String(12), nullable=False)
verification_code = Column(String(10), nullable=False)
expiration = Column(Integer, nullable=False)
use = Column(String(20), nullable=False)
valid = Column(Boolean, default=True)
|
import requests
import datetime
import fnmatch
import re
import logging
import subprocess
import os
import sys
from github import Github, Label
REPO_NAME = 'AOSC-Dev/aosc-os-abbs'
TARGET_LABEL = 'security'
AFTER_DATE = datetime.datetime(2019, 11, 6, 0, 0, 0)
TOKEN = os.getenv('TOKEN')
CVE_PATTERN = r'(?:\*\*)?CVE IDs:(?:\*\*)?\s*((?:(?!\n\n).)*)'
ARCH_PATTERN = r'(?:\*\*)?Architectural progress:(?:\*\*)?\s*((?:(?!\n\n).)*)'
OTHER_PATTERN = r'(?:\*\*)?Other security advisory IDs:(?:\*\*)?\s*((?:(?!\n\n).)*)'
AOSA_PATTERN = r'(AOSA-\d{4}-\d+)'
SUPERSEDED_PATTERN = r'[Ss]uperseded by (#\d+)'
REFERENCE_REPO = 'https://packages.aosc.io/repo/amd64/stable?page=all&type=json'
HEAD_TEMPLATE = """Hi all,
Here below is a comprehensive list of AOSC OS Security Advisories announced in the period between {date_start} and {date_end}. This is the 1st issue/batch of security advisories announced here for {date_month} on the security mailing list.
Please update your system at your earliest convenience!
"""
def minimatch(names, pattern):
actual_pattern = pattern.replace('{', '[').replace('}', ']')
return fnmatch.filter(names, actual_pattern)
def get_updated_version_simple(issue):
title = issue.title
if title.find('to') > 0:
return title.split('to')[-1].strip(' ^')
return None
def get_updated_version_timeline(issue):
# TODO: deduce patched version from Git commits
pass
def get_updated_version_guess(issue):
names = get_expanded_names(issue, packages)
def fetch_version(name):
logging.warning('%s: Using heuristics to determine patched version' % name)
# dangerous escaping...
data = {
'q': "select epoch, version, release from package_versions where commit_time < strftime('%s', 'now', '-1 days') and package = '{}' and (branch = 'stable' or branch = 'stable-proposed') order by epoch, version, release desc;".format(name)}
resp = requests.post("https://packages.aosc.io/query/",
data=data, headers={'X-Requested-With': 'XMLHttpRequest'})
resp.raise_for_status()
resp = resp.json()['rows']
if not resp:
return ''
resp = resp[0]
version = ''
if resp[0]:
version = resp[0] + ':'
version += resp[1]
if resp[2]:
version += '-' + resp[2]
return version
return ', '.join([fetch_version(name) for name in names])
def get_updated_version(issue):
for method in [get_updated_version_simple, get_updated_version_timeline, get_updated_version_guess]:
result = method(issue)
if result:
return result
return None
def get_bulletin_number(issue):
body = issue.body
numbers = []
result = re.search(CVE_PATTERN, body)
if result:
cve = result.group(1).strip().replace('N/A', '')
if cve:
numbers.append(cve)
result = re.search(OTHER_PATTERN, body)
if result:
other = result.group(1).strip().replace('N/A', '')
if other:
numbers.append(other)
return numbers
def get_aosa_number(issue):
aosa = None
for page in range(issue.get_comments().totalCount):
for comment in issue.get_comments().get_page(page):
result = re.search(AOSA_PATTERN, comment.body)
superseded = re.search(SUPERSEDED_PATTERN, comment.body)
if result:
aosa = result.group(1).strip()
if superseded:
aosa = 'skip'
return aosa
def get_issues_after(date: datetime.datetime, repo, label):
issues = []
count = 0
for issue in repo.get_issues(state='closed', labels=[label], since=date):
issues.append(issue)
print('\rEnumerating issues... %s' % count, end='', flush=True, file=sys.stderr)
count += 1
print('... done.', file=sys.stderr)
return issues
def get_expanded_names_bash(name):
if re.search(pattern=r'[;\n#]', string=name):
return None
try:
result = subprocess.check_output(['bash', '-rc', 'echo %s' % name]).decode('utf-8')
return result.split()
except Exception:
return None
def get_expanded_names(issue, packages):
if ':' not in issue.title:
return [issue.title]
pattern = issue.title.split(': ')[0]
return minimatch(packages, pattern) or get_expanded_names_bash(pattern) or [pattern]
def generate_head(start, end):
return HEAD_TEMPLATE.format(date_start=start.strftime('%B %-d'), date_end=end.strftime('%B %-d'), date_month=end.strftime('%B of %Y'))
def main():
logging.info('Fetching issues from GitHub...')
gh = Github(base_url="https://api.github.com",
login_or_token=TOKEN)
repo = gh.get_repo(REPO_NAME)
label = repo.get_label(TARGET_LABEL)
bulletins = get_issues_after(AFTER_DATE, repo, label)
output = ''
for bulletin in bulletins:
aosa = get_aosa_number(bulletin)
version = get_updated_version(bulletin)
bulletin_number = ', '.join(get_bulletin_number(bulletin))
name = ', '.join(get_expanded_names(bulletin, packages))
if not aosa:
aosa = 'AOSA-????-????'
logging.warning('AOSA number not found for: %s' %
bulletin.html_url)
if aosa == 'skip':
logging.warning('AOSA skipped for: %s due to obsoletion' % bulletin.html_url)
continue
output += ('- %s: Update %s to %s (%s).\n' %
(aosa, name, version, bulletin_number))
print(
'\n\n' + generate_head(bulletins[-1].created_at, bulletins[0].created_at) + output)
if __name__ == "__main__":
logging.getLogger().setLevel(logging.INFO)
logging.info("Fetching information from package site...")
resp = requests.get(REFERENCE_REPO)
resp.raise_for_status()
packages = resp.json()['packages']
packages = [i['name'] for i in packages]
main()
|
# *****************************************************************************
# Copyright 2017 Karl Einar Nelson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# *****************************************************************************
import sys
import jpype
import common
from jpype.types import *
from jpype import java
def passThrough(item):
al = JClass("java.util.ArrayList")()
al.add(item)
return al.get(0)
class BoxedTestCase(common.JPypeTestCase):
__name__ = "BoxedTestCase"
def setUp(self):
common.JPypeTestCase.setUp(self)
self.TestBoxed = jpype.JClass('jpype.boxed.Boxed')
self.Number = jpype.JClass('java.lang.Number')
self.Comparable = jpype.JClass('java.lang.Comparable')
def testShort(self):
c1 = 12345
# Check passed from and passed to
d1 = self.TestBoxed.newShort(c1)
d2 = java.lang.Short(c1)
self.assertEqual(d1, c1)
self.assertEqual(d2, c1)
self.assertEqual(c1, d1)
self.assertEqual(c1, d2)
self.assertEqual(d1, d2)
self.assertEqual(self.TestBoxed.callShort(c1),
self.TestBoxed.callShort(d2))
# Verify ops
self.assertEqual(d1 + 2, d1 + 2)
self.assertEqual(d1 * 2, d1 * 2)
def testInteger(self):
c1 = 12345
# Check passed from and passed to
d1 = self.TestBoxed.newInteger(c1)
d2 = java.lang.Integer(c1)
self.assertEqual(d1, c1)
self.assertEqual(d2, c1)
self.assertEqual(c1, d1)
self.assertEqual(c1, d2)
self.assertEqual(d1, d2)
self.assertEqual(self.TestBoxed.callInteger(c1),
self.TestBoxed.callInteger(d2))
# Verify ops
self.assertEqual(d1 + 2, d1 + 2)
self.assertEqual(d1 * 2, d1 * 2)
def testLong(self):
c1 = 12345
# Check passed from and passed to
d1 = self.TestBoxed.newLong(c1)
d2 = java.lang.Long(c1)
self.assertEqual(d1, c1)
self.assertEqual(d2, c1)
self.assertEqual(c1, d1)
self.assertEqual(c1, d2)
self.assertEqual(d1, d2)
self.assertEqual(self.TestBoxed.callLong(c1),
self.TestBoxed.callLong(d2))
# Verify ops
self.assertEqual(d1 + 2, d1 + 2)
self.assertEqual(d1 * 2, d1 * 2)
def testDoubleFromFloat(self):
java.lang.Double(1.0)
def testFloatFromInt(self):
java.lang.Float(1)
def testDoubleFromInt(self):
java.lang.Double(1)
def testBoxed2(self):
java.lang.Short(java.lang.Integer(1))
java.lang.Integer(java.lang.Integer(1))
java.lang.Long(java.lang.Integer(1))
java.lang.Float(java.lang.Integer(1))
java.lang.Float(java.lang.Long(1))
java.lang.Double(java.lang.Integer(1))
java.lang.Double(java.lang.Long(1))
java.lang.Double(java.lang.Float(1))
def testFloat(self):
c1 = 123124 / 256.0
# Check passed from and passed to
d1 = self.TestBoxed.newFloat(c1)
d2 = java.lang.Float(c1)
self.assertEqual(d1, c1)
self.assertEqual(d2, c1)
self.assertEqual(c1, d1)
self.assertEqual(c1, d2)
self.assertEqual(d1, d2)
self.assertEqual(self.TestBoxed.callFloat(c1),
self.TestBoxed.callFloat(d2))
# Verify ops
self.assertEqual(d1 + 2, d1 + 2)
self.assertEqual(d1 * 2, d1 * 2)
self.assertTrue(d2 < c1 + 1)
self.assertTrue(d2 > c1 - 1)
def testDouble(self):
c1 = 123124 / 256.0
# Check passed from and passed to
d1 = self.TestBoxed.newDouble(c1)
d2 = java.lang.Double(c1)
self.assertEqual(d1, c1)
self.assertEqual(d2, c1)
self.assertEqual(c1, d1)
self.assertEqual(c1, d2)
self.assertEqual(d1, d2)
self.assertEqual(self.TestBoxed.callDouble(c1),
self.TestBoxed.callDouble(d2))
# Verify ops
self.assertEqual(d1 + 2, d1 + 2)
self.assertEqual(d1 * 2, d1 * 2)
self.assertTrue(d2 < c1 + 1)
self.assertTrue(d2 > c1 - 1)
def testShortResolve(self):
self.assertEqual(self.TestBoxed.whichShort(1), 1)
self.assertEqual(self.TestBoxed.whichShort(java.lang.Short(1)), 2)
def testIntegerResolve(self):
self.assertEqual(self.TestBoxed.whichInteger(1), 1)
self.assertEqual(self.TestBoxed.whichInteger(java.lang.Integer(1)), 2)
def testLongResolve(self):
self.assertEqual(self.TestBoxed.whichLong(1), 1)
self.assertEqual(self.TestBoxed.whichLong(java.lang.Long(1)), 2)
def testFloatResolve(self):
self.assertEqual(self.TestBoxed.whichFloat(1.0), 1)
self.assertEqual(self.TestBoxed.whichFloat(java.lang.Float(1.0)), 2)
def testDoubleResolve(self):
self.assertEqual(self.TestBoxed.whichDouble(1.0), 1)
self.assertEqual(self.TestBoxed.whichDouble(java.lang.Double(1.0)), 2)
    def testPrimitiveToBoxed(self):
java.lang.Boolean(JBoolean(0))
java.lang.Byte(JByte(0))
java.lang.Short(JShort(0))
java.lang.Integer(JInt(0))
java.lang.Long(JLong(0))
java.lang.Float(JFloat(0))
java.lang.Double(JDouble(0))
def testBooleanBad(self):
# java.lang.Boolean(X) works like bool(X)
# Explicit is a cast
self.assertFalse(java.lang.Boolean(tuple()))
self.assertFalse(java.lang.Boolean(list()))
self.assertFalse(java.lang.Boolean(dict()))
self.assertFalse(java.lang.Boolean(set()))
self.assertTrue(java.lang.Boolean(tuple(['a'])))
self.assertTrue(java.lang.Boolean(['a']))
self.assertTrue(java.lang.Boolean({'a': 1}))
self.assertTrue(java.lang.Boolean(set(['a', 'b'])))
# Implicit does not automatically cast
fixture = JClass('jpype.common.Fixture')()
with self.assertRaises(TypeError):
fixture.callBoxedBoolean(tuple())
with self.assertRaises(TypeError):
fixture.callBoxedBoolean(list())
with self.assertRaises(TypeError):
fixture.callBoxedBoolean(dict())
with self.assertRaises(TypeError):
fixture.callBoxedBoolean(set())
def testByteBad(self):
with self.assertRaises(TypeError):
java.lang.Byte(tuple())
def testCharacterBad(self):
with self.assertRaises(TypeError):
java.lang.Character(tuple())
def testShortBad(self):
with self.assertRaises(TypeError):
java.lang.Short(tuple())
def testIntegerBad(self):
with self.assertRaises(TypeError):
java.lang.Integer(tuple())
def testLongBad(self):
with self.assertRaises(TypeError):
java.lang.Long(tuple())
def testFloatBad(self):
with self.assertRaises(TypeError):
java.lang.Float(tuple())
def testDoubleBad(self):
with self.assertRaises(TypeError):
java.lang.Double(tuple())
def testBooleanBad2(self):
with self.assertRaises(TypeError):
java.lang.Boolean(tuple(), tuple())
def testByteBad2(self):
with self.assertRaises(TypeError):
java.lang.Byte(tuple(), tuple())
def testCharacterBad2(self):
with self.assertRaises(TypeError):
java.lang.Character(tuple(), tuple())
def testShortBad2(self):
with self.assertRaises(TypeError):
java.lang.Short(tuple(), tuple())
def testIntegerBad2(self):
with self.assertRaises(TypeError):
java.lang.Integer(tuple(), tuple())
def testLongBad2(self):
with self.assertRaises(TypeError):
java.lang.Long(tuple(), tuple())
def testFloatBad2(self):
with self.assertRaises(TypeError):
java.lang.Float(tuple(), tuple())
def testDoubleBad2(self):
with self.assertRaises(TypeError):
java.lang.Double(tuple(), tuple())
def compareTest(self, u, v):
self.assertEqual(u, v)
self.assertNotEqual(u, v - 1)
self.assertTrue(u > v - 1)
self.assertFalse(u > v + 1)
self.assertTrue(u >= v)
self.assertTrue(u <= v)
self.assertFalse(u < v)
self.assertFalse(u > v)
self.assertTrue(u < v + 1)
self.assertTrue(u > v - 1)
def testByteBoxOps(self):
u = JObject(81, JByte)
self.assertIsInstance(u, jpype.java.lang.Byte)
self.compareTest(u, 81)
def testCharBoxOps(self):
u = JObject('Q', JChar)
self.assertIsInstance(u, jpype.java.lang.Character)
self.compareTest(u, 81)
def testShortBoxOps(self):
u = JObject(81, JShort)
self.assertIsInstance(u, jpype.java.lang.Short)
self.compareTest(u, 81)
def testIntBoxOps(self):
u = JObject(81, JInt)
self.assertIsInstance(u, jpype.java.lang.Integer)
self.compareTest(u, 81)
def testLongBoxOps(self):
u = JObject(81, JLong)
self.assertIsInstance(u, jpype.java.lang.Long)
self.compareTest(u, 81)
    def testFloatBoxOps(self):
        u = JObject(81, JFloat)
        self.assertIsInstance(u, jpype.java.lang.Float)
        self.compareTest(u, 81)
    def testDoubleBoxOps(self):
        u = JObject(81, JDouble)
        self.assertIsInstance(u, jpype.java.lang.Double)
        self.compareTest(u, 81)
def testCharBox(self):
u = passThrough(JChar('Q'))
self.assertIsInstance(u, jpype.java.lang.Character)
self.assertEqual(u, jpype.java.lang.Character('Q'))
def testBooleanBox(self):
u = passThrough(JBoolean(True))
self.assertIsInstance(u, jpype.java.lang.Boolean)
self.assertEqual(u, jpype.java.lang.Boolean(True))
self.assertEqual(u, True)
u = passThrough(JBoolean(False))
self.assertIsInstance(u, jpype.java.lang.Boolean)
self.assertEqual(u, jpype.java.lang.Boolean(False))
self.assertEqual(u, False)
def testByteBox(self):
u = passThrough(JByte(5))
self.assertIsInstance(u, java.lang.Byte)
self.assertEqual(u, java.lang.Byte(5))
def testShortBox(self):
u = passThrough(JShort(5))
self.assertIsInstance(u, java.lang.Short)
self.assertEqual(u, java.lang.Short(5))
def testIntBox(self):
u = passThrough(JInt(5))
self.assertIsInstance(u, java.lang.Integer)
self.assertEqual(u, java.lang.Integer(5))
def testLongBox(self):
u = passThrough(JLong(5))
self.assertIsInstance(u, java.lang.Long)
self.assertEqual(u, java.lang.Long(5))
def testFloatBox(self):
u = passThrough(JFloat(5))
self.assertIsInstance(u, java.lang.Float)
self.assertEqual(u, java.lang.Float(5))
def testDoubleBox(self):
u = passThrough(JDouble(5))
self.assertIsInstance(u, java.lang.Double)
self.assertEqual(u, java.lang.Double(5))
def testBooleanNull(self):
n = JObject(None, JBoolean)
self.assertIsInstance(n, java.lang.Boolean)
self.assertEqual(n, None)
self.assertNotEqual(n, True)
self.assertNotEqual(n, False)
with self.assertRaises(TypeError):
int(n)
with self.assertRaises(TypeError):
float(n)
self.assertEqual(str(n), str(None))
self.assertEqual(repr(n), str(None))
self.assertEqual(hash(n), hash(None))
u = passThrough(n)
self.assertEqual(u, None)
def testCharNull(self):
n = JObject(None, JChar)
self.assertIsInstance(n, java.lang.Character)
self.assertNotEqual(n, 0)
with self.assertRaises(TypeError):
int(n)
with self.assertRaises(TypeError):
float(n)
self.assertEqual(str(n), str(None))
self.assertEqual(repr(n), str(None))
self.assertEqual(hash(n), hash(None))
u = passThrough(n)
self.assertEqual(u, None)
def testByteNull(self):
n = JObject(None, JByte)
self.assertIsInstance(n, java.lang.Byte)
self.assertNotEqual(n, 0)
with self.assertRaises(TypeError):
int(n)
with self.assertRaises(TypeError):
float(n)
self.assertEqual(str(n), str(None))
self.assertEqual(repr(n), str(None))
self.assertEqual(hash(n), hash(None))
u = passThrough(n)
self.assertEqual(u, None)
def testShortNull(self):
n = JObject(None, JShort)
self.assertIsInstance(n, java.lang.Short)
self.assertNotEqual(n, 0)
with self.assertRaises(TypeError):
int(n)
with self.assertRaises(TypeError):
float(n)
self.assertEqual(str(n), str(None))
self.assertEqual(repr(n), str(None))
self.assertEqual(hash(n), hash(None))
u = passThrough(n)
self.assertEqual(u, None)
def testIntNull(self):
n = JObject(None, JInt)
self.assertIsInstance(n, java.lang.Integer)
self.assertNotEqual(n, 0)
with self.assertRaises(TypeError):
int(n)
with self.assertRaises(TypeError):
float(n)
self.assertEqual(str(n), str(None))
self.assertEqual(repr(n), str(None))
self.assertEqual(hash(n), hash(None))
u = passThrough(n)
self.assertEqual(u, None)
def testLongNull(self):
n = JObject(None, JLong)
self.assertIsInstance(n, java.lang.Long)
self.assertNotEqual(n, 0)
with self.assertRaises(TypeError):
int(n)
with self.assertRaises(TypeError):
float(n)
self.assertEqual(str(n), str(None))
self.assertEqual(repr(n), str(None))
self.assertEqual(hash(n), hash(None))
u = passThrough(n)
self.assertEqual(u, None)
def testFloatNull(self):
n = JObject(None, JFloat)
self.assertIsInstance(n, java.lang.Float)
self.assertNotEqual(n, 0)
self.assertNotEqual(n, 0.0)
with self.assertRaises(TypeError):
int(n)
with self.assertRaises(TypeError):
float(n)
self.assertEqual(str(n), str(None))
self.assertEqual(repr(n), str(None))
self.assertEqual(hash(n), hash(None))
u = passThrough(n)
self.assertEqual(u, None)
def testDoubleNull(self):
n = JObject(None, JDouble)
self.assertIsInstance(n, java.lang.Double)
self.assertNotEqual(n, 0)
self.assertNotEqual(n, 0.0)
with self.assertRaises(TypeError):
int(n)
with self.assertRaises(TypeError):
float(n)
self.assertEqual(str(n), str(None))
self.assertEqual(repr(n), str(None))
self.assertEqual(hash(n), hash(None))
u = passThrough(n)
self.assertEqual(u, None)
def testAsNumber(self):
self.assertIsInstance(java.lang.Byte(1), java.lang.Number)
self.assertIsInstance(java.lang.Short(1), java.lang.Number)
self.assertIsInstance(java.lang.Integer(1), java.lang.Number)
self.assertIsInstance(java.lang.Long(1), java.lang.Number)
self.assertIsInstance(java.lang.Float(1), java.lang.Number)
self.assertIsInstance(java.lang.Double(1), java.lang.Number)
|
from __future__ import print_function
import sys
from AppKit import NSObject
from PyObjCTools import AppHelper
import vanilla
class SimpleAppAppDelegate(NSObject):
def applicationDidFinishLaunching_(self, notification):
SimpleAppWindow()
class SimpleAppWindow(object):
def __init__(self):
self.w = vanilla.Window((250, 120), "Simple App Window", closable=False)
self.w.text = vanilla.TextBox((10, 10, -10, 70), "This is a simple window. It doesn't do much. You see that button? Press it and some text will be printed in Console.app.")
self.w.button = vanilla.Button((10, 90, -10, 20), "Press me", callback=self.buttonCallback)
self.w.open()
def buttonCallback(self, sender):
print("You pressed the button!")
sys.stdout.flush()
if __name__ == "__main__":
AppHelper.runEventLoop()
|
import cv2
import numpy as np
import os, re
import face_recognition.api as face_recognition
recognizer = cv2.face.LBPHFaceRecognizer_create()
recognizer.read('trainer/trainer_lbp.yml')
cascadePath = "cascade_data/lbpcascade_frontalface.xml"
faceCascade = cv2.CascadeClassifier(cascadePath)
unidentified_image_dir = "../unidentified_images"
font = cv2.FONT_HERSHEY_SIMPLEX
#initiate id counter
id = 0
def image_files_in_folder(folder):
return [os.path.join(folder, f) for f in os.listdir(folder) if re.match(r'.*\.(jpg|jpeg|png)', f, flags=re.I)]
print("Start LBPH image recognize Program.")
for img_path in image_files_in_folder(unidentified_image_dir):
#load image from the image file
image = face_recognition.load_image_file(img_path)
# convert the image to gray scale
gray = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(
gray,
scaleFactor = 1.2,
minNeighbors = 5,
)
for(x,y,w,h) in faces:
id, confidence = recognizer.predict(gray[y:y+h,x:x+w])
        # Check if confidence is less than 100 ==> "0" is a perfect match
if (confidence < 100):
#id = names[id]
confidence = " {0}%".format(round(100 - confidence))
print("Found person id : {} ".format(id))
print("Confidence Level - {}".format( confidence))
else:
id = "unknown"
confidence = " {0}%".format(round(100 - confidence))
print("Found person id : Unknown")
print("Confidence Level - {}".format( confidence))
print("\n Exiting Program.")
|
import soundset
# create a random score for notes C3~C5 == 130.8~523.3hz
#-> score object
s1 = soundset.score.random(length=32,tempo=120,beat=16,chord=3,pitch=3,register=25,random_state=None)
# create score piano roll
#-> 2-dim binaly numpy array, size of (length, 128)
roll = s1.to_roll(ignore_out_of_range=False)
assert roll.dtype==int, 'it is a binary array but dtype is int'
assert roll.shape==(32,128), 'shape is (32,128)'
assert roll.min()==0 and roll.max()==1, 'binary array'
assert all( roll.sum(axis=1)==3 ), 'each line has 3 notes'
roll = s1.to_roll(ignore_out_of_range=True)
assert roll.shape == (32,25), 'the roll shape is (32,25) when ignore_out_of_range is True'
# synthesize score
#-> 1 or 2-dim float on [-1,1] numpy array, size of (length):mono or (length, 2):stereo
wave = s1.to_wave(instrument=0,stereo=True,rate=44100) # inst0=piano
assert wave.dtype==float
assert wave.shape==(44100*32*4/16*60/120, 2)
assert -1<=wave.min() and wave.max()<=1
# synthesize score with system soundfont
wave = s1.to_wave(instrument=40) # synthesize violin with default font
# references
## score
## you can generate specific score with pianoroll
# s2 = soundset.score.generate(roll=roll) # the size must be (length, 128)
## (in future) you can generate score from midi file
# s2 = soundset.score.load(filename='midi/file/name.midi')
## (in future) you can save score as midi file
# s2.save(filename='midifilename.midi')
## synthesize
## (in future) wave is a periodic function whose period is 2*pi and range is [-1,1]
# s2.to_wave(wave=np.sin)
print('')
print('all tests pass.')
print('done.')
|
## -*- coding: UTF8 -*-
## view.py
## Copyright (c) 2020 libcommon
##
## Permission is hereby granted, free of charge, to any person obtaining a copy
## of this software and associated documentation files (the "Software"), to deal
## in the Software without restriction, including without limitation the rights
## to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
## copies of the Software, and to permit persons to whom the Software is
## furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be included in all
## copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
## SOFTWARE.
# pylint: disable=W0613
import os
from sqlalchemy import Column
from sqlalchemy.engine.interfaces import Compiled
from sqlalchemy.event import listen
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.schema import DDLElement, MetaData, Table
from sqlalchemy.sql.expression import FromClause
__author__ = "libcommon"
class CreateViewExpression(DDLElement):
"""Custom DDL element to create SQL view.
NOTE: Implementation taken from
http://www.jeffwidman.com/blog/847/using-sqlalchemy-to-create-and-manage-postgresql-materialized-views/
"""
def __init__(self, name: str, selectable: FromClause) -> None:
self.name = name
self.selectable = selectable
@compiles(CreateViewExpression)
def generate_view_create_expression(element: CreateViewExpression, compiler: Compiled, **kwargs) -> str:
return "CREATE VIEW {} AS {}".format(element.name,
compiler.sql_compiler.process(element.selectable,
literal_binds=True))
class CreateMaterializedViewExpression(CreateViewExpression):
"""Custom DDL Element to create Postgres materialized view (see: CreateViewExpression)."""
@compiles(CreateMaterializedViewExpression, "postgresql")
def generate_mview_create_expression(element, compiler: Compiled, **kwargs) -> str:
return "CREATE MATERIALIZED VIEW {} AS {}".format(element.name,
compiler.sql_compiler.process(element.selectable,
literal_binds=True))
class DropViewExpression(DDLElement):
"""Custom DDL element to drop SQL view."""
def __init__(self, name: str) -> None:
self.name = name
@compiles(DropViewExpression)
def generate_view_drop_expression(element, compiler: Compiled, **kwargs) -> str:
return "DROP VIEW IF EXISTS {}".format(element.name)
class DropMaterializedViewExpression(DropViewExpression):
"""Cusotm DDL element to drop Postgres materialized view."""
@compiles(DropMaterializedViewExpression, "postgresql")
def generate_mview_drop_expression(element, compiler: Compiled, **kwargs) -> str:
return "DROP MATERIZLIZED VIEW IF EXISTS {}".format(element.name)
def create_view(name: str, selectable: FromClause, metadata: MetaData, materialized: bool = False) -> Table:
"""
Args:
name => name of materialized view to create
selectable => query to create view as
metadata => metadata to listen for events on
materialized => whether to create standard or materialized view
Returns:
Table object bound to temporary MetaData object with columns
returned from selectable (essentially creates table as view).
NOTE:
For non-postgresql backends, creating a materialized view
will result in a standard view, which cannot be indexed.
Preconditions:
N/A
Raises:
N/A
"""
_tmp_mt = MetaData()
tbl = Table(name, _tmp_mt)
for column in selectable.c:
tbl.append_column(Column(column.name, column.type, primary_key=column.primary_key))
listen(metadata,
"after_create",
(CreateMaterializedViewExpression(name, selectable)
if materialized else CreateViewExpression(name, selectable)))
listen(metadata,
"before_drop",
DropMaterializedViewExpression(name) if materialized else DropViewExpression(name))
return tbl
if os.environ.get("ENVIRONMENT") == "TEST":
from datetime import datetime
import unittest
from sqlalchemy.engine import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import select
from tests.common import BaseTable, User, Post
PostAuditTimeline = create_view("post_audit_timeline",
select([User.id, User.first_name, Post.created_at], order_by=Post.created_at),
BaseTable.metadata)
class TestViewUtilities(unittest.TestCase):
"""Tests for view creation/drop utilities."""
def setUp(self):
# Create SQLAlchemy engine for in-memory SQLite database
# See: https://docs.sqlalchemy.org/en/13/core/engines.html#sqlite
self.engine = create_engine("sqlite://")
# Create all tables in database
BaseTable.metadata.create_all(self.engine)
# Bind sessionmaker instance to engine
self.session_factory = sessionmaker(bind=self.engine)
# Create session
self.session = self.session_factory()
def test_create_view_expression_single_table(self):
"""Test that view creation query compiles correctly
where view selects from a single table.
"""
# Create view query
view_query = select([User.id, User.first_name, User.last_name])
# Set expected output
ddl_statement = ("CREATE VIEW user_names AS "
"SELECT \"user\".id, \"user\".first_name, \"user\".last_name FROM \"user\"")
# NOTE: have to remove newline because SQLAlchemy's select inserts them by default
self.assertEqual(ddl_statement,
str(CreateViewExpression("user_names", view_query)).replace("\n", ""))
def test_create_view_expression_join(self):
"""Test that view creation query compiles correctly
where view selects from two tables (with join).
"""
# Create view query
view_query = select([
User.id,
(User.first_name + User.last_name).label("full_name"),
Post.content
])
ddl_statement = ("CREATE VIEW user_posts AS "
"SELECT \"user\".id, \"user\".first_name || \"user\".last_name AS full_name, post.content "
"FROM \"user\", post")
self.assertEqual(ddl_statement,
str(CreateViewExpression("user_posts", view_query)).replace("\n", ""))
def test_drop_view_expression_single_table(self):
"""Test that drop view query compiles correctly."""
self.assertEqual("DROP VIEW IF EXISTS user_names", str(DropViewExpression("user_names")))
def test_select_from_created_view(self):
"""Test that PostAuditTimeline was created in database and
has the right columns by:
1) Adding a User record
2) Adding a Post record tied to User
3) Selecting from view
"""
# Add User record to database
user = User(first_name="Susan", last_name="Sarandon", email="susan.sarandon@gmail.com")
# NOTE: Pylint doesn't see these methods on Session type
self.session.add(user) # pylint: disable=E1101
self.session.commit() # pylint: disable=E1101
# Add Post record to database
created_at_datetime = datetime.utcnow()
post = Post(user_id=user.id, content="<h1>This is a post</h1>", created_at=created_at_datetime)
self.session.add(post) # pylint: disable=E1101
self.session.commit() # pylint: disable=E1101
# Select records from post_audit_timeline and ensure match up with records in database
self.assertEqual([(1, "Susan", created_at_datetime)], self.session.query(PostAuditTimeline).all()) # pylint: disable=E1101
def tearDown(self):
self.session.close() # pylint: disable=E1101
self.engine.dispose()
|
"""Add binvox_rw to PYTHONPATH
Usage:
import _init_binvox
import binvox
"""
import os.path as osp
import sys
def add_path(path):
if path not in sys.path:
sys.path.insert(0, path)
this_dir = osp.dirname(osp.realpath(__file__))
binvox_dir = osp.abspath(osp.join(this_dir, '..', 'libs', 'binvox-rw-py'))
# Add lib to PYTHONPATH
add_path(binvox_dir)
|
# Remember that the radius is half the diameter.
# On the other hand, if we wanted to know how much 51 divided by 17 is, we can store it in a variable
# Resultado_Sorprendente = 51 / 17
# and then use that for other calculations, such as:
# Resultado_Sorprendente_2 = (Resultado_Sorprendente ** 2) * 11
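# --- Hedged illustrative sketch (added; the concrete numbers are only examples) ---
diametro = 10
radio = diametro / 2  # the radius is half the diameter
Resultado_Sorprendente = 51 / 17
Resultado_Sorprendente_2 = (Resultado_Sorprendente ** 2) * 11
print(radio, Resultado_Sorprendente, Resultado_Sorprendente_2)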
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.danmakuwall, name='danmakuwall'),
]
|
# -*- coding: utf-8 -*-
import codecs
import os, sys
import copy
import random
import json
import math
import decimal
import datetime
import threading
import exceptions
import time
import base64
import md5
from gevent import socket
import urllib, urllib2, urlparse
from socket import error
import errno
import subprocess
from multiprocessing import Process, Queue, current_process, freeze_support
import shutil
import re
#from PIL import Image
import StringIO
import cgi
import uuid
from contextlib import contextmanager
from gevent import pywsgi
import gevent
import gevent.fileobject
from gevent.local import local
from gevent.subprocess import check_output
import pymongo
import gridfs
from bson.objectid import ObjectId
try:
from geventhttpclient import HTTPClient, URL
except:
print('geventhttpclient import error')
try:
import geventwebsocket
from geventwebsocket.handler import WebSocketHandler
except:
print('geventwebsocket import error')
# try:
# from pysimplesoap.server import SoapDispatcher, WSGISOAPHandler
# from pysimplesoap.client import SoapClient, SoapFault
# except:
# print('pysimplesoap import error')
try:
from PIL import Image
except :
print('PIL import error')
try:
from lxml import etree
except:
print('lxml import error')
try:
import czml
except:
print('czml import error')
try:
from py3o.template import Template
except:
print('import py3o.template error')
import werkzeug
from werkzeug.wrappers import Request, BaseResponse
from werkzeug.local import LocalProxy
from werkzeug.contrib.sessions import FilesystemSessionStore
from werkzeug.utils import dump_cookie, parse_cookie
from werkzeug.routing import Map, Rule, BaseConverter, ValidationError, HTTPException
from sessions import MongoClient, MongodbSessionStore
import configobj
import db_util
import bayes_util
from module_locator import module_path, dec, dec1, enc, enc1
ENCODING = None
ENCODING1 = None
STATICRESOURCE_DIR = None
STATICRESOURCE_CSS_DIR = None
STATICRESOURCE_JS_DIR = None
STATICRESOURCE_IMG_DIR = None
UPLOAD_PHOTOS_DIR = None
UPLOAD_VOICE_DIR = None
gConfig = None
gStaticCache = {}
gTileCache = {}
#deprecated
gSatTileCache = {}
gMapTileCache = {}
gTerrainCache = {}
gGreenlets = {}
gClusterProcess = {}
gLoginToken = {}
gSecurityConfig = {}
gWebSocketsMap = {}
gTcpReconnectCounter = 0
gTcpSock = None
gHttpClient = {}
gFormTemplate = []
_SPECIAL = re.escape('()<>@,;:\\"/[]?={} \t')
_RE_SPECIAL = re.compile('[%s]' % _SPECIAL)
_QSTR = '"(?:\\\\.|[^"])*"' # Quoted string
_VALUE = '(?:[^%s]+|%s)' % (_SPECIAL, _QSTR) # Safe or quoted string
_OPTION = '(?:;|^)\s*([^%s]+)\s*=\s*(%s)' % (_SPECIAL, _VALUE)
_RE_OPTION = re.compile(_OPTION) # key=value part of an Content-Type like header
gSessionStore = None
gRequests = None
gRequest = None
gProxyRequest = None
gJoinableQueue = None
class BooleanConverter(BaseConverter):
def __init__(self, url_map, randomify=False):
super(BooleanConverter, self).__init__(url_map)
self.regex = '(?:true|false)'
def to_python(self, value):
return value == 'true'
def to_url(self, value):
return value and 'true' or 'false'
class Py3oItem(object):
pass
gUrlMap = Map([
Rule('/', endpoint='firstaccess'),
Rule('/websocket', endpoint='handle_websocket'),
#Rule('/auth_check/<username>/isnew/<bool:isnew>', endpoint='saveuser'),
Rule('/get_salt', endpoint='get_salt'),
Rule('/auth_check/<username>', endpoint='auth_check'),
Rule('/auth_check', endpoint='auth_check'),
Rule('/register/<username>/<password>', endpoint='user_add'),
Rule('/register/<username>', endpoint='user_add'),
Rule('/register', endpoint='user_add'),
Rule('/unregister/<username>', endpoint='user_delete'),
Rule('/unregister', endpoint='user_delete'),
Rule('/login/<username>/<password>', endpoint='login'),
Rule('/login/<username>', endpoint='login'),
Rule('/login', endpoint='login'),
Rule('/logout', endpoint='logout'),
Rule('/reset_password/<username>/<password>', endpoint='reset_password'),
Rule('/reset_password/<username>', endpoint='reset_password'),
Rule('/reset_password', endpoint='reset_password'),
Rule('/user_check', endpoint='user_check'),
Rule('/user_query', endpoint='user_query'),
Rule('/user_update', endpoint='user_update'),
Rule('/function_add', endpoint='function_add'),
Rule('/function_query', endpoint='function_query'),
Rule('/function_update', endpoint='function_update'),
Rule('/function_delete', endpoint='function_delete'),
Rule('/role_add', endpoint='role_add'),
Rule('/role_update', endpoint='role_update'),
Rule('/role_query', endpoint='role_query'),
Rule('/role_delete', endpoint='role_delete'),
Rule('/role_template_save', endpoint='role_template_save'),
Rule('/role_template_get', endpoint='role_template_get'),
Rule('/workflow_add', endpoint='workflow_add'),
Rule('/workflow_query', endpoint='workflow_query'),
Rule('/workflow_query/<_id>', endpoint='workflow_query'),
Rule('/workflow_update', endpoint='workflow_update'),
Rule('/workflow_delete', endpoint='workflow_delete'),
Rule('/workflow_delete/<_id>', endpoint='workflow_delete'),
Rule('/workflow_template_add', endpoint='workflow_template_add'),
Rule('/workflow_template_query', endpoint='workflow_template_query'),
Rule('/workflow_template_query/<_id>', endpoint='workflow_template_query'),
Rule('/workflow_template_update', endpoint='workflow_template_update'),
Rule('/workflow_template_delete', endpoint='workflow_template_delete'),
Rule('/workflow_template_delete/<_id>', endpoint='workflow_template_delete'),
Rule('/workflow_form_fill', endpoint='workflow_form_fill'),
Rule('/workflow_form_blank', endpoint='workflow_form_blank'),
Rule('/user_add', endpoint='user_add'),
Rule('/user_get', endpoint='user_get'),
Rule('/all_user_get', endpoint='all_user_get'),
Rule('/user_remove', endpoint='user_remove'),
Rule('/group_add', endpoint='group_add'),
Rule('/group_get', endpoint='group_get'),
Rule('/group_update', endpoint='group_update'),
Rule('/group_remove', endpoint='group_remove'),
Rule('/user_group_get', endpoint='user_group_get'),
Rule('/user_contact_get', endpoint='user_contact_get'),
Rule('/chat_broadcast', endpoint='chat_broadcast'),
Rule('/chat_log_query', endpoint='chat_log_query'),
Rule('/chat_log_remove', endpoint='chat_log_remove'),
Rule('/gridfs/upload', endpoint='gridfs_upload'),
Rule('/gridfs/get', endpoint='gridfs_get'),
Rule('/gridfs/get/<_id>', endpoint='gridfs_get'),
Rule('/gridfs/get/<_id>/thumbnail/<width>/<height>', endpoint='gridfs_get'),
Rule('/gridfs/query/<width>/<height>', endpoint='gridfs_query'),
Rule('/gridfs/query/<width>/<height>/<limit>', endpoint='gridfs_query'),
Rule('/gridfs/query/<width>/<height>/<limit>/<skip>', endpoint='gridfs_query'),
Rule('/gridfs/delete', endpoint='gridfs_delete'),
Rule('/gridfs/delete/<_id>', endpoint='gridfs_delete'),
Rule('/antibird/get_equip_list', endpoint='get_equip_list'),
Rule('/antibird/get_latest_records_by_imei', endpoint='get_latest_records_by_imei'),
Rule('/antibird/equip_tower_mapping', endpoint='equip_tower_mapping'),
Rule('/state_examination/save', endpoint='state_examination_save'),
Rule('/state_examination/query', endpoint='state_examination_query'),
Rule('/state_examination/query/line_names', endpoint='state_examination_query_line_names'),
Rule('/state_examination/delete', endpoint='state_examination_delete'),
Rule('/state_examination/delete/<_id>', endpoint='state_examination_delete'),
Rule('/bayesian/query/graphiz', endpoint='bayesian_query_graphiz'),
Rule('/bayesian/query/node', endpoint='bayesian_query_node'),
Rule('/bayesian/query/predict', endpoint='bayesian_query_predict'),
Rule('/bayesian/save/node', endpoint='bayesian_save_node'),
Rule('/bayesian/delete/node', endpoint='bayesian_delete_node'),
Rule('/bayesian/delete/node/<_id>', endpoint='bayesian_delete_node'),
Rule('/bayesian/query/domains_range', endpoint='bayesian_query_domains_range'),
Rule('/bayesian/save/domains_range', endpoint='bayesian_save_domains_range'),
Rule('/bayesian/delete/domains_range', endpoint='bayesian_delete_domains_range'),
Rule('/bayesian/delete/domains_range/<_id>', endpoint='bayesian_delete_domains_range'),
Rule('/bayesian/reset/unit', endpoint='bayesian_reset_unit'),
], converters={'bool': BooleanConverter})
@contextmanager
def session_manager(environ):
global gRequests, gRequest
if gRequests is None:
gRequests = local()
gRequest = LocalProxy(lambda: gRequests.request)
gRequests.request = Request(environ)
yield
gRequests.request = None
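# --- Hedged illustrative sketch (added; not part of the original module) ---
# Shows how session_manager() and the gRequest LocalProxy are meant to be used
# inside a WSGI callable; "example_wsgi_app" is hypothetical and is not wired
# into the server below.
def example_wsgi_app(environ, start_response):
    with session_manager(environ):
        body = 'path: %s' % gRequest.path
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [body]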
def init_global():
global ENCODING, ENCODING1, STATICRESOURCE_DIR, STATICRESOURCE_CSS_DIR, STATICRESOURCE_JS_DIR, STATICRESOURCE_IMG_DIR, UPLOAD_PHOTOS_DIR, UPLOAD_VOICE_DIR
global gConfig, gStaticCache, gGreenlets, gClusterProcess, gSecurityConfig, gJoinableQueue
ENCODING = 'utf-8'
ENCODING1 = 'gb18030'
STATICRESOURCE_DIR = os.path.join(module_path(), 'static')
#CONFIGFILE = os.path.join(module_path(), 'ogc-config.ini')
#gConfig = configobj.ConfigObj(db_util.CONFIGFILE, encoding='UTF8')
gConfig = db_util.gConfig
if gConfig['web'].has_key('webroot') and len(gConfig['web']['webroot'])>0:
if os.path.exists(gConfig['web']['webroot']):
STATICRESOURCE_DIR = gConfig['web']['webroot']
STATICRESOURCE_CSS_DIR = os.path.join(STATICRESOURCE_DIR, 'css')
STATICRESOURCE_JS_DIR = os.path.join(STATICRESOURCE_DIR, 'js')
STATICRESOURCE_IMG_DIR = os.path.join(STATICRESOURCE_DIR, 'img')
UPLOAD_PHOTOS_DIR = os.path.join(STATICRESOURCE_DIR,'photos', 'upload')
UPLOAD_VOICE_DIR = os.path.join(STATICRESOURCE_DIR,'voice')
if gConfig['wsgi']['application'].lower() == 'authorize_platform':
gSecurityConfig = db_util.mongo_find_one(gConfig['authorize_platform']['mongodb']['database'],
gConfig['authorize_platform']['mongodb']['collection_security_config'],
{},
'authorize_platform'
)
if gSecurityConfig is None:
gSecurityConfig = {}
if gConfig['wsgi']['application'].lower() in ['pay_platform', 'fake_gateway_alipay']:
gJoinableQueue = gevent.queue.JoinableQueue(maxsize=int(gConfig['pay_platform']['queue']['max_queue_size']))
l = db_util.mongo_find(gConfig['pay_platform']['mongodb']['database'],
gConfig['pay_platform']['mongodb']['collection_config'],
{},
0,
'pay_platform'
)
for i in l:
del i['_id']
key = i.keys()[0]
gSecurityConfig[key] = i[key]
if len(l) == 0:
gSecurityConfig = {}
if gConfig['wsgi']['application'].lower() == 'chat_platform':
gJoinableQueue = gevent.queue.JoinableQueue(maxsize=int(gConfig['chat_platform']['queue']['max_queue_size']))
def handle_static(environ, aUrl):
global ENCODING, gConfig
global STATICRESOURCE_DIR, STATICRESOURCE_JS_DIR, STATICRESOURCE_CSS_DIR, STATICRESOURCE_IMG_DIR, UPLOAD_VOICE_DIR
statuscode, contenttype, body = '404 Not Found', 'text/plain;charset=' + ENCODING, '404 Not Found'
surl = dec(aUrl)#.replace('//', '').replace('/', os.path.sep)
if surl[0:2] == '//':
surl = surl[2:]
if surl[0] == '/':
surl = surl[1:]
p = os.path.join(STATICRESOURCE_DIR , surl)
isBin = False
if '.' in surl:
ext = surl[surl.rindex('.'):]
else:
ext = os.path.splitext(p)[1]
print('handle_static p=%s' % p)
if len(ext)>0:
if gConfig['mime_type'].has_key(ext):
if 'image/' in gConfig['mime_type'][ext]:
isBin = True
if '/octet-stream' in gConfig['mime_type'][ext]:
isBin = True
if '/pdf' in gConfig['mime_type'][ext]:
isBin = True
contenttype = gConfig['mime_type'][ext]
if ext == '.js':
if not os.path.exists(p):
p = os.path.join(STATICRESOURCE_JS_DIR, aUrl[aUrl.rindex('/')+1:])
elif ext == '.css':
if not os.path.exists(p):
p = os.path.join(STATICRESOURCE_CSS_DIR, aUrl[aUrl.rindex('/')+1:])
elif 'image/' in gConfig['mime_type'][ext]:
if not os.path.exists(p):
p = os.path.abspath(os.path.join(STATICRESOURCE_IMG_DIR, aUrl[aUrl.rindex('/')+1:]))
if not os.path.exists(p):
p = os.path.join(STATICRESOURCE_DIR , aUrl)
#p = os.path.abspath(p)
p = dec(p)
if os.path.exists(p):
statuscode = '200 OK'
mode = 'r'
if isBin:
mode = 'rb'
with open(p, mode) as f:
f1 = gevent.fileobject.FileObjectThread(f, mode)
body = f1.read()
else:
statuscode = '404 Not Found'
body = '404 Not Found'
else:
contenttype = 'application/octet-stream'
if os.path.exists(p):
statuscode = '200 OK'
with open(p, 'rb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'rb')
body = f1.read()
else:
if ext == '.3gp':
id = surl[surl.rindex('/') + 1:]
id = id.replace('.3gp', '')
fn = get_voice_file_latest(id)
if fn:
with open(os.path.join(UPLOAD_VOICE_DIR, fn), 'rb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'rb')
body = f1.read()
statuscode = '200 OK'
else:
contenttype = 'text/plain;charset=' + ENCODING
statuscode = '500 Internal Server Error'
body = '500 Internal Server Error'
headers = {}
headers['Content-Type'] = str(contenttype)
return statuscode, headers, body
def handle_wfs_GetCapabilities(params):
headers = {}
headers['Content-Type'] = 'text/xml;charset=' + ENCODING
s = create_wfs_GetCapabilities()
return '200 OK', headers, s
def handle_wfs_GetFeature(params):
headers = {}
headers['Content-Type'] = 'text/xml;charset=' + ENCODING
s = create_wfs_GetFeature()
return '200 OK', headers, s
def create_wfs_GetCapabilities():
namespace = {'ows':"http://www.opengis.net/ows",
'ogc':"http://www.opengis.net/ogc",
'wfs':"http://www.opengis.net/wfs",
'gml':"http://www.opengis.net/gml",
'xlink':"http://www.w3.org/1999/xlink",
'xsi':"http://www.w3.org/2001/XMLSchema-instance",
'schemaLocation':"http://www.opengis.net/wfs/1.1.0/WFS.xsd",
'my':"http://localhost:88/my"
}
wfs = '{%s}' % namespace['wfs']
ogc = '{%s}' % namespace['ogc']
ows = '{%s}' % namespace['ows']
xlink = '{%s}' % namespace['xlink']
    root = etree.Element(wfs+"WFS_Capabilities", xmlns="http://www.opengis.net/wfs", nsmap=namespace, version="1.1.0", updateSequence="0")
#ServiceIdentification
ServiceIdentification = etree.SubElement(root, ows + "ServiceIdentification")
Title = etree.SubElement(ServiceIdentification, ows + "Title").text = gConfig['wfs']['ServiceIdentification_Title']
ServiceType = etree.SubElement(ServiceIdentification, ows + "ServiceType").text = 'WFS'
ServiceTypeVersion = etree.SubElement(ServiceIdentification, ows + "ServiceTypeVersion").text = '1.1.0'
#OperationsMetadata
OperationsMetadata = etree.SubElement(root, ows + "OperationsMetadata")
Operation= etree.SubElement(OperationsMetadata, ows + "Operation", name="GetCapabilities")
DCP= etree.SubElement(Operation, ows + "DCP")
HTTP= etree.SubElement(DCP, ows + "HTTP")
href = xlink + 'href'
Get= etree.SubElement(HTTP, ows + "Get", {href:gConfig['wfs']['url']})
#Constraint= etree.SubElement(Get, ows + "Constraint", name="GetEncoding")
#AllowedValues= etree.SubElement(Constraint, ows + "AllowedValues")
#Value= etree.SubElement(AllowedValues, ows + "Value").text = 'KVP'
#Operation= etree.SubElement(OperationsMetadata, ows + "Operation", name="GetTile")
#DCP= etree.SubElement(Operation, ows + "DCP")
#HTTP= etree.SubElement(DCP, ows + "HTTP")
#Get= etree.SubElement(HTTP, ows + "Get", {href:gConfig['wmts']['url']})
Parameter = etree.SubElement(Operation, ows + "Parameter", name="AcceptVersions")
Value = etree.SubElement(Parameter, ows + "Value").text = "1.1.0"
Value = etree.SubElement(Parameter, ows + "Value").text = "1.0.0"
Parameter = etree.SubElement(Operation, ows + "Parameter", name="AcceptFormats")
Value = etree.SubElement(Parameter, ows + "Value").text = "text/xml"
Parameter = etree.SubElement(Operation, ows + "Parameter", name="Sections")
Value = etree.SubElement(Parameter, ows + "Value").text = "ServiceIdentification"
Value = etree.SubElement(Parameter, ows + "Value").text = "OperationsMetadata"
Value = etree.SubElement(Parameter, ows + "Value").text = "FeatureTypeList"
Value = etree.SubElement(Parameter, ows + "Value").text = "ServesGMLObjectTypeList"
Value = etree.SubElement(Parameter, ows + "Value").text = "SupportsGMLObjectTypeList"
Value = etree.SubElement(Parameter, ows + "Value").text = "Filter_Capabilities"
Operation= etree.SubElement(OperationsMetadata, ows + "Operation", name="DescribeFeatureType")
DCP= etree.SubElement(Operation, ows + "DCP")
HTTP= etree.SubElement(DCP, ows + "HTTP")
Get= etree.SubElement(HTTP, ows + "Get", {href:gConfig['wfs']['url']})#+'/wfs.cgi?'})
Post= etree.SubElement(HTTP, ows + "Post", {href:gConfig['wfs']['url']})#+'/wfs.cgi'})
Parameter = etree.SubElement(Operation, ows + "Parameter", name="outputFormat")
Value = etree.SubElement(Parameter, ows + "Value").text = "text/xml; subtype=gml/3.1.1"
Operation= etree.SubElement(OperationsMetadata, ows + "Operation", name="GetFeature")
DCP= etree.SubElement(Operation, ows + "DCP")
HTTP= etree.SubElement(DCP, ows + "HTTP")
Get= etree.SubElement(HTTP, ows + "Get", {href:gConfig['wfs']['url']})#+'/wfs.cgi?'})
Post= etree.SubElement(HTTP, ows + "Post", {href:gConfig['wfs']['url']})#+'/wfs.cgi'})
Parameter = etree.SubElement(Operation, ows + "Parameter", name="resultType")
Value = etree.SubElement(Parameter, ows + "Value").text = "results"
Value = etree.SubElement(Parameter, ows + "Value").text = "hits"
Parameter = etree.SubElement(Operation, ows + "Parameter", name="outputFormat")
Value = etree.SubElement(Parameter, ows + "Value").text = "text/xml; subtype=gml/3.1.1"
Operation= etree.SubElement(OperationsMetadata, ows + "Operation", name="GetFeatureWithLock")
DCP= etree.SubElement(Operation, ows + "DCP")
HTTP= etree.SubElement(DCP, ows + "HTTP")
Post= etree.SubElement(HTTP, ows + "Post", {href:gConfig['wfs']['url']})
Parameter = etree.SubElement(Operation, ows + "Parameter", name="resultType")
Value = etree.SubElement(Parameter, ows + "Value").text = "results"
Value = etree.SubElement(Parameter, ows + "Value").text = "hits"
Parameter = etree.SubElement(Operation, ows + "Parameter", name="outputFormat")
Value = etree.SubElement(Parameter, ows + "Value").text = "text/xml; subtype=gml/3.1.1"
Operation= etree.SubElement(OperationsMetadata, ows + "Operation", name="GetGMLObject")
DCP= etree.SubElement(Operation, ows + "DCP")
HTTP= etree.SubElement(DCP, ows + "HTTP")
Post= etree.SubElement(HTTP, ows + "Post", {href:gConfig['wfs']['url']})
Parameter = etree.SubElement(Operation, ows + "Parameter", name="outputFormat")
Value = etree.SubElement(Parameter, ows + "Value").text = "text/xml; subtype=gml/3.1.1"
Value = etree.SubElement(Parameter, ows + "Value").text = "text/xhtml"
Parameter = etree.SubElement(Operation, ows + "Parameter", name="LocalTraverseXLinkScope")
Value = etree.SubElement(Parameter, ows + "Value").text = "0"
Value = etree.SubElement(Parameter, ows + "Value").text = "*"
Parameter = etree.SubElement(Operation, ows + "Parameter", name="RemoteTraverseXLinkScope")
Value = etree.SubElement(Parameter, ows + "Value").text = "0"
Value = etree.SubElement(Parameter, ows + "Value").text = "*"
Operation= etree.SubElement(OperationsMetadata, ows + "Operation", name="LockFeature")
DCP= etree.SubElement(Operation, ows + "DCP")
HTTP= etree.SubElement(DCP, ows + "HTTP")
Post= etree.SubElement(HTTP, ows + "Post", {href:gConfig['wfs']['url']})
Parameter = etree.SubElement(Operation, ows + "Parameter", name="lockAction")
Value = etree.SubElement(Parameter, ows + "Value").text = "ALL"
Value = etree.SubElement(Parameter, ows + "Value").text = "SOME"
Operation= etree.SubElement(OperationsMetadata, ows + "Operation", name="Transaction")
DCP= etree.SubElement(Operation, ows + "DCP")
HTTP= etree.SubElement(DCP, ows + "HTTP")
Post= etree.SubElement(HTTP, ows + "Post", {href:gConfig['wfs']['url']})
Parameter = etree.SubElement(Operation, ows + "Parameter", name="inputFormat")
Value = etree.SubElement(Parameter, ows + "Value").text = "text/xml; subtype=gml/3.1.1"
Parameter = etree.SubElement(Operation, ows + "Parameter", name="idgen")
Value = etree.SubElement(Parameter, ows + "Value").text = "GenerateNew"
Value = etree.SubElement(Parameter, ows + "Value").text = "UseExisting"
Value = etree.SubElement(Parameter, ows + "Value").text = "ReplaceDuplicate"
Parameter = etree.SubElement(Operation, ows + "Parameter", name="releaseAction")
Value = etree.SubElement(Parameter, ows + "Value").text = "ALL"
Value = etree.SubElement(Parameter, ows + "Value").text = "SOME"
Parameter = etree.SubElement(OperationsMetadata, ows + "Parameter", name="srsName")
Value = etree.SubElement(Parameter, ows + "Value").text = "EPSG:4326"
Constraint = etree.SubElement(OperationsMetadata, ows + "Constraint", name="DefaultMaxFeatures")
Value = etree.SubElement(Constraint, ows + "Value").text = "10000"
Constraint = etree.SubElement(OperationsMetadata, ows + "Constraint", name="LocalTraverseXLinkScope")
Value = etree.SubElement(Constraint, ows + "Value").text = "0"
Value = etree.SubElement(Constraint, ows + "Value").text = "*"
Constraint = etree.SubElement(OperationsMetadata, ows + "Constraint", name="RemoteTraverseXLinkScope")
Value = etree.SubElement(Constraint, ows + "Value").text = "0"
Value = etree.SubElement(Constraint, ows + "Value").text = "*"
Constraint = etree.SubElement(OperationsMetadata, ows + "Constraint", name="DefaultLockExpiry")
Value = etree.SubElement(Constraint, ows + "Value").text = "5"
FeatureTypeList = etree.SubElement(root, wfs + "FeatureTypeList")
FeatureType = etree.SubElement(FeatureTypeList, wfs + "FeatureType")
Name = etree.SubElement(FeatureType, wfs + "Name").text = "PointType"
Title = etree.SubElement(FeatureType, wfs + "Title").text = "Point Type"
DefaultSRS = etree.SubElement(FeatureType, wfs + "DefaultSRS").text = "EPSG:4326"
OutputFormats = etree.SubElement(FeatureType, wfs + "OutputFormats")
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xml; subtype=gml/3.1.1"
WGS84BoundingBox = etree.SubElement(FeatureType, ows + "WGS84BoundingBox")
LowerCorner = etree.SubElement(WGS84BoundingBox, ows + "LowerCorner").text = "-180 -90"
UpperCorner = etree.SubElement(WGS84BoundingBox, ows + "UpperCorner").text = "180 90"
ServesGMLObjectTypeList = etree.SubElement(root, wfs + "ServesGMLObjectTypeList")
GMLObjectType = etree.SubElement(ServesGMLObjectTypeList, wfs + "GMLObjectType")
Name = etree.SubElement(GMLObjectType, wfs + "Name").text = "PointType"
Title = etree.SubElement(GMLObjectType, wfs + "Title").text = "Point Type"
OutputFormats = etree.SubElement(GMLObjectType, wfs + "OutputFormats")
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xml; subtype=gml/3.1.1"
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xhtml"
SupportsGMLObjectTypeList = etree.SubElement(root, wfs + "SupportsGMLObjectTypeList")
GMLObjectType = etree.SubElement(SupportsGMLObjectTypeList, wfs + "GMLObjectType")
Name = etree.SubElement(GMLObjectType, wfs + "Name").text = "gml:AbstractGMLFeatureType"
OutputFormats = etree.SubElement(GMLObjectType, wfs + "OutputFormats")
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xml; subtype=gml/3.1.1"
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xhtml"
GMLObjectType = etree.SubElement(SupportsGMLObjectTypeList, wfs + "GMLObjectType")
Name = etree.SubElement(GMLObjectType, wfs + "Name").text = "gml:PointType"
OutputFormats = etree.SubElement(GMLObjectType, wfs + "OutputFormats")
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xml; subtype=gml/3.1.1"
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xhtml"
GMLObjectType = etree.SubElement(SupportsGMLObjectTypeList, wfs + "GMLObjectType")
Name = etree.SubElement(GMLObjectType, wfs + "Name").text = "gml:LineStringType"
OutputFormats = etree.SubElement(GMLObjectType, wfs + "OutputFormats")
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xml; subtype=gml/3.1.1"
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xhtml"
GMLObjectType = etree.SubElement(SupportsGMLObjectTypeList, wfs + "GMLObjectType")
Name = etree.SubElement(GMLObjectType, wfs + "Name").text = "gml:PolygonType"
OutputFormats = etree.SubElement(GMLObjectType, wfs + "OutputFormats")
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xml; subtype=gml/3.1.1"
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xhtml"
GMLObjectType = etree.SubElement(SupportsGMLObjectTypeList, wfs + "GMLObjectType")
Name = etree.SubElement(GMLObjectType, wfs + "Name").text = "gml:MultiPointType"
OutputFormats = etree.SubElement(GMLObjectType, wfs + "OutputFormats")
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xml; subtype=gml/3.1.1"
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xhtml"
GMLObjectType = etree.SubElement(SupportsGMLObjectTypeList, wfs + "GMLObjectType")
Name = etree.SubElement(GMLObjectType, wfs + "Name").text = "gml:MultiCurveType"
OutputFormats = etree.SubElement(GMLObjectType, wfs + "OutputFormats")
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xml; subtype=gml/3.1.1"
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xhtml"
GMLObjectType = etree.SubElement(SupportsGMLObjectTypeList, wfs + "GMLObjectType")
Name = etree.SubElement(GMLObjectType, wfs + "Name").text = "gml:MultiSurfaceType"
OutputFormats = etree.SubElement(GMLObjectType, wfs + "OutputFormats")
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xml; subtype=gml/3.1.1"
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xhtml"
GMLObjectType = etree.SubElement(SupportsGMLObjectTypeList, wfs + "GMLObjectType")
Name = etree.SubElement(GMLObjectType, wfs + "Name").text = "gml:AbstractMetaDataType"
OutputFormats = etree.SubElement(GMLObjectType, wfs + "OutputFormats")
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xml; subtype=gml/3.1.1"
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xhtml"
GMLObjectType = etree.SubElement(SupportsGMLObjectTypeList, wfs + "GMLObjectType")
Name = etree.SubElement(GMLObjectType, wfs + "Name").text = "gml:AbstractTopologyType"
OutputFormats = etree.SubElement(GMLObjectType, wfs + "OutputFormats")
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xml; subtype=gml/3.1.1"
Format = etree.SubElement(OutputFormats, wfs + "Format").text = "text/xhtml"
Filter_Capabilities = etree.SubElement(root, ogc + "Filter_Capabilities")
Spatial_Capabilities = etree.SubElement(Filter_Capabilities, ogc + "Spatial_Capabilities")
GeometryOperands = etree.SubElement(Spatial_Capabilities, ogc + "GeometryOperands")
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:Envelope"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:Point"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:LineString"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:Polygon"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:ArcByCenterPoint"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:CircleByCenterPoint"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:Arc"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:Circle"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:ArcByBulge"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:Bezier"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:Clothoid"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:CubicSpline"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:Geodesic"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:OffsetCurve"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:Triangle"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:PolyhedralSurface"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:TriangulatedSurface"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:Tin"
GeometryOperand = etree.SubElement(GeometryOperands, ogc + "GeometryOperand").text = "gml:Solid"
SpatialOperators = etree.SubElement(Spatial_Capabilities, ogc + "SpatialOperators")
SpatialOperator = etree.SubElement(SpatialOperators, ogc + "SpatialOperator", name="BBOX")
SpatialOperator = etree.SubElement(SpatialOperators, ogc + "SpatialOperator", name="Equals")
SpatialOperator = etree.SubElement(SpatialOperators, ogc + "SpatialOperator", name="Disjoint")
SpatialOperator = etree.SubElement(SpatialOperators, ogc + "SpatialOperator", name="Intersects")
SpatialOperator = etree.SubElement(SpatialOperators, ogc + "SpatialOperator", name="Touches")
SpatialOperator = etree.SubElement(SpatialOperators, ogc + "SpatialOperator", name="Crosses")
SpatialOperator = etree.SubElement(SpatialOperators, ogc + "SpatialOperator", name="Within")
SpatialOperator = etree.SubElement(SpatialOperators, ogc + "SpatialOperator", name="Contains")
SpatialOperator = etree.SubElement(SpatialOperators, ogc + "SpatialOperator", name="Overlaps")
SpatialOperator = etree.SubElement(SpatialOperators, ogc + "SpatialOperator", name="Beyond")
Scalar_Capabilities = etree.SubElement(Filter_Capabilities, ogc + "Scalar_Capabilities")
LogicalOperators = etree.SubElement(Scalar_Capabilities, ogc + "LogicalOperators")
ComparisonOperators = etree.SubElement(Scalar_Capabilities, ogc + "ComparisonOperators")
ComparisonOperator = etree.SubElement(ComparisonOperators, ogc + "ComparisonOperator").text = "LessThan"
ComparisonOperator = etree.SubElement(ComparisonOperators, ogc + "ComparisonOperator").text = "GreaterThan"
ComparisonOperator = etree.SubElement(ComparisonOperators, ogc + "ComparisonOperator").text = "LessThanEqualTo"
ComparisonOperator = etree.SubElement(ComparisonOperators, ogc + "ComparisonOperator").text = "GreaterThanEqualTo"
ComparisonOperator = etree.SubElement(ComparisonOperators, ogc + "ComparisonOperator").text = "EqualTo"
ComparisonOperator = etree.SubElement(ComparisonOperators, ogc + "ComparisonOperator").text = "NotEqualTo"
ComparisonOperator = etree.SubElement(ComparisonOperators, ogc + "ComparisonOperator").text = "Like"
ComparisonOperator = etree.SubElement(ComparisonOperators, ogc + "ComparisonOperator").text = "Between"
ComparisonOperator = etree.SubElement(ComparisonOperators, ogc + "ComparisonOperator").text = "NullCheck"
ArithmeticOperators = etree.SubElement(Scalar_Capabilities, ogc + "ArithmeticOperators")
SimpleArithmetic = etree.SubElement(ArithmeticOperators, ogc + "SimpleArithmetic")
Functions = etree.SubElement(ArithmeticOperators, ogc + "Functions")
FunctionNames = etree.SubElement(Functions, ogc + "FunctionNames")
FunctionName = etree.SubElement(FunctionNames, ogc + "FunctionName", nArgs="1").text = "MIN"
FunctionName = etree.SubElement(FunctionNames, ogc + "FunctionName", nArgs="1").text = "MAX"
FunctionName = etree.SubElement(FunctionNames, ogc + "FunctionName", nArgs="1").text = "SIN"
FunctionName = etree.SubElement(FunctionNames, ogc + "FunctionName", nArgs="1").text = "COS"
FunctionName = etree.SubElement(FunctionNames, ogc + "FunctionName", nArgs="1").text = "TAN"
Id_Capabilities = etree.SubElement(Filter_Capabilities, ogc + "Id_Capabilities")
EID = etree.SubElement(Id_Capabilities, ogc + "EID")
FID = etree.SubElement(Id_Capabilities, ogc + "FID")
#WGS84BoundingBox = etree.SubElement(Layer, ows + "WGS84BoundingBox")
#SupportedCRS = etree.SubElement(TileMatrixSet, ows + "SupportedCRS" ).text = gConfig['wmts']['SupportedCRS']
ret = etree.tostring(root, pretty_print=True, xml_declaration=True, encoding=ENCODING)
print(ret)
return ret
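# WMTS GetCapabilities entry point: expects TILETYPE and SUBTYPE in the KVP parameters
# and returns (mimetype, xml_string).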
def handle_wmts_GetCapabilities(params={}):
headers = {}
mimetype = 'text/xml;charset=' + ENCODING
s = ''
if params.has_key('TILETYPE') and params.has_key('SUBTYPE'):
s = create_wmts_GetCapabilities(params['TILETYPE'], params['SUBTYPE'])
return mimetype, s
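# Render the WMTS 1.0.0 capabilities document for the given tile type and layer (subtype)
# from the string template below; the commented-out lxml-based builder inside the function
# is an older implementation kept for reference.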
def create_wmts_GetCapabilities(tiletype, subtype):
global gConfig
#'''
#namespace = {'ows':"http://www.opengis.net/ows/1.1", 'xlink':"http://www.w3.org/1999/xlink", 'xsi':"http://www.w3.org/2001/XMLSchema-instance", 'gml':"http://www.opengis.net/gml", 'schemaLocation':"http://schemas.opengis.net/wmts/1.0/wmtsGetCapabilities_response.xsd"}
#ows = '{%s}' % namespace['ows']
#xlink = '{%s}' % namespace['xlink']
#root = etree.Element("Capabilities", xmlns="http://www.opengis.net/wmts/1.0", nsmap=namespace, version="1.0.0")
##ServiceIdentification
#ServiceIdentification = etree.SubElement(root, ows + "ServiceIdentification")
#Title = etree.SubElement(ServiceIdentification, ows + "Title").text = gConfig['webgis']['wmts']['ServiceIdentification_Title']
#ServiceType = etree.SubElement(ServiceIdentification, ows + "ServiceType").text = 'OGC WMTS'
#ServiceTypeVersion = etree.SubElement(ServiceIdentification, ows + "ServiceTypeVersion").text = '1.0.0'
##OperationsMetadata
#OperationsMetadata = etree.SubElement(root, ows + "OperationsMetadata")
#Operation= etree.SubElement(OperationsMetadata, ows + "Operation", name="GetCapabilities")
#DCP= etree.SubElement(Operation, ows + "DCP")
#HTTP= etree.SubElement(DCP, ows + "HTTP")
#href = xlink + 'href'
#Get= etree.SubElement(HTTP, ows + "Get", {href:gConfig['webgis']['wmts']['url'] + '?'})
#Constraint= etree.SubElement(Get, ows + "Constraint", name="GetEncoding")
#AllowedValues= etree.SubElement(Constraint, ows + "AllowedValues")
#Value= etree.SubElement(AllowedValues, ows + "Value").text = 'KVP'
#Operation= etree.SubElement(OperationsMetadata, ows + "Operation", name="GetTile")
#DCP= etree.SubElement(Operation, ows + "DCP")
#HTTP= etree.SubElement(DCP, ows + "HTTP")
#Get= etree.SubElement(HTTP, ows + "Get", {href:gConfig['webgis']['wmts']['url'] + '?'})
##Contents
#Contents = etree.SubElement(root, "Contents")
#Layer = etree.SubElement(Contents, "Layer")
#Title = etree.SubElement(Layer, ows + "Title").text = gConfig['webgis']['wmts']['Layer_Title']
#WGS84BoundingBox = etree.SubElement(Layer, ows + "WGS84BoundingBox")
#LowerCorner = etree.SubElement(WGS84BoundingBox, ows + "LowerCorner").text = gConfig['webgis']['wmts']['WGS84BoundingBox']['LowerCorner']
#UpperCorner = etree.SubElement(WGS84BoundingBox, ows + "UpperCorner").text = gConfig['webgis']['wmts']['WGS84BoundingBox']['UpperCorner']
#Identifier = etree.SubElement(Layer, ows + "Identifier").text = gConfig['webgis']['wmts']['Layer_Identifier']
#Style = etree.SubElement(Layer, "Style", isDefault="true")
#Title = etree.SubElement(Style, ows + "Title" ).text = 'Default'
#Identifier = etree.SubElement(Style, ows + "Identifier" ).text = 'default'
#Format = etree.SubElement(Layer, "Format" ).text = gConfig['mime_type'][gConfig['wmts']['format']]
#TileMatrixSetLink = etree.SubElement(Layer, "TileMatrixSetLink" )
#TileMatrixSet = etree.SubElement(TileMatrixSetLink, "TileMatrixSet" ).text = gConfig['webgis']['wmts']['TileMatrixSet']
#TileMatrixSet = etree.SubElement(Contents, "TileMatrixSet")
#Identifier = etree.SubElement(TileMatrixSet, ows + "Identifier" ).text = gConfig['webgis']['wmts']['TileMatrixSet']
#SupportedCRS = etree.SubElement(TileMatrixSet, ows + "SupportedCRS" ).text = gConfig['webgis']['wmts']['SupportedCRS']
#WellKnownScaleSet = etree.SubElement(TileMatrixSet, "WellKnownScaleSet" ).text = gConfig['webgis']['wmts']['WellKnownScaleSet']
#max_zoom_level, min_zoom_level = int(gConfig['wmts']['max_zoom_level']), int(gConfig['webgis']['wmts']['min_zoom_level'])
#if max_zoom_level < min_zoom_level:
#max_zoom_level, min_zoom_level = min_zoom_level, max_zoom_level
##zoomlist = range(max_zoom_level,min_zoom_level, -1)
#zoomlist = range(min_zoom_level, max_zoom_level+1, 1)
#pixelSize = float(gConfig['webgis']['wmts']['pixelSize'])
#tileWidth,tileHeight = int(gConfig['webgis']['wmts']['TileWidth']), int(gConfig['webgis']['wmts']['TileHeight'])
#minLonLat,maxLonLat = (float(gConfig['webgis']['wmts']['minLonLat'][0]), float(gConfig['webgis']['wmts']['minLonLat'][1])), (float(gConfig['webgis']['wmts']['maxLonLat'][0]), float(gConfig['webgis']['wmts']['maxLonLat'][1]))
##tileMatrixMinX, tileMatrixMaxX = (26.0, 102.0), (26.0, 104.0)
##tileMatrixMinY, tileMatrixMaxY = (24.0, 102.0), (26.0, 102.0)
#tileMatrixMinX, tileMatrixMaxX = (maxLonLat[1], minLonLat[0]), (maxLonLat[1], maxLonLat[0])
#tileMatrixMinY, tileMatrixMaxY = (minLonLat[1], minLonLat[0]), (maxLonLat[1], minLonLat[0])
#metersPerUnit = 0.0
#if gConfig['webgis']['wmts'].has_key('metersPerUnit'):
#metersPerUnit = float(gConfig['webgis']['wmts']['metersPerUnit'])
#else:
#metersPerUnitX = mapUtils.countDistanceFromLatLon(tileMatrixMaxX , tileMatrixMinX)/2*1000
##print('metersPerUnitX=%f' % metersPerUnitX)
#metersPerUnitY = mapUtils.countDistanceFromLatLon(tileMatrixMaxY , tileMatrixMinY)/2*1000
##print('metersPerUnitY=%f' % metersPerUnitY)
#metersPerUnit = metersPerUnitY
##print('metersPerUnit=%f' % metersPerUnit)
#for i in zoomlist:
##matrixHeight = matrixWidth = mapUtils.tiles_on_level(i)
#matrixHeight = matrixWidth = mapUtils.tiles_on_level(max_zoom_level-(i-1))
##print('%d=%d' % (i , matrixHeight))
##scaleDenominatorX = metersPerUnit/pixelSize * mapUtils.countDistanceFromLatLon(tileMatrixMaxX , tileMatrixMinX) * 1000./(tileWidth * matrixWidth)
##scaleDenominatorY = metersPerUnit/pixelSize * mapUtils.countDistanceFromLatLon(tileMatrixMaxY , tileMatrixMinY) * 1000./(tileHeight * matrixHeight)
##print('scaleDenominatorX=%f, scaleDenominatorY=%f' % (scaleDenominatorX, scaleDenominatorY))
##scaleDenominator = metersPerUnit/pixelSize * mapUtils.countDistanceFromLatLon(tileMatrixMaxY , tileMatrixMinY) * 1000. /(tileHeight * matrixHeight)
#scaleDenominator = metersPerUnit/pixelSize * mapUtils.countDistanceFromLatLon(tileMatrixMaxY , tileMatrixMinY) /(tileHeight * matrixHeight)
#TileMatrix = etree.SubElement(TileMatrixSet, "TileMatrix" )
##Identifier = etree.SubElement(TileMatrix, ows + "Identifier" ).text = "ynsat_" + str(i)
#Identifier = etree.SubElement(TileMatrix, ows + "Identifier" ).text = str(i)
#ScaleDenominator = etree.SubElement(TileMatrix, "ScaleDenominator" ).text = '%.8f' % scaleDenominator
#TopLeftCorner = etree.SubElement(TileMatrix, "TopLeftCorner" ).text = gConfig['webgis']['wmts']['TopLeftCorner']
#TileWidth = etree.SubElement(TileMatrix, "TileWidth" ).text = str(tileWidth)
#TileHeight = etree.SubElement(TileMatrix, "TileHeight" ).text = str(tileHeight)
#MatrixWidth = etree.SubElement(TileMatrix, "MatrixWidth" ).text = str(matrixWidth)
#MatrixHeight = etree.SubElement(TileMatrix, "MatrixHeight" ).text = str(matrixHeight)
#ret = etree.tostring(root, pretty_print=True, xml_declaration=True, encoding=ENCODING)
#print(ret)
#return ret
#'''
ret = '''<?xml version="1.0" encoding="UTF-8"?>
<Capabilities xmlns="http://www.opengis.net/wmts/1.0"
xmlns:ows="http://www.opengis.net/ows/1.1"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:gml="http://www.opengis.net/gml" xsi:schemaLocation="http://www.opengis.net/wmts/1.0 http://schemas.opengis.net/wmts/1.0/wmtsGetCapabilities_response.xsd"
version="1.0.0">
<ows:ServiceIdentification>
<ows:Title>%s</ows:Title>
<ows:ServiceType>OGC WMTS</ows:ServiceType>
<ows:ServiceTypeVersion>1.0.0</ows:ServiceTypeVersion>
</ows:ServiceIdentification>
<ows:OperationsMetadata>
<ows:Operation name="GetCapabilities">
<ows:DCP>
<ows:HTTP>
<ows:Get xlink:href="http://%s:%s/wmts?REQUEST=getcapabilities">
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>KVP</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
</ows:HTTP>
</ows:DCP>
</ows:Operation>
<ows:Operation name="GetTile">
<ows:DCP>
<ows:HTTP>
<ows:Get xlink:href="http://%s:%s/wmts?REQUEST=gettile">
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>KVP</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
</ows:HTTP>
</ows:DCP>
</ows:Operation>
</ows:OperationsMetadata>
<Contents>
<Layer>
<ows:Title>%s</ows:Title>
<ows:WGS84BoundingBox>
<ows:LowerCorner>-180.0 -90.0</ows:LowerCorner>
<ows:UpperCorner>180.0 90.0</ows:UpperCorner>
</ows:WGS84BoundingBox>
<ows:Identifier>%s</ows:Identifier>
<Style isDefault="true">
<ows:Identifier>_null</ows:Identifier>
</Style>
<Format>%s</Format>
<TileMatrixSetLink>
<TileMatrixSet>%s</TileMatrixSet>
</TileMatrixSetLink>
</Layer>
<TileMatrixSet>
<ows:Identifier>%s</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG::900913</ows:SupportedCRS>
<TileMatrix>
<ows:Identifier>0</ows:Identifier>
<ScaleDenominator>5.590822639508929E8</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>1</ows:Identifier>
<ScaleDenominator>2.7954113197544646E8</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2</MatrixWidth>
<MatrixHeight>2</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>2</ows:Identifier>
<ScaleDenominator>1.3977056598772323E8</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4</MatrixWidth>
<MatrixHeight>4</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>3</ows:Identifier>
<ScaleDenominator>6.988528299386162E7</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>8</MatrixWidth>
<MatrixHeight>8</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>4</ows:Identifier>
<ScaleDenominator>3.494264149693081E7</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>16</MatrixWidth>
<MatrixHeight>16</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>5</ows:Identifier>
<ScaleDenominator>1.7471320748465404E7</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>32</MatrixWidth>
<MatrixHeight>32</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>6</ows:Identifier>
<ScaleDenominator>8735660.374232702</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>64</MatrixWidth>
<MatrixHeight>64</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>7</ows:Identifier>
<ScaleDenominator>4367830.187116351</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>128</MatrixWidth>
<MatrixHeight>128</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>8</ows:Identifier>
<ScaleDenominator>2183915.0935581755</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>256</MatrixWidth>
<MatrixHeight>256</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>9</ows:Identifier>
<ScaleDenominator>1091957.5467790877</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>512</MatrixWidth>
<MatrixHeight>512</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>10</ows:Identifier>
<ScaleDenominator>545978.7733895439</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1024</MatrixWidth>
<MatrixHeight>1024</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>11</ows:Identifier>
<ScaleDenominator>272989.38669477194</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2048</MatrixWidth>
<MatrixHeight>2048</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>12</ows:Identifier>
<ScaleDenominator>136494.69334738597</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4096</MatrixWidth>
<MatrixHeight>4096</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>13</ows:Identifier>
<ScaleDenominator>68247.34667369298</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>8192</MatrixWidth>
<MatrixHeight>8192</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>14</ows:Identifier>
<ScaleDenominator>34123.67333684649</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>16384</MatrixWidth>
<MatrixHeight>16384</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>15</ows:Identifier>
<ScaleDenominator>17061.836668423246</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>32768</MatrixWidth>
<MatrixHeight>32768</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>16</ows:Identifier>
<ScaleDenominator>8530.918334211623</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>65536</MatrixWidth>
<MatrixHeight>65536</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>17</ows:Identifier>
<ScaleDenominator>4265.4591671058115</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>131072</MatrixWidth>
<MatrixHeight>131072</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>18</ows:Identifier>
<ScaleDenominator>2132.7295835529058</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>262144</MatrixWidth>
<MatrixHeight>262144</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>19</ows:Identifier>
<ScaleDenominator>1066.3647917764529</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>524288</MatrixWidth>
<MatrixHeight>524288</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>20</ows:Identifier>
<ScaleDenominator>533.1823958882264</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1048576</MatrixWidth>
<MatrixHeight>1048576</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>21</ows:Identifier>
<ScaleDenominator>266.5911979441132</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2097152</MatrixWidth>
<MatrixHeight>2097152</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>22</ows:Identifier>
<ScaleDenominator>133.2955989720566</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4194304</MatrixWidth>
<MatrixHeight>4194304</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>23</ows:Identifier>
<ScaleDenominator>66.6477994860283</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>8388608</MatrixWidth>
<MatrixHeight>8388608</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>24</ows:Identifier>
<ScaleDenominator>33.32389974301415</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>16777216</MatrixWidth>
<MatrixHeight>16777216</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>25</ows:Identifier>
<ScaleDenominator>16.661949871507076</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>33554432</MatrixWidth>
<MatrixHeight>33554432</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>26</ows:Identifier>
<ScaleDenominator>8.330974935753538</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>67108864</MatrixWidth>
<MatrixHeight>67108864</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>27</ows:Identifier>
<ScaleDenominator>4.165487467876769</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>134217728</MatrixWidth>
<MatrixHeight>134217728</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>28</ows:Identifier>
<ScaleDenominator>2.0827437339383845</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>268435456</MatrixWidth>
<MatrixHeight>268435456</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>29</ows:Identifier>
<ScaleDenominator>1.0413718669691923</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>536870912</MatrixWidth>
<MatrixHeight>536870912</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>30</ows:Identifier>
<ScaleDenominator>0.5206859334845961</ScaleDenominator>
<TopLeftCorner>-2.003750834E7 2.0037508E7</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1073741824</MatrixWidth>
<MatrixHeight>1073741824</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
</Capabilities>''' % (
str(tiletype),
str(gConfig['webgis']['wmts']['host']),
str(gConfig['webgis']['wmts']['port']),
str(gConfig['webgis']['wmts']['host']),
str(gConfig['webgis']['wmts']['port']),
str(subtype),
str(subtype),
str(gConfig['mime_type'][gConfig['webgis'][tiletype][subtype]['mimetype']]),
str(subtype),
str(subtype),
)
#<ServiceMetadataURL xlink:href="http://%s:%s/wmts?REQUEST=getcapabilities"/>
return ret
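# Callback invoked after a tile download: reads the tile file from the configured map/sat
# tile root on disk and stores its bytes in the in-memory caches keyed by "zoom-col-row".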
def download_callback(*args, **kwargs):
global gConfig, gMapTileCache, gSatTileCache, gTerrainCache
global STATICRESOURCE_IMG_DIR
zoom, col, row = args[1][2], args[1][0], args[1][1]
root = os.path.abspath(gConfig['wmts']['tiles_map_root'])
if args[2] == mapConst.LAYER_SAT:
root = os.path.abspath(gConfig['wmts']['tiles_sat_root'])
if args[2] == mapConst.LAYER_MAP:
root = os.path.abspath(gConfig['wmts']['tiles_map_root'])
p = os.path.join(root,
str(zoom),
str(col / 1024),
str(col % 1024),
str(row / 1024),
str(row % 1024) + gConfig['wmts']['format']
)
if os.path.exists(p):
key = '%d-%d-%d' % (zoom, col, row)
with open(p, 'rb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'rb')
if args[2] == mapConst.LAYER_SAT:
gSatTileCache[key] = f1.read()
if args[2] == mapConst.LAYER_MAP:
gMapTileCache[key] = f1.read()
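# WMTS GetTile handler. Looks up the tile in GridFS by TILEMATRIXSET (layer), TILEMATRIX
# (zoom level), TILEROW and TILECOL. A typical KVP request looks like the following
# (host, port and layer name are placeholders, not values taken from this file):
#   http://<host>:<port>/wmts?REQUEST=gettile&TILEMATRIXSET=<layer>&TILEMATRIX=5&TILEROW=12&TILECOL=26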
def handle_wmts_GetTile(params):
global gConfig
mimetype, ret = None, None
tiletype = 'webgis/tiles'
arr = tiletype.split('/')
subtype = None
if params.has_key('TILEMATRIXSET'):
subtype = params['TILEMATRIXSET']
level, y, x = None, None, None
if params.has_key('TILEMATRIX'):
level = int(params['TILEMATRIX'])
if params.has_key('TILEROW'):
y = int(params['TILEROW'])
if params.has_key('TILECOL'):
x = int(params['TILECOL'])
if subtype is not None and level is not None and y is not None and x is not None:
tilepath = '%d/%d/%d%s' % (level, x, y, str(gConfig['webgis'][arr[1]][subtype]['mimetype']))
d = {}
d['x'] = str(x)
d['y'] = str(y)
d['level'] = str(level)
mimetype, ret = db_util.gridfs_tile_find(tiletype, subtype, tilepath, d)
return mimetype, ret
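# Serve XYZ tiles from GridFS with an in-memory cache (gTileCache); falls back to the
# configured "missing" placeholder image when a tile cannot be found.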
def handle_tiles(environ):
global gConfig, gTileCache
global STATICRESOURCE_IMG_DIR
def get_blank_tile(image_type):
blank_tile = ''
picpath = os.path.join(STATICRESOURCE_IMG_DIR, gConfig['webgis']['tiles'][image_type]['missing'])
with open(picpath, 'rb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'rb')
blank_tile = f1.read()
return blank_tile
headers = {}
#path_info = environ['PATH_INFO']
#d = cgi.parse(None, environ)
querydict, buf = get_querydict_by_GET_POST(environ)
ret = None
mimetype = 'image/png'
image_type = None
#key = path_info.replace('/tiles/','')
if querydict.has_key('image_type') and querydict.has_key('x') and querydict.has_key('y') and querydict.has_key('level'):
image_type = querydict['image_type']
x, y, level = querydict['x'], querydict['y'], querydict['level']
tilepath = '%s/%s/%s%s' % (level, x, y, gConfig['webgis']['tiles'][image_type]['mimetype'])
if not gTileCache.has_key(image_type):
gTileCache[image_type] = {}
if not gTileCache[image_type].has_key('missing'):
gTileCache[image_type]['missing'] = get_blank_tile(image_type)
if gTileCache[image_type].has_key(tilepath):
ret = gTileCache[image_type][tilepath]
else:
try:
mimetype, ret = db_util.gridfs_tile_find('webgis/tiles', image_type, tilepath, querydict)
gTileCache[image_type][tilepath] = ret
except:
print(sys.exc_info())
ret = gTileCache[image_type]['missing']
else:
if image_type:
if not gTileCache.has_key(image_type):
gTileCache[image_type] = {}
if not gTileCache[image_type].has_key('missing'):
gTileCache[image_type]['missing'] = get_blank_tile(image_type)
ret = gTileCache[image_type]['missing']
else:
ret = get_blank_tile('arcgis_sat')
if ret is None:
ret = gTileCache[image_type]['missing']
headers['Content-Type'] = mimetype
return '200 OK', headers, ret
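# Serve terrain tiles (layer.json and individual tiles, default terrain_type 'quantized_mesh')
# from GridFS, caching results in gTileCache and returning the configured blank tile when missing.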
def handle_terrain(environ):
global gConfig, gTileCache
path_info = environ['PATH_INFO']
#d = cgi.parse(None, environ)
querydict, buf = get_querydict_by_GET_POST(environ)
ret = None
headers = {}
mimetype = str('application/octet-stream')
key = path_info.replace('/terrain/','')
terrain_type = 'quantized_mesh'
if querydict.has_key('terrain_type'):
terrain_type = querydict['terrain_type']
if not gTileCache.has_key(terrain_type):
gTileCache[terrain_type] = {}
if gTileCache[terrain_type].has_key(key):
ret = gTileCache[terrain_type][key]
else:
tilepath = key
if tilepath == 'layer.json':
mimetype, ret = db_util.gridfs_tile_find('webgis/terrain', terrain_type, tilepath, querydict)
gTileCache[terrain_type][key] = ret
headers['Content-Type'] = mimetype
return '200 OK', headers, ret
else:
print('tilepath:%s' % tilepath)
mimetype, ret = db_util.gridfs_tile_find('webgis/terrain', terrain_type, tilepath, querydict)
if ret:
gTileCache[terrain_type][key] = ret
headers['Content-Type'] = mimetype
return '200 OK', headers, ret
else:
if not gTileCache[terrain_type].has_key('missing'):
print('reading mongo blank_terrain...')
tilepath = gConfig['webgis']['terrain'][terrain_type]['missing'] #'0/0/0.terrain'
mimetype, ret = db_util.gridfs_tile_find('webgis/terrain', terrain_type, tilepath, querydict)
gTileCache[terrain_type]['missing'] = ret
ret = gTileCache[terrain_type]['missing']
headers['Content-Type'] = mimetype
return '200 OK', headers, ret
def handle_terrain1(environ):
global gConfig, gMapTileCache, gSatTileCache, gTerrainCache
path_info = environ['PATH_INFO']
#d = cgi.parse(None, environ)
ret = None
headers = {}
key = path_info.replace('/terrain/','')
if gTerrainCache.has_key(key):
ret = gTerrainCache[key]
else:
arr = key.split('/')
tilepath = gConfig['webgis']['terrain']['tiles_dir']
for i in arr:
tilepath = os.path.join(tilepath, i)
tilepath = os.path.abspath(tilepath)
ret = ''
if os.path.exists(tilepath):
#print('reading %s...' % tilepath)
with open(tilepath, 'rb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'rb')
ret = f1.read()
gTerrainCache[key] = ret
else:
if gTerrainCache.has_key('missing'):
ret = gTerrainCache['missing']
else:
print('reading blank_terrain...')
with open(gConfig['webgis']['terrain']['blank_terrain'], 'rb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'rb')
ret = f1.read()
gTerrainCache['missing'] = ret
headers['Content-Type'] = 'application/octet-stream'
return '200 OK', headers, ret
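# Serve pre-rendered ArcGIS-style tiles from the local arcgis_tiles_root directory,
# caching them in gSatTileCache and falling back to the configured "missing" image.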
def handle_arcgistile(environ):
global gConfig, gMapTileCache, gSatTileCache
global STATICRESOURCE_IMG_DIR
ret = None
headers = {}
dd = cgi.parse(None, environ)
d = {}
for k in dd.keys():
d[k] = dd[k][0]
if d.has_key('zoom') and d.has_key('col') and d.has_key('row'):
zoom = int(d['zoom'])
col = int(d['col'])
row = int(d['row'])
key = '%d-%d-%d' % (zoom, col, row)
if not gSatTileCache.has_key(key):
try:
#picpath = os.path.join(gConfig['wmts']['arcgis_tiles_root'], '_alllayers', 'L%02d' % zoom, 'R%08x' % row, 'C%08x%s' % (col, gConfig['wmts']['format']))
picpath = os.path.join(gConfig['webgis']['wmts']['arcgis_tiles_root'], '%d' % zoom, '%d' % col, '%d%s' % (row, gConfig['webgis']['wmts']['format']))
print('%s, %s' % (key, picpath))
with open(picpath, 'rb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'rb')
gSatTileCache[key] = f1.read()
except:
foundit = False
if not foundit:
key = 'missing'
if not gSatTileCache.has_key(key):
picpath = os.path.join(STATICRESOURCE_IMG_DIR, gConfig['webgis']['wmts']['missing'])
with open(picpath, 'rb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'rb')
gSatTileCache[key] = f1.read()
ret = gSatTileCache[key]
elif d.has_key('is_esri') :
key = environ['PATH_INFO'].replace('/arcgistile/','')
if not gSatTileCache.has_key(key):
try:
#picpath = os.path.join(gConfig['webgis']['wmts']['arcgis_tiles_root'], '_alllayers', 'L%02d' % zoom, 'R%08x' % row, 'C%08x%s' % (col, gConfig['webgis']['wmts']['format']))
picpath = os.path.join(gConfig['webgis']['wmts']['arcgis_tiles_root'], key)
print('%s, %s' % (key, picpath))
with open(picpath, 'rb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'rb')
gSatTileCache[key] = f1.read()
except:
foundit = False
if not foundit:
key = 'missing'
if not gSatTileCache.has_key(key):
picpath = os.path.join(STATICRESOURCE_IMG_DIR, gConfig['webgis']['wmts']['missing'])
with open(picpath, 'rb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'rb')
gSatTileCache[key] = f1.read()
ret = gSatTileCache[key]
else:
if not gSatTileCache.has_key('missing'):
picpath = os.path.join(STATICRESOURCE_IMG_DIR, gConfig['webgis']['wmts']['missing'])
with open(picpath, 'rb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'rb')
gSatTileCache['missing'] = f1.read()
ret = gSatTileCache['missing']
headers['Content-Type'] = str(gConfig['mime_type'][gConfig['webgis']['wmts']['format']])
return '200 OK', headers, ret
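# WMTS front controller: normalizes the KVP parameter names to upper case and dispatches
# GetCapabilities / GetTile requests.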
def handle_wmts(environ):
dd = cgi.parse(None, environ)
d = {}
headers = {}
mimetype, ret = None, None
for k in dd.keys():
d[k.upper()] = dd[k][0]
ret, mimetype = None, None
if d.has_key('REQUEST') :
d['REQUEST'] = d['REQUEST'].replace('/1.0.0/WMTSCapabilities.xml', '')
if d.has_key('TILETYPE'):
d['TILETYPE'] = d['TILETYPE'].replace('/1.0.0/WMTSCapabilities.xml', '')
if d.has_key('SUBTYPE'):
d['SUBTYPE'] = d['SUBTYPE'].replace('/1.0.0/WMTSCapabilities.xml', '')
if d['REQUEST'].lower() in ['getcapabilities']:
mimetype, ret = handle_wmts_GetCapabilities(d)
elif d['REQUEST'].lower() in ['gettile']:
mimetype, ret = handle_wmts_GetTile(d)
headers['Content-Type'] = mimetype
return '200 OK', headers, ret
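# Cluster management endpoint: when called on the manager port with clustering enabled,
# /create_cluster starts the nginx-based cluster (if none is running) and /kill_cluster stops it.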
def handle_cluster(environ):
global gConfig
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
if int(environ['SERVER_PORT'])==int(gConfig['cluster']['manager_port']) and gConfig['cluster']['enable_cluster'] in ['true','True']:
op = ''
if environ['PATH_INFO']=='/create_cluster':
if len(get_pid_from_name('nginx'))==0:
op = 'create ok'
create_cluster()
elif environ['PATH_INFO']=='/kill_cluster':
op = 'kill ok'
kill_cluster()
#print(environ)
return '200 OK', headers, json.dumps({'result':op})
else:
return '200 OK', headers, json.dumps({'result':'cluster is disabled or not by manager'})
def handle_test(environ):
s = '测试OK'  # i.e. 'Test OK'
headers = {}
d = cgi.parse(None, environ)
#print(d)
headers['Content-Type'] = 'text/json;charset=' + ENCODING
#print(s)
return '200 OK', headers, s
def get_condition_from_dict(dct):
cond = '1=1'
for k in dct.keys():
if k in ['voltage', 'line_id', 'id', 'tower_id', 'start_tower_id', 'end_tower_id', 'model_code', 'side', 'position']:
if k == 'side':
if dct[k]=='1':
cond += " AND %s='%s'" % (k, u'正')
elif dct[k]=='0':
cond += " AND %s='%s'" % (k, u'反')
else:
cond += " AND %s='%s'" % (k, dct[k])
else:
cond += " AND %s=%s" % (k, dct[k])
#print(cond)
return cond
def mongo_get_condition_from_dict(dct):
ret = {}
for k in dct.keys():
ret[k] = dct[k][0]
print(ret)
return ret
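# GET handler: the 'op' parameter currently drives GridFS access ('gridfs' fetches a file,
# optionally as an attachment download; 'gridfs_delete' removes one). Older table/geojson/file
# query handling remains commented out inside the function body. Responses are JSON unless a
# GridFS file is streamed back directly.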
def handle_get_method(environ):
global ENCODING
global STATICRESOURCE_DIR, UPLOAD_PHOTOS_DIR, UPLOAD_VOICE_DIR
global gConfig
ret = {}
s = ''
querydict, buf = get_querydict_by_GET_POST(environ)
isgrid = False
area = ''
data = {}
headers = {}
clienttype = 'default'
if querydict.has_key('clienttype'):
clienttype = querydict['clienttype']
if querydict.has_key('grid'):
isgrid = True
if querydict.has_key('area'):
area = querydict['area']
# if querydict.has_key('geojson'):
# if querydict['geojson']=='line_towers':
# data = db_util.gen_geojson_by_lines(area)
# s = json.dumps(data, ensure_ascii=True, indent=4)
# elif querydict['geojson']=='tracks':
# data = db_util.gen_geojson_tracks(area)
# s = json.dumps(data, ensure_ascii=True, indent=4)
# else:
# k = querydict['geojson']
# p = os.path.abspath(STATICRESOURCE_DIR)
# if k == 'potential_risk':
# k = 'geojson_%s_%s' % (k, area)
# p = os.path.join(p, 'geojson', area, '%s.json' % k)
# #print(p)
# if os.path.exists(p):
# with open(p) as f:
# f1 = gevent.fileobject.FileObjectThread(f, 'r')
# s = f1.read()
# else:
# p = os.path.abspath(STATICRESOURCE_DIR)
# p = os.path.join(p, 'geojson', '%s.json' % k)
# if os.path.exists(p):
# with open(p) as f:
# f1 = gevent.fileobject.FileObjectThread(f, 'r')
# s = f1.read()
#
#
#
# if querydict.has_key('table'):
# table = querydict['table']
# dbtype = 'odbc'
# if querydict.has_key('dbtype'):
# dbtype = querydict['dbtype']
#
# if dbtype == 'pg':
# data = db_util.pg_get_records(table, get_condition_from_dict(querydict))
# else:
# data = db_util.odbc_get_records(table, get_condition_from_dict(querydict), area)
# if table in ['TABLE_TOWER']:
# if querydict.has_key('line_id'):
# data = db_util.odbc_get_sorted_tower_by_line(querydict['line_id'], area)
#
# if isgrid:
# data = {'Rows':data}
# s = json.dumps(data, ensure_ascii=True, indent=4)
# if querydict.has_key('check_file'):
# fn = querydict['check_file']
# dir_name = querydict['dir_name']
# ret["result"] = {}
# ret["result"]["filename"] = fn
# if dir_name == 'voice':
# if check_voice_file_by_fault(fn):
# ret["result"]["exist"] = "true"
# else:
# ret["result"]["exist"] = "false"
# else:
# if os.path.exists(os.path.join(UPLOAD_PHOTOS_DIR, dir_name, fn)):
# ret["result"]["exist"] = "true"
# else:
# ret["result"]["exist"] = "false"
# s = json.dumps(ret, ensure_ascii=True, indent=4)
# if querydict.has_key('delete_file'):
# fn = querydict['delete_file']
# dir_name = querydict['dir_name']
# ret["result"] = {}
# ret["result"]["filename"] = fn
# if dir_name == 'voice':
# pl = get_voice_file_by(fn)
# if len(pl)>0:
# for i in pl:
# p = os.path.join(UPLOAD_VOICE_DIR, fn)
# if os.path.exists(p):
# os.remove(p)
# ret["result"]["removed"] = "true"
# else:
# ret["result"]["removed"] = "false"
#
# else:
# p = os.path.join(UPLOAD_PHOTOS_DIR, dir_name, fn)
# if os.path.exists(p):
# os.remove(p)
# ret["result"]["removed"] = "true"
# else:
# ret["result"]["removed"] = "false"
# s = json.dumps(ret, ensure_ascii=True, indent=4)
# if querydict.has_key('list_file_dir_name'):
# dir_name = querydict['list_file_dir_name']
# ret["result"] = {}
# ret["result"]["dirs"] = [dir_name, ]
# p = os.path.join(UPLOAD_PHOTOS_DIR, dir_name)
# if os.path.exists(p):
# l = os.listdir(p)
# ret["result"]["files"] = l
# else:
# ret["result"]["files"] = []
# s = json.dumps(ret, ensure_ascii=True, indent=4)
# if querydict.has_key('get_voice_files'):
# get_voice_files = querydict['get_voice_files']
# ret["result"] = {}
# ret["result"]["ids"] = get_voice_file_all()
# s = json.dumps(ret, ensure_ascii=True, indent=4)
if querydict.has_key('op'):
op = querydict['op']
if op == "gridfs":
ret = db_util.gridfs_find(querydict, str(gConfig['wsgi']['application']))
if isinstance(ret, tuple) and ret[0] and ret[1]:
headers['Content-Type'] = str(ret[0])
if querydict.has_key('attachmentdownload'):
headers['Content-Disposition'] = 'attachment;filename="' + enc(ret[2]) + '"'
s = ret[1]
return '200 OK', headers , s
if isinstance(ret, list):
s = json.dumps(ret, ensure_ascii=True, indent=4)
elif op == "gridfs_delete":
try:
db_util.gridfs_delete(querydict, str(gConfig['wsgi']['application']))
ret = ''
except:
ret["result"] = sys.exc_info()[1].message
s = json.dumps(ret, ensure_ascii=True, indent=4)
headers['Content-Type'] = 'text/json;charset=' + ENCODING
if isinstance(ret, dict) and len(ret.keys())==0:
ret["result"] = "ok"
if isinstance(s, (str, unicode)) and len(s)==0:
s = json.dumps(ret, ensure_ascii=True, indent=4)
return '200 OK', headers, s
def create_upload_xls_dir():
global STATICRESOURCE_DIR
p = os.path.join(STATICRESOURCE_DIR, 'upload')
if not os.path.exists(p):
os.mkdir(p)
p = os.path.join(p, 'xls')
if not os.path.exists(p):
os.mkdir(p)
return p
def create_voice_dir():
global STATICRESOURCE_DIR, UPLOAD_VOICE_DIR
if not os.path.exists(UPLOAD_VOICE_DIR):
os.mkdir(UPLOAD_VOICE_DIR)
def check_voice_file_by_fault(id):
global STATICRESOURCE_DIR, UPLOAD_VOICE_DIR
create_voice_dir()
ret = False
for fn in os.listdir(UPLOAD_VOICE_DIR):
if id in fn:
ret = True
break
return ret
def get_voice_file_latest(id):
global STATICRESOURCE_DIR, UPLOAD_VOICE_DIR
create_voice_dir()
l = []
for fn in os.listdir(UPLOAD_VOICE_DIR):
if id in fn:
l.append(fn)
ret = None
if len(l)>0:
l.sort()
ret = l[-1]
return ret
def get_voice_file_by(id):
global STATICRESOURCE_DIR, UPLOAD_VOICE_DIR
create_voice_dir()
l = []
for fn in os.listdir(UPLOAD_VOICE_DIR):
if id in fn:
l.append(fn)
return l
def get_voice_file_all():
global STATICRESOURCE_DIR, UPLOAD_VOICE_DIR
s = set()
for fn in os.listdir(UPLOAD_VOICE_DIR):
p = os.path.join(UPLOAD_VOICE_DIR, fn)
if os.path.isfile(p):
arr = fn.split('@')
if len(arr)==3:
id = arr[1]
s.add(id)
return list(s)
def create_pic_dir():
global STATICRESOURCE_DIR, UPLOAD_PHOTOS_DIR
if not os.path.exists(os.path.join(STATICRESOURCE_DIR,'photos')):
os.mkdir(os.path.join(STATICRESOURCE_DIR,'photos'))
if not os.path.exists(UPLOAD_PHOTOS_DIR):
os.mkdir(UPLOAD_PHOTOS_DIR)
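# Store an uploaded file in GridFS when the request carries a 'db' parameter; returns True on success.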
def handle_upload_file(querydict, buf):
global STATICRESOURCE_DIR, UPLOAD_PHOTOS_DIR, UPLOAD_VOICE_DIR
ret = False
# root = os.path.abspath(STATICRESOURCE_DIR)
try:
if querydict.has_key('db'):
db_util.gridfs_save(querydict, querydict['filename'], buf)
ret = True
except Exception,e:
raise
return ret
def import_xls(path, fileobj, area, line_name, voltage, category):
with open(path, 'wb') as f:
f.write(fileobj)
return db_util.import_tower_xls_file(area, line_name, voltage, category, path)
def save_file_to(category, dir_id, filename, fileobj):
root = os.path.abspath(category)
if not os.path.exists(root):
os.mkdir(root)
p = os.path.join(root, filename)
if dir_id:
p = os.path.join(root, dir_id)
if not os.path.exists(p):
os.mkdir(p)
p = os.path.join(root, dir_id, filename)
with open(p, 'wb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'wb')
f1.write(fileobj)
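# Convert a list of GeoJSON features into a CZML document; tower features (those with a
# 'tower_code' property) become named point packets positioned at their coordinates.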
def geojson_to_czml(aList):
cz = czml.CZML()
for i in aList:
if i.has_key('properties') and i['properties'].has_key('id'):
packet = czml.CZMLPacket(id=i['properties']['id'])
#tower
if i['properties'].has_key('tower_code'):
packet = czml.CZMLPacket(id=i['properties']['id'], name=i['properties']['tower_name'])
packet.position = czml.Position(cartographicDegrees = [i['geometry']['coordinates'][0], i['geometry']['coordinates'][1], i['geometry']['coordinates'][2],])
packet.point = czml.Point(show=True, color={'rgba': [255, 255, 0, 255]}, pixelSize=10, outlineColor={'rgba': [0, 0, 0, 255]}, outlineWidth=1)
#packet.label = czml.Label(text=i['properties']['tower_name'], show=True, scale=0.5)
packet.description = i['properties']['tower_name']
#packet.billboard = czml.Billboard(image='http://localhost:88/img/tower.png')
cz.append(packet)
return cz
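# POST handler: saves uploaded files, runs MongoDB queries/actions when 'db' and 'collection'
# are given (optionally converting results to CZML or computing extents), and otherwise
# dispatches thunder-counter SOAP queries, ODBC saves, altitude lookups, track saves and
# mobile actions. Results are returned as JSON.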
def handle_post_method(environ):
global ENCODING
global gRequest
querydict, buf = get_querydict_by_GET_POST(environ)
ret = {}
is_upload = False
is_mongo = False
use_czml = False
get_extext = False
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
if buf is not None:
try:
is_upload = handle_upload_file(querydict, buf)
except:
pass
if querydict.has_key('db') and querydict.has_key('collection'):
is_mongo = True
dbname = querydict['db']
collection = querydict['collection']
action = None
data = None
if querydict.has_key('action'):
action = querydict['action']
del querydict['action']
if querydict.has_key('data'):
data = querydict['data']
del querydict['data']
if querydict.has_key('use_czml') and querydict['use_czml']:
use_czml = True
del querydict['use_czml']
if querydict.has_key('get_extext') and querydict['get_extext']:
get_extext = True
del querydict['get_extext']
del querydict['db']
del querydict['collection']
if action:
if 'markdown_' in action or u'markdown_' in action:
l = db_util.mongo_action(dbname, collection, action, data, querydict, 'markdown')
else:
l = db_util.mongo_action(dbname, collection, action, data, querydict)
else:
l = db_util.mongo_find(dbname, collection, querydict)
if get_extext:
l = db_util.find_extent(l)
if use_czml:
l = geojson_to_czml(l)
if isinstance(l, list):
ret = l
elif isinstance(l, dict) and len(l.keys()) > 0:
ret = l
elif isinstance(l, czml.CZML):
headers['Content-Type'] = 'text/json;charset=' + ENCODING
return '200 OK', headers, enc(l.dumps())
#else:
#ret["result"] = "%s.%s return 0 record" % (dbname, collection)
#else:
#ret["result"] = "unknown query operation"
if not is_mongo:
if querydict.has_key('thunder_counter'):
try:
ret = handle_thunder_soap(querydict)
except:
e = sys.exc_info()[1]
if hasattr(e, 'message'):
ret['result'] = e.message
else:
ret['result'] = str(e)
elif querydict.has_key('op'):
if querydict.has_key('area') and querydict['area'] and len(querydict['area'])>0:
if querydict['op'] in ['save','delete','update']:
ret = db_util.odbc_save_data_to_table(querydict['table'], querydict['op'], querydict['data'], querydict['line_id'], querydict['start_tower_id'], querydict['end_tower_id'], querydict['area'])
else:
ret = handle_requset_sync(querydict)
elif querydict['op'] in ['alt','height'] :
if querydict.has_key('lng') and querydict.has_key('lat') and isinstance(querydict['lng'], float) and isinstance(querydict['lat'], float):
ret = db_util.extract_one_altitude(querydict['lng'], querydict['lat'])
if querydict.has_key('data') and isinstance(querydict['data'], list):
ret = db_util.extract_many_altitudes(querydict['data'])
else:
ret["result"] = "unknown area"
elif querydict.has_key('tracks') and querydict.has_key('area'):
ret = db_util.save_tracks(querydict['tracks'], querydict['area'])
elif querydict.has_key('mobile_action') and querydict.has_key('area') and querydict.has_key('data'):
ret = db_util.mobile_action(querydict['mobile_action'], querydict['area'], querydict['data'])
if isinstance(ret, list):
pass
elif isinstance(ret, str) or isinstance(ret, unicode) or isinstance(ret, int) or isinstance(ret, float):
pass
elif isinstance(ret, dict):
if len(ret.keys())==0:
pass
elif ret.has_key('result'):
if isinstance(ret['result'], exceptions.Exception):
if hasattr(ret['result'], 'message'):
ret['result'] = ret['result'].message
else:
ret['result'] = str(ret['result'])
elif isinstance(ret['result'], str) or isinstance(ret['result'], unicode) or isinstance(ret['result'], int) or isinstance(ret['result'], float):
pass
elif isinstance(ret['result'], list) or isinstance(ret['result'], dict):
pass
else:
ret["result"] = "unknown operation"
else:
ret["result"] = "unknown operation"
#time.sleep(6)
#print(ret)
return '200 OK', headers, json.dumps(ret, ensure_ascii=True, indent=4)
# def handle_login(environ):
# global ENCODING
# global gRequest
# buf = environ['wsgi.input'].read()
# ret = None
# try:
# ds_plus = urllib.unquote_plus(buf)
# obj = json.loads(dec(ds_plus))
# if obj.has_key(u'db') and obj.has_key(u'collection'):
# is_mongo = True
# dbname = obj[u'db']
# collection = obj[u'collection']
# action = None
# data = None
# if obj.has_key(u'action'):
# action = obj[u'action']
# del obj[u'action']
# if obj.has_key(u'data'):
# data = obj[u'data']
# del obj[u'data']
# if obj.has_key(u'url'):
# del obj[u'url']
# if obj.has_key(u'redirect'):
# del obj[u'redirect']
# del obj[u'db']
# del obj[u'collection']
# if action:
# ret = db_util.mongo_action(dbname, collection, action, data, obj)
# except:
# raise
# return ret
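# Dispatch lightning ("thunder") counter SOAP queries, either by date range or by bounding box (envelope).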
def handle_thunder_soap(obj):
ret = {}
if obj['thunder_counter'] == 'GetFlashofDate':
ret = soap_GetFlashofDate(obj['start_time'], obj['end_time'])
if obj['thunder_counter'] == 'GetFlashofEnvelope':
ret = soap_GetFlashofEnvelope(obj['start_time'], obj['end_time'], obj['lng1'], obj['lng2'], obj['lat1'], obj['lat2'])
return ret
def dishen_ws_loop(aWebSocket, aHash):
while 1:
#now = time.strftime('%Y-%m-%d %H:%M:%S')[:10]
#ws.send("%d,%f\n" % ((time.time() - time.timezone)*1000, random.random()*10))
#t = (time.time() - time.timezone) * 1000
t = time.time() * 1000
if aWebSocket:
#message = aWebSocket.receive()
#print("message=%s" % message)
aWebSocket.send( '%s\n%d' % (str(aHash),int(t)) )
else:
break
gevent.sleep(1.0)
def check_session(environ, request, session_store):
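    # Resolve the 'authorize_platform_session_id' cookie to a server-side session, creating a new one
    # when the cookie is missing or stale; returns (session, Set-Cookie header, is_expire).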
global gConfig
def set_cookie(key, value):
secure = False
if gConfig['listen_port']['enable_ssl'].lower() == 'true':
secure = True
max_age = int(gConfig['authorize_platform']['session']['session_age'])
cookie = ('Set-Cookie', dump_cookie(key, value, domain=str(gConfig['authorize_platform']['session']['session_domain']), max_age=max_age, secure=secure))
return cookie
sid = request.cookies.get('authorize_platform_session_id')
cookie = None
is_expire = False
sess = None
if sid is None or len(sid)==0:
request.session = session_store.new({})
#session_store.save(request.session)
is_expire = True
cookie = set_cookie('authorize_platform_session_id', request.session.sid )
sess = request.session
else:
request.session = session_store.get(sid)
if request.session:
cookie = set_cookie('authorize_platform_session_id', request.session.sid)
session_store.save_if_modified(request.session)
else:
cookie = set_cookie('authorize_platform_session_id', '')
is_expire = True
sess = request.session
return sess, cookie, is_expire
def get_token_from_env(environ):
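    # Resolve the 'session_id' cookie to a cached login token in gLoginToken; returns (session_id, token or None).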
global gConfig, gLoginToken
cookie = parse_cookie(environ)
session_id = None
ret = None
if cookie.has_key('session_id'):
session_id = cookie['session_id']
if gLoginToken.has_key(session_id):
ret = gLoginToken[session_id]
return session_id, ret
def get_session_from_env(environ):
global gSessionStore
cookie = parse_cookie(environ)
session_id = None
ret = None
if cookie.has_key('session_id'):
session_id = cookie['session_id']
ret = gSessionStore.get(session_id)
return ret
def get_userinfo_from_env(environ):
global gConfig, gLoginToken
cookie = parse_cookie(environ)
session_id = None
ret = None
if cookie.has_key('session_id'):
session_id = cookie['session_id']
if gLoginToken.has_key(session_id):
ret = gLoginToken[session_id]
return session_id, ret
def get_sign_alipay(sign_data):
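    # Sign request data for Alipay: MD5 of the data concatenated with the configured partner key,
    # encoded in the configured input charset.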
global gConfig
ret = ''
text = sign_data + gConfig['pay_platform']['alipay']['partner_key']
text = enc_by_code(gConfig['pay_platform']['alipay']['input_charset'], text)
if (gConfig['pay_platform']['alipay']['sign_type']).lower() == 'md5':
md5.digest_size = 32
ret = md5.new(text).hexdigest()
return ret
def check_sign_alipay(input_charset, signature, sign_type, original_data):
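    # Verify an Alipay signature by recomputing MD5 over original_data + partner key and comparing it
    # with the supplied signature.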
global gConfig
text = original_data + gConfig['pay_platform']['alipay']['partner_key']
text = enc_by_code(str(input_charset), text)
ret = ''
if str(sign_type).lower() == 'md5':
md5.digest_size = 32
ret = md5.new(text).hexdigest()
return ret == str(signature)
def build_query_string(data={}):
    # Join the sorted key=value pairs with '&'.
    keys = sorted(data.keys())
    return '&'.join('%s=%s' % (k, data[k]) for k in keys)
def get_pay_record_by_id(querydict):
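    # Fetch the pay_log record for querydict['out_trade_no'] (alipay channel only);
    # returns None when the record or collection is missing.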
ret = None
if querydict['pay_channel'] == 'alipay':
out_trade_no = querydict['out_trade_no']
db_util.mongo_init_client('pay_platform')
client = db_util.gClientMongo['pay_platform']
db = client['pay']
if 'pay_log' in db.collection_names(False):
collection = db['pay_log']
ret = collection.find_one({"out_trade_no":out_trade_no})
return ret
def refund_alipay(querydict):
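    # Build an Alipay 'refund_fastpay_by_platform_pwd' request from the stored pay record,
    # log the refund attempt, and put the request on gJoinableQueue for asynchronous submission.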
global ENCODING
global gConfig, gSecurityConfig, gJoinableQueue
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
statuscode = '200 OK'
body = ''
href = str(gConfig['pay_platform']['alipay']['submit_gateway'])
sign_data = {}
sign_data['_input_charset'] = gConfig['pay_platform']['alipay']['input_charset']
sign_data['partner'] = gConfig['pay_platform']['alipay']['partner_id']
sign_data['service'] = 'refund_fastpay_by_platform_pwd'
sign_data['refund_date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
sign_data['batch_no'] = datetime.datetime.now().strftime("%Y%m%d") + str(ObjectId())
sign_data['batch_num'] = '1'
querydict['refund_date'] = sign_data['refund_date']
querydict['batch_no'] = sign_data['batch_no']
querydict['batch_num'] = int(sign_data['batch_num'])
if len(gConfig['pay_platform']['alipay']['return_url'])>0:
sign_data['return_url'] = gConfig['pay_platform']['alipay']['return_url']
if len(gConfig['pay_platform']['alipay']['error_notify_url'])>0:
sign_data['error_notify_url'] = gConfig['pay_platform']['alipay']['error_notify_url']
if len(gConfig['pay_platform']['alipay']['notify_url'])>0:
sign_data['notify_url'] = gConfig['pay_platform']['alipay']['notify_url']
rec = get_pay_record_by_id(querydict)
if rec:
if rec.has_key('error_code'):
body = json.dumps({'result':'refund_fail_pay_has_fail' }, ensure_ascii=True, indent=4)
else:
if rec.has_key('seller_email') \
and rec.has_key('trade_no') :
trade_no = rec['trade_no']
sign_data['seller_email'] = rec['seller_email']
querydict['seller_email'] = sign_data['seller_email']
querydict['trade_no'] = trade_no
detail_data = '%s^%.2f^%s' % (trade_no, float(querydict['refund_fee']), querydict['refund_desc'] )
sign_data['detail_data'] = detail_data
if not rec.has_key('seller_email'):
body = json.dumps({'result':'refund_fail_seller_email_required' }, ensure_ascii=True, indent=4)
if not rec.has_key('trade_no'):
body = json.dumps({'result':'refund_fail_trade_no_required' }, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'refund_fail_pay_trade_not_found:%s' % querydict['out_trade_no']}, ensure_ascii=True, indent=4)
if len(body) == 0:
#querydict['refund_result'] = 'refund_sending_to_alipay'
querydict['refund_result'] = 'refund_adding_to_queue'
querydict['refund_fee'] = float(querydict['refund_fee'])
g = gevent.spawn(update_refund_log, querydict['out_trade_no'], querydict)
#g1 = sign_and_send_alipay('post', href, sign_data)
#g1.join()
#resp = g1.value
#s = resp.read()
#print('refund response: [%s]' % dec(s))
#body = json.dumps({'result':'refund_sending_to_alipay'}, ensure_ascii=True, indent=4)
try:
gJoinableQueue.put({'thirdpay':'alipay', 'method':'post', 'url':href, 'data':sign_data})
        except gevent.queue.Full:
            body = json.dumps({'result':'refund_err_queue_full'}, ensure_ascii=True, indent=4)
        else:
            body = json.dumps({'result':'refund_adding_to_queue'}, ensure_ascii=True, indent=4)
return statuscode, headers, body
def pay_alipay(querydict):
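    # Build an Alipay 'create_direct_pay_by_user' request, log the trade, and put the request
    # on gJoinableQueue for asynchronous submission.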
global ENCODING
global gConfig, gSecurityConfig, gJoinableQueue
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
statuscode = '200 OK'
body = ''
href = str(gConfig['pay_platform']['alipay']['submit_gateway'])
if not href[-1:] == '?':
href += '?'
sign_data = {}
sign_data['_input_charset'] = gConfig['pay_platform']['alipay']['input_charset']
sign_data['total_fee'] = querydict['total_fee']
sign_data['out_trade_no'] = querydict['out_trade_no']
sign_data['partner'] = gConfig['pay_platform']['alipay']['partner_id']
sign_data['payment_type'] = '1'
sign_data['seller_email'] = querydict['seller_email']
sign_data['buyer_email'] = querydict['buyer_email']
sign_data['service'] = 'create_direct_pay_by_user'
sign_data['subject'] = querydict['subject']
if len(gConfig['pay_platform']['alipay']['return_url'])>0:
sign_data['return_url'] = gConfig['pay_platform']['alipay']['return_url']
if len(gConfig['pay_platform']['alipay']['error_notify_url'])>0:
sign_data['error_notify_url'] = gConfig['pay_platform']['alipay']['error_notify_url']
if len(gConfig['pay_platform']['alipay']['notify_url'])>0:
sign_data['notify_url'] = gConfig['pay_platform']['alipay']['notify_url']
#querydict['trade_status'] = 'pay_sending_to_alipay'
querydict['trade_status'] = 'pay_adding_to_queue'
querydict['total_fee'] = float(querydict['total_fee'])
if querydict.has_key('defaultbank'):
if gSecurityConfig['alipay']['bank_code'].has_key(querydict['defaultbank']):
sign_data['defaultbank'] = querydict['defaultbank']
sign_data['paymethod'] = 'bankPay'
else:
body = json.dumps({'result':'pay_fail_wrong_bank_code'}, ensure_ascii=True, indent=4)
return statuscode, headers, body
if gConfig['pay_platform']['alipay']['need_ctu_check'].lower() == 'true':
sign_data['need_ctu_check'] = 'Y'
if gConfig['pay_platform']['alipay']['anti_fishing'].lower() == 'true':
sign_data['anti_phishing_key'] = ''
sign_data['exter_invoke_ip'] = ''
g = gevent.spawn(update_pay_log, querydict['out_trade_no'], querydict)
#g1 = sign_and_send_alipay('post', href, sign_data)
#body = json.dumps({'result':'pay_sending_to_alipay'}, ensure_ascii=True, indent=4)
try:
gJoinableQueue.put({'thirdpay':'alipay','method':'post', 'url':href, 'data':sign_data})
    except gevent.queue.Full:
        body = json.dumps({'result':'pay_err_queue_full'}, ensure_ascii=True, indent=4)
    else:
        body = json.dumps({'result':'pay_adding_to_queue'}, ensure_ascii=True, indent=4)
return statuscode, headers, body
def handle_refund(environ):
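    # Validate refund parameters (out_trade_no, pay_channel, refund_fee, refund_desc) from the
    # query string / JSON body and dispatch valid alipay refunds to refund_alipay().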
global ENCODING
global gConfig
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
statuscode = '200 OK'
body = ''
querydict = {}
if environ.has_key('QUERY_STRING') and len(environ['QUERY_STRING'])>0:
querystring = environ['QUERY_STRING']
querystring = urllib.unquote_plus(querystring)
querydict = urlparse.parse_qs(dec(querystring))
d = {}
for k in querydict.keys():
d[k] = querydict[k][0]
querydict = d
try:
buf = environ['wsgi.input'].read()
ds_plus = urllib.unquote_plus(buf)
d = json.loads(dec(ds_plus))
for k in d.keys():
querydict[k] = d[k]
except:
pass
if len(querydict.keys()) > 0:
if querydict.has_key('out_trade_no') and len(querydict['out_trade_no'])>0\
and querydict.has_key('pay_channel') and len(querydict['pay_channel'])>0\
and querydict.has_key('refund_fee') and len(querydict['refund_fee'])>0\
and querydict.has_key('refund_desc') and len(querydict['refund_desc'])>0:
if querydict['pay_channel'] == 'alipay':
refund_fee = 0
try:
refund_fee = float(querydict['refund_fee'])
except:
body = json.dumps({'result':'refund_fail_refund_fee_wrong_format'}, ensure_ascii=True, indent=4)
refund_fee = 0
if '^' in querydict['refund_desc'] \
or '#' in querydict['refund_desc'] \
or '|' in querydict['refund_desc'] \
or '$' in querydict['refund_desc'] \
or len(querydict['refund_desc'])>128 :
refund_fee = 0
body = json.dumps({'result':'refund_fail_refund_desc_wrong_charactor'}, ensure_ascii=True, indent=4)
if refund_fee>0:
statuscode, headers, body = refund_alipay(querydict)
#else:
#body = json.dumps({'result':'refund_fail_refund_fee_wrong_format'}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'refund_fail_unsupport_pay_channel'}, ensure_ascii=True, indent=4)
if not querydict.has_key('out_trade_no') or len(querydict['out_trade_no'])==0:
body = json.dumps({'result':'refund_fail_out_trade_no_required'}, ensure_ascii=True, indent=4)
if not querydict.has_key('refund_fee') \
or (isinstance(querydict['refund_fee'], unicode) and len(querydict['refund_fee'])==0) \
or (isinstance(querydict['refund_fee'], float) and querydict['refund_fee']==0.0):
body = json.dumps({'result':'refund_fail_refund_fee_required'}, ensure_ascii=True, indent=4)
if not querydict.has_key('refund_desc') or len(querydict['refund_desc'])==0:
body = json.dumps({'result':'refund_fail_refund_desc_required'}, ensure_ascii=True, indent=4)
if not querydict.has_key('pay_channel') or len(querydict['pay_channel'])==0:
body = json.dumps({'result':'refund_fail_pay_channel_required'}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'refund_fail_wrong_data_format'}, ensure_ascii=True, indent=4)
return statuscode, headers, body
def handle_pay_getinfo(environ):
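    # Answer read-only pay queries: bank_info, error_info, and trade_status lookups
    # (by one or many out_trade_no values).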
global ENCODING
global gConfig, gSecurityConfig
def get_collection(collection):
ret = None
db_util.mongo_init_client('pay_platform')
db = db_util.gClientMongo['pay_platform'][gConfig['pay_platform']['mongodb']['database']]
if not collection in db.collection_names(False):
ret = db.create_collection(collection)
else:
ret = db[collection]
return ret
def query_pay_log(condition):
ret = []
collection = get_collection(gConfig['pay_platform']['mongodb']['collection_pay_log'])
ret = list(collection.find(condition))
#for i in cur:
#ret.append(i)
return ret
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
statuscode = '200 OK'
body = ''
querydict = {}
if environ.has_key('QUERY_STRING') and len(environ['QUERY_STRING'])>0:
querystring = environ['QUERY_STRING']
querystring = urllib.unquote_plus(querystring)
querydict = urlparse.parse_qs(dec(querystring))
d = {}
for k in querydict.keys():
d[k] = querydict[k][0]
querydict = d
try:
buf = environ['wsgi.input'].read()
ds_plus = urllib.unquote_plus(buf)
d = json.loads(dec(ds_plus))
for k in d.keys():
querydict[k] = d[k]
except:
pass
if len(querydict.keys()) > 0:
if querydict.has_key('q'):
if querydict['q'] == 'bank_info':
if querydict.has_key('bank_code'):
if querydict['bank_code'] == 'all' or len(querydict['bank_code'])==0:
body = json.dumps(gSecurityConfig['alipay']['bank_code'], ensure_ascii=True, indent=4)
else:
if gSecurityConfig['alipay']['bank_code'].has_key(querydict['bank_code']):
body = json.dumps(gSecurityConfig['alipay']['bank_code'][querydict['bank_code']], ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'wrong_bank_code'}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'unknown_query_type'}, ensure_ascii=True, indent=4)
elif querydict['q'] == 'error_info':
if querydict.has_key('error_code'):
if querydict['error_code'] == 'all' or len(querydict['error_code'])==0:
body = json.dumps(gSecurityConfig['alipay']['error_code'], ensure_ascii=True, indent=4)
else:
if gSecurityConfig['alipay']['error_code'].has_key(querydict['error_code']):
body = json.dumps(gSecurityConfig['alipay']['error_code'][querydict['error_code']], ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'wrong_error_code'}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'unknown_query_type'}, ensure_ascii=True, indent=4)
elif querydict['q'] == 'trade_status':
if querydict.has_key('out_trade_no'):
if len(querydict['out_trade_no'])>0:
l = []
if isinstance(querydict['out_trade_no'], unicode):
l = query_pay_log({'out_trade_no': querydict['out_trade_no']})
elif isinstance(querydict['out_trade_no'], list):
idlist = [ObjectId(i) for i in querydict['out_trade_no']]
l = query_pay_log({'out_trade_no': {'$in': idlist}})
if len(l) > 0:
ll = []
for i in l:
o = {}
o['out_trade_no'] = i['out_trade_no']
if i.has_key('trade_status'):
o['trade_status'] = i['trade_status']
else:
o['trade_status'] = None
if i.has_key('error_code'):
o['error_code'] = i['error_code']
else:
o['error_code'] = None
if i.has_key('refund_status'):
o['refund_status'] = i['refund_status']
else:
o['refund_status'] = None
ll.append(o)
body = json.dumps(db_util.remove_mongo_id(ll), ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'out_trade_no_not_exist'}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'out_trade_cannot_be_null'}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'out_trade_no_required'}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'unknown_query_type'}, ensure_ascii=True, indent=4)
return statuscode, headers, body
def handle_pay(environ):
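    # Validate payment parameters (out_trade_no, subject, total_fee, buyer/seller email, pay_channel)
    # from the query string / JSON body and dispatch valid alipay payments to pay_alipay().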
global ENCODING
global gConfig
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
statuscode = '200 OK'
body = ''
querydict = {}
if environ.has_key('QUERY_STRING') and len(environ['QUERY_STRING'])>0:
querystring = environ['QUERY_STRING']
querystring = urllib.unquote_plus(querystring)
querydict = urlparse.parse_qs(dec(querystring))
d = {}
for k in querydict.keys():
d[k] = querydict[k][0]
querydict = d
try:
buf = environ['wsgi.input'].read()
ds_plus = urllib.unquote_plus(buf)
d = json.loads(dec(ds_plus))
for k in d.keys():
querydict[k] = d[k]
except:
pass
if len(querydict.keys()) > 0:
if querydict.has_key('out_trade_no') and len(querydict['out_trade_no'])>0 \
and querydict.has_key('subject') and len(querydict['subject'])>0 \
and querydict.has_key('total_fee') and len(querydict['total_fee'])>0 \
and querydict.has_key('buyer_email') and len(querydict['buyer_email'])>0 \
and querydict.has_key('seller_email') and len(querydict['seller_email'])>0 \
and querydict.has_key('pay_channel') and len(querydict['pay_channel'])>0 :
if querydict['pay_channel'] == 'alipay':
#if querydict.has_key('service'):
total_fee = 0
try:
total_fee = float(querydict['total_fee'])
except:
body = json.dumps({'result':'pay_fail_total_fee_wrong_format'}, ensure_ascii=True, indent=4)
total_fee = 0
if '^' in querydict['subject'] \
or '#' in querydict['subject'] \
or '|' in querydict['subject'] \
or '$' in querydict['subject'] \
or '%' in querydict['subject'] \
or '&' in querydict['subject'] \
or '+' in querydict['subject'] \
or len(querydict['subject'])>128 :
total_fee = 0
body = json.dumps({'result':'pay_fail_subject_wrong_charactor'}, ensure_ascii=True, indent=4)
if total_fee>0:
statuscode, headers, body = pay_alipay(querydict)
else:
body = json.dumps({'result':'pay_fail_total_fee_wrong_format'}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'pay_fail_unsupport_pay_channel'}, ensure_ascii=True, indent=4)
if not querydict.has_key('out_trade_no') or len(querydict['out_trade_no'])==0:
body = json.dumps({'result':'pay_fail_out_trade_no_required'}, ensure_ascii=True, indent=4)
if not querydict.has_key('subject') or len(querydict['subject'])==0:
body = json.dumps({'result':'pay_fail_subject_required'}, ensure_ascii=True, indent=4)
if not querydict.has_key('total_fee') \
or (isinstance(querydict['total_fee'], unicode) and len(querydict['total_fee'])==0) \
or (isinstance(querydict['total_fee'], float) and querydict['total_fee']==0.0):
body = json.dumps({'result':'pay_fail_total_fee_required'}, ensure_ascii=True, indent=4)
if not querydict.has_key('buyer_email') or len(querydict['buyer_email'])==0:
body = json.dumps({'result':'pay_fail_buyer_email_required'}, ensure_ascii=True, indent=4)
if not querydict.has_key('seller_email') or len(querydict['seller_email'])==0:
body = json.dumps({'result':'pay_fail_seller_email_required'}, ensure_ascii=True, indent=4)
if not querydict.has_key('pay_channel') or len(querydict['pay_channel'])==0:
body = json.dumps({'result':'pay_fail_pay_channel_required'}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'pay_fail_wrong_data_format'}, ensure_ascii=True, indent=4)
return statuscode, headers, body
def update_refund_log(out_trade_no, data, is_insert=True):
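    # Merge data into the refund_log record matching out_trade_no; insert a new record
    # when none exists and is_insert is True.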
db_util.mongo_init_client('pay_platform')
client = db_util.gClientMongo['pay_platform']
db = client['pay']
if not 'refund_log' in db.collection_names(False):
collection = db.create_collection('refund_log')
collection.ensure_index([("out_trade_no", pymongo.ASCENDING),])
else:
collection = db['refund_log']
rec = collection.find_one({"out_trade_no":out_trade_no})
if data.has_key('refund_fee') and (isinstance(data['refund_fee'], unicode) or isinstance(data['refund_fee'], str)):
data['refund_fee'] = float(data['refund_fee'])
if rec:
for k in data.keys():
rec[k] = data[k]
wr = collection.update({'_id':rec['_id']}, db_util.add_mongo_id(rec), multi=False, upsert=False)
if wr and wr['n'] == 0:
print('update out_trade_no [%s] failed' % out_trade_no)
else:
if is_insert:
try:
_id = collection.insert( db_util.add_mongo_id(data))
#print('refund_log insert _id=%s' % str(_id))
except:
print('refund_log insert out_trade_no [%s] failed' % out_trade_no)
def update_pay_log(out_trade_no, data, is_insert=True):
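    # Merge data into the pay_log record matching out_trade_no, coercing numeric fields;
    # insert a new record when none exists and is_insert is True.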
db_util.mongo_init_client('pay_platform')
client = db_util.gClientMongo['pay_platform']
db = client['pay']
if not 'pay_log' in db.collection_names(False):
collection = db.create_collection('pay_log')
collection.ensure_index([("out_trade_no", pymongo.ASCENDING),])
else:
collection = db['pay_log']
rec = collection.find_one({"out_trade_no":out_trade_no})
if data.has_key('total_fee') and (isinstance(data['total_fee'], unicode) or isinstance(data['total_fee'], str)):
data['total_fee'] = float(data['total_fee'])
if data.has_key('refund_fee') and (isinstance(data['refund_fee'], unicode) or isinstance(data['refund_fee'], str)):
data['refund_fee'] = float(data['refund_fee'])
if data.has_key('price') and (isinstance(data['price'], unicode) or isinstance(data['price'], str)):
data['price'] = float(data['price'])
if data.has_key('quantity') and (isinstance(data['quantity'], unicode) or isinstance(data['quantity'], str)):
data['quantity'] = int(data['quantity'])
if rec:
for k in data.keys():
rec[k] = data[k]
wr = collection.update({'_id':rec['_id']}, db_util.add_mongo_id(rec), multi=False, upsert=False)
#print(wr)
if wr and wr['n'] == 0:
print('update out_trade_no [%s] failed' % out_trade_no)
else:
if is_insert:
try:
_id = collection.insert( db_util.add_mongo_id(data))
#print('pay_log insert _id=%s' % str(_id))
except:
print('pay_log insert out_trade_no [%s] failed' % out_trade_no)
def handle_alipay_return_url(environ):
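    # Parse the synchronous Alipay return_url callback from the query string and asynchronously
    # record the reported trade status in pay_log.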
global ENCODING
global gConfig, gSecurityConfig
querydict = {}
data = {}
data['pay_channel'] = 'alipay'
querystring = ''
if environ.has_key('QUERY_STRING'):
querystring = environ['QUERY_STRING']
querystring = urllib.unquote_plus(querystring)
querystring = dec_by_code(gConfig['pay_platform']['alipay']['input_charset'], querystring)
querydict = urlparse.parse_qs(querystring)
d = {}
for k in querydict.keys():
d[k] = querydict[k][0]
querydict = d
if querydict.has_key('notify_type') and 'trade_status_' in querydict['notify_type'] and querydict.has_key('out_trade_no'):
if querydict.has_key('is_success'):
if querydict['is_success'] == 'T':
data['trade_status'] = 'send_to_alipay_success'
if querydict.has_key('seller_email'):
data['seller_email'] = querydict['seller_email']
if querydict.has_key('buyer_email'):
data['buyer_email'] = querydict['buyer_email']
if querydict.has_key('seller_id'):
data['seller_id'] = querydict['seller_id']
if querydict.has_key('buyer_id'):
data['buyer_id'] = querydict['buyer_id']
if querydict.has_key('notify_time'):
data['notify_time'] = querydict['notify_time']
if querydict.has_key('notify_type'):
data['notify_type'] = querydict['notify_type']
if querydict.has_key('notify_id'):
data['notify_id'] = querydict['notify_id']
if querydict.has_key('out_trade_no'):
data['out_trade_no'] = querydict['out_trade_no']
if querydict.has_key('subject'):
data['subject'] = querydict['subject']
if querydict.has_key('payment_type'):
data['payment_type'] = querydict['payment_type']
if querydict.has_key('trade_no'):
data['trade_no'] = querydict['trade_no']
if querydict.has_key('trade_status'):
data['trade_status'] = querydict['trade_status']
if gSecurityConfig['alipay']['trade_status'].has_key(data['trade_status']):
data['trade_status_desc'] = gSecurityConfig['alipay']['trade_status'][data['trade_status']]
if querydict.has_key('gmt_create'):
data['gmt_create'] = querydict['gmt_create']
if querydict.has_key('gmt_payment'):
data['gmt_payment'] = querydict['gmt_payment']
if querydict.has_key('gmt_close'):
data['gmt_close'] = querydict['gmt_close']
if querydict.has_key('gmt_refund'):
data['gmt_refund'] = querydict['gmt_refund']
if querydict.has_key('body'):
data['body'] = querydict['body']
if querydict.has_key('error_code'):
data['error_code'] = querydict['error_code']
if querydict.has_key('bank_seq_no'):
data['bank_seq_no'] = querydict['bank_seq_no']
if querydict.has_key('out_channel_type'):
data['out_channel_type'] = querydict['out_channel_type']
if querydict.has_key('out_channel_amount'):
data['out_channel_amount'] = querydict['out_channel_amount']
if querydict.has_key('out_channel_inst'):
data['out_channel_inst'] = querydict['out_channel_inst']
if querydict.has_key('business_scene'):
data['business_scene'] = querydict['business_scene']
if querydict.has_key('total_fee'):
data['total_fee'] = querydict['total_fee']
if data.has_key('out_trade_no'):
g = gevent.spawn(update_pay_log, data['out_trade_no'], data, False)
def handle_alipay_notify_url(environ):
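    # Parse the asynchronous Alipay notify_url POST: trade-status notifications update pay_log,
    # batch_refund_notify updates refund_log.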
global gConfig, gSecurityConfig
buf = environ['wsgi.input'].read()
ds_plus = urllib.unquote_plus(buf)
ds_plus = dec_by_code(gConfig['pay_platform']['alipay']['input_charset'], ds_plus)
querydict = {}
data = {}
data['pay_channel'] = 'alipay'
try:
querydict = urlparse.parse_qs(ds_plus)
d = {}
for k in querydict.keys():
d[k] = querydict[k][0]
querydict = d
except:
querydict = {}
if querydict.has_key('seller_email'):
data['seller_email'] = querydict['seller_email']
if querydict.has_key('buyer_email'):
data['buyer_email'] = querydict['buyer_email']
if querydict.has_key('seller_id'):
data['seller_id'] = querydict['seller_id']
if querydict.has_key('buyer_id'):
data['buyer_id'] = querydict['buyer_id']
if querydict.has_key('notify_time'):
data['notify_time'] = querydict['notify_time']
if querydict.has_key('notify_id'):
data['notify_id'] = querydict['notify_id']
if querydict.has_key('notify_type'):
data['notify_type'] = querydict['notify_type']
if querydict.has_key('out_trade_no'):
data['out_trade_no'] = querydict['out_trade_no']
if querydict.has_key('subject'):
data['subject'] = querydict['subject']
if querydict.has_key('payment_type'):
data['payment_type'] = querydict['payment_type']
if querydict.has_key('trade_no'):
data['trade_no'] = querydict['trade_no']
if querydict.has_key('trade_status'):
data['trade_status'] = querydict['trade_status']
if gSecurityConfig['alipay']['trade_status'].has_key(data['trade_status']):
data['trade_status_desc'] = gSecurityConfig['alipay']['trade_status'][data['trade_status']]
if querydict.has_key('gmt_create'):
data['gmt_create'] = querydict['gmt_create']
if querydict.has_key('gmt_payment'):
data['gmt_payment'] = querydict['gmt_payment']
if querydict.has_key('gmt_close'):
data['gmt_close'] = querydict['gmt_close']
if querydict.has_key('gmt_refund'):
data['gmt_refund'] = querydict['gmt_refund']
if querydict.has_key('body'):
data['body'] = querydict['body']
if querydict.has_key('error_code'):
data['error_code'] = querydict['error_code']
if querydict.has_key('bank_seq_no'):
data['bank_seq_no'] = querydict['bank_seq_no']
if querydict.has_key('out_channel_type'):
data['out_channel_type'] = querydict['out_channel_type']
if querydict.has_key('out_channel_amount'):
data['out_channel_amount'] = querydict['out_channel_amount']
if querydict.has_key('out_channel_inst'):
data['out_channel_inst'] = querydict['out_channel_inst']
if querydict.has_key('business_scene'):
data['business_scene'] = querydict['business_scene']
if querydict.has_key('total_fee'):
data['total_fee'] = querydict['total_fee']
if querydict.has_key('notify_type') and 'trade_status_' in querydict['notify_type'] and data.has_key('out_trade_no'):
g = gevent.spawn(update_pay_log, data['out_trade_no'], data, False)
if querydict.has_key('notify_type') and querydict['notify_type'] == 'batch_refund_notify':
if querydict.has_key('batch_no'):
data['batch_no'] = querydict['batch_no']
if querydict.has_key('success_num'):
data['success_num'] = int(querydict['success_num'])
if querydict.has_key('result_details'):
arr = querydict['result_details'].split('^')
trade_no = arr[0]
refund_fee = float(arr[1])
refund_status = arr[2]
data['trade_no'] = trade_no
data['refund_fee'] = refund_fee
data['refund_status'] = refund_status
g = gevent.spawn(update_refund_log, data['trade_no'], data, False)
def handle_alipay_error_notify_url(environ):
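    # Parse the Alipay error_notify_url POST and record the error code (plus its configured
    # description) on the pay_log record.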
global gConfig, gSecurityConfig
buf = environ['wsgi.input'].read()
ds_plus = urllib.unquote_plus(buf)
ds_plus = dec_by_code(gConfig['pay_platform']['alipay']['input_charset'], ds_plus)
querydict = {}
data = {}
data['pay_channel'] = 'alipay'
try:
querydict = urlparse.parse_qs(ds_plus)
d = {}
for k in querydict.keys():
d[k] = querydict[k][0]
querydict = d
except:
querydict = {}
if querydict.has_key('out_trade_no'):
data['out_trade_no'] = querydict['out_trade_no']
if querydict.has_key('error_code'):
data['error_code'] = querydict['error_code']
if gSecurityConfig['alipay']['error_code'].has_key(data['error_code']):
data['error_desc'] = gSecurityConfig['alipay']['error_code'][data['error_code']]
if data.has_key('out_trade_no'):
g = gevent.spawn(update_pay_log, data['out_trade_no'], data, False)
#g.join()
def get_querydict_by_GET_POST(environ):
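    # Collect request parameters from the query string and from JSON / form-encoded POST data,
    # and capture the first uploaded file; returns (querydict, file bytes or None).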
querydict = {}
buf = None
if environ.has_key('QUERY_STRING'):
querystring = environ['QUERY_STRING']
querystring = urllib.unquote_plus(querystring)
querystring = dec(querystring)
try:
d = json.loads(querystring)
if isinstance(d, dict):
for k in d.keys():
querydict[k] = d[k]
except:
querydict = urlparse.parse_qs(querystring)
d = {}
for k in querydict.keys():
d[k] = querydict[k][0]
querydict = d
# try:
# # buf = environ['wsgi.input'].read()
# buf = stream.read()
# print('buf=')
# print(buf)
# ds_plus = urllib.unquote_plus(buf)
# obj = json.loads(dec(ds_plus))
# for k in obj.keys():
# querydict[k] = obj[k]
# except:
# pass
stream, form, files = werkzeug.formparser.parse_form_data(environ, charset='utf-8')
if len(form.keys()) > 0:
for key in form.keys():
try:
if isinstance(key, str):
key = dec(key)
obj = json.loads(key)
if isinstance(obj, dict):
for k in obj.keys():
querydict[k] = obj[k]
if isinstance(obj, list):
querydict = obj
except Exception,e:
print(e)
querydict[key] = form[key]
file_storage_list = []
if len(files.keys()) > 0:
for key in files.keys():
file_storage_list.extend(files.getlist(key))
for file_storage in file_storage_list:
if isinstance(file_storage, werkzeug.datastructures.FileStorage):
querydict['filename'] = file_storage.filename
querydict['content_type'] = file_storage.content_type
querydict['mimetype'] = file_storage.mimetype
# querydict['content_length'] = file_storage.content_length
buf = file_storage.read()
break
return querydict, buf
def handle_combiz_platform(environ):
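    # Endpoint router for the combiz (workflow) platform: workflow and workflow-template CRUD
    # plus blank / filled form export, guarded by an optional URL MD5 token.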
global ENCODING
global gConfig, gRequest, gFormTemplate
def get_collection(collection):
ret = None
db_util.mongo_init_client('combiz_platform')
db = db_util.gClientMongo['combiz_platform'][gConfig['combiz_platform']['mongodb']['database']]
if not collection in db.collection_names(False):
ret = db.create_collection(collection)
else:
ret = db[collection]
return ret
#Rule('/workflow_add', endpoint='workflow_add'),
#Rule('/workflow_query', endpoint='workflow_query'),
#Rule('/workflow_query/<_id>', endpoint='workflow_query'),
#Rule('/workflow_update', endpoint='workflow_update'),
#Rule('/workflow_delete', endpoint='workflow_delete'),
#Rule('/workflow_delete/<_id>', endpoint='workflow_delete'),
#Rule('/workflow_template_add', endpoint='workflow_template_add'),
#Rule('/workflow_template_query', endpoint='workflow_template_query'),
#Rule('/workflow_template_query/<_id>', endpoint='workflow_template_query'),
#Rule('/workflow_template_update', endpoint='workflow_template_update'),
#Rule('/workflow_template_delete', endpoint='workflow_template_delete'),
#Rule('/workflow_template_delete/<_id>', endpoint='workflow_template_delete'),
def workflow_add(querydict):
ret = ''
if querydict.has_key('order_id'):
try:
collection = get_collection(gConfig['combiz_platform']['mongodb']['collection_workflow'])
existone = collection.find_one({'order_id':querydict['order_id']})
if existone:
ret = json.dumps({'result':u'workflow_add_order_id_already_exist' }, ensure_ascii=True, indent=4)
else:
_id = collection.save(querydict)
o = collection.find_one({'_id':_id})
ret = json.dumps(db_util.remove_mongo_id(o), ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'workflow_add_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_add_fail' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_add_order_id_required' }, ensure_ascii=True, indent=4)
return ret
def workflow_query(querydict):
ret = ''
o = None
try:
#print(querydict)
collection = get_collection(gConfig['combiz_platform']['mongodb']['collection_workflow'])
limit = 10
skip = 0
ssort = None
cond = {}
if querydict.has_key('limit'):
limit = int(querydict['limit'])
if querydict.has_key('offset'):
skip = int(querydict['offset'])
if querydict.has_key('order'):
ssort = []
if querydict['order'] == 'asc':
ssort = [('order_id', pymongo.ASCENDING),]
if querydict['order'] == 'desc':
ssort = [('order_id', pymongo.DESCENDING),]
if querydict.has_key('_id'):
o = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
elif querydict.has_key('order_id'):
if '*' in querydict['order_id']:
cond = {'order_id': {'$regex':'^.*' + querydict['order_id'].replace('*', '') + '.*$'}}
#print(cond)
o = list(collection.find(cond, skip=skip, limit=limit, sort=ssort))
#print(o)
else:
o = collection.find_one({'order_id':querydict['order_id']})
else:
ssort = None
cond = {}
if querydict.has_key('search_field') and querydict.has_key('search'):
cond = {str(querydict['search_field']): {'$regex':'^.*' + querydict['search'].replace('*', '') + '.*$'}}
if querydict.has_key('order'):
ssort = []
if querydict['order'] == 'asc':
ssort = [(str(querydict['search_field']), pymongo.ASCENDING),]
if querydict['order'] == 'desc':
ssort = [(str(querydict['search_field']), pymongo.DESCENDING),]
o = list(collection.find(cond, skip=skip, limit=limit, sort=ssort))
if o:
ret = json.dumps(db_util.remove_mongo_id(o), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_query_workflow_not_exist' }, ensure_ascii=True, indent=4)
#if not querydict.has_key('_id') and not querydict.has_key('order_id'):
#ret = json.dumps({'result':u'workflow_query_id_or_order_id_required' }, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'workflow_query_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_query_fail' }, ensure_ascii=True, indent=4)
return ret
def workflow_update(querydict):
ret = ''
try:
collection = get_collection(gConfig['combiz_platform']['mongodb']['collection_workflow'])
if querydict.has_key('_id'):
existone = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
if existone:
collection.update({'_id':existone['_id']}, {'$set': db_util.add_mongo_id(querydict)}, multi=False, upsert=False)
one = collection.find_one(db_util.add_mongo_id({'_id':existone['_id']}))
ret = json.dumps(db_util.remove_mongo_id(one), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_update_workflow_not_exist' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_update_id_required' }, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'workflow_update_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_update_fail' }, ensure_ascii=True, indent=4)
return ret
def workflow_delete(querydict):
ret = ''
try:
collection = get_collection(gConfig['combiz_platform']['mongodb']['collection_workflow'])
if querydict.has_key('_id'):
if isinstance(querydict['_id'], str) or isinstance(querydict['_id'], unicode):
existone = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
if existone:
collection.remove({'_id':existone['_id']})
ret = json.dumps(db_util.remove_mongo_id(existone), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_delete_workflow_not_exist' }, ensure_ascii=True, indent=4)
if isinstance(querydict['_id'], list):
ids = db_util.add_mongo_id(querydict['_id'])
cond = {'_id':{'$in':ids}}
collection.remove(cond)
ret = json.dumps(db_util.remove_mongo_id(querydict['_id']), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_delete_id_required' }, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'workflow_delete_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_delete_fail' }, ensure_ascii=True, indent=4)
return ret
def workflow_template_add(querydict):
ret = ''
if querydict.has_key('name') \
and querydict.has_key('nodes') \
and querydict.has_key('edges'):
try:
collection = get_collection(gConfig['combiz_platform']['mongodb']['collection_workflow_template'])
existone = collection.find_one({'name':querydict['name']})
if existone:
ret = json.dumps({'result':u'workflow_template_add_name_already_exist' }, ensure_ascii=True, indent=4)
else:
_id = collection.save(db_util.add_mongo_id(querydict))
o = collection.find_one({'_id':_id})
ret = json.dumps(db_util.remove_mongo_id(o), ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'workflow_template_add_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_template_add_fail' }, ensure_ascii=True, indent=4)
else:
if not querydict.has_key('name'):
ret = json.dumps({'result':u'workflow_template_add_name_required' }, ensure_ascii=True, indent=4)
if not querydict.has_key('nodes'):
ret = json.dumps({'result':u'workflow_template_add_nodes_required' }, ensure_ascii=True, indent=4)
if not querydict.has_key('edges'):
ret = json.dumps({'result':u'workflow_template_add_edges_required' }, ensure_ascii=True, indent=4)
return ret
def workflow_template_query(querydict):
ret = ''
o = None
try:
collection = get_collection(gConfig['combiz_platform']['mongodb']['collection_workflow_template'])
o = None
limit = 10
skip = 0
ssort = None
cond = {}
if querydict.has_key('limit'):
limit = int(querydict['limit'])
if querydict.has_key('offset'):
skip = int(querydict['offset'])
if querydict.has_key('order'):
ssort = []
if querydict['order'] == 'asc':
ssort = [('name', pymongo.ASCENDING),]
if querydict['order'] == 'desc':
ssort = [('name', pymongo.DESCENDING),]
if querydict.has_key('name'):
if '*' in querydict['name']:
cond = {'name': {'$regex':'^.*' + querydict['name'].replace('*', '') + '.*$'}}
o = list(collection.find(cond, skip=skip, limit=limit, sort=ssort))
else:
o = collection.find_one({'name':querydict['name']})
elif querydict.has_key('_id'):
o = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
if o:
ret = json.dumps(db_util.remove_mongo_id(o), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_template_query_workflow_not_exist' }, ensure_ascii=True, indent=4)
else:
ssort = None
cond = {}
if querydict.has_key('search_field') and querydict.has_key('search'):
cond = {str(querydict['search_field']): {'$regex':'^.*' + querydict['search'].replace('*', '') + '.*$'}}
if querydict.has_key('order'):
ssort = []
if querydict['order'] == 'asc':
ssort = [(str(querydict['search_field']), pymongo.ASCENDING),]
if querydict['order'] == 'desc':
ssort = [(str(querydict['search_field']), pymongo.DESCENDING),]
o = list(collection.find(cond, skip=skip, limit=limit, sort=ssort))
ret = json.dumps(db_util.remove_mongo_id(o), ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'workflow_template_query_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_template_query_fail' }, ensure_ascii=True, indent=4)
return ret
def workflow_template_update(querydict):
ret = ''
try:
collection = get_collection(gConfig['combiz_platform']['mongodb']['collection_workflow_template'])
if querydict.has_key('_id'):
existone = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
if existone:
collection.update({'_id':existone['_id']}, {'$set': db_util.add_mongo_id(querydict)}, multi=False, upsert=False)
one = collection.find_one({'_id':existone['_id']})
ret = json.dumps(db_util.remove_mongo_id(one), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_template_update_workflow_not_exist' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_template_update_id_required' }, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'workflow_template_update_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_template_update_fail' }, ensure_ascii=True, indent=4)
return ret
def workflow_template_delete(querydict):
ret = ''
try:
            collection = get_collection(gConfig['combiz_platform']['mongodb']['collection_workflow_template'])
if querydict.has_key('_id'):
if isinstance(querydict['_id'], str) or isinstance(querydict['_id'], unicode):
existone = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
if existone:
collection.remove({'_id':existone['_id']})
ret = json.dumps(db_util.remove_mongo_id(existone), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_template_delete_workflow_not_exist' }, ensure_ascii=True, indent=4)
if isinstance(querydict['_id'], list):
ids = db_util.add_mongo_id(querydict['_id'])
cond = {'_id':{'$in':ids}}
collection.remove(cond)
ret = json.dumps(db_util.remove_mongo_id(querydict['_id']), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_template_delete_id_required' }, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'workflow_template_delete_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'workflow_template_delete_fail' }, ensure_ascii=True, indent=4)
return ret
def get_form(form_id):
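        # Look up a form template entry in gFormTemplate by its form_path.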
global gFormTemplate
ret = None
for i in gFormTemplate:
if i['form_path'] == form_id:
ret = i
break
return ret
def get_out_tmp_dir(dirname):
out_dir = os.path.join(dirname, 'export_tmp')
if not os.path.exists(out_dir):
os.mkdir(out_dir)
now = time.strftime('%Y-%m-%d %H:%M:%S')[:19].replace('-','').replace(' ','').replace(':','')
out_dir = os.path.join(out_dir, '%s-%s' % ( now , uuid.uuid4()))
if not os.path.exists(out_dir):
os.mkdir(out_dir)
return out_dir
def form_blank(querydict):
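        # Return the blank form document for querydict['form_id'], converted to the requested
        # format (default pdf), optionally as an attachment download.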
global gFormTemplate
ret = ''
content_type = 'text/json'
filename = None
if len(gFormTemplate) == 0:
p = os.path.join(STATICRESOURCE_DIR, 'form_templates', 'list.json')
if os.path.exists(p):
try:
with open(p, 'r') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'r')
gFormTemplate = json.loads(f1.read())
except:
ret = json.dumps({'result':u'form_blank_list_json_parse_error'}, ensure_ascii=True, indent=4)
return ret, content_type, filename
else:
ret = json.dumps({'result':u'form_blank_list_json_not_exist'}, ensure_ascii=True, indent=4)
return ret, content_type, filename
if querydict.has_key('form_id'):
form = get_form(querydict['form_id'])
if form and form.has_key('blank_document'):
out_path = form['blank_document']
out_path = os.path.join(STATICRESOURCE_DIR, out_path)
if os.path.exists(out_path):
ext = 'pdf'
if querydict.has_key('format'):
ext = querydict['format']
ret,content_type = form_export(out_path, ext)
if querydict.has_key('attachmentdownload') and querydict['attachmentdownload'] is True:
filename = os.path.basename(form['blank_document'])
filename = filename[:filename.rindex('.')]
filename = '%s%s.%s' % (filename , u'(空白)', ext)
else:
ret = json.dumps({'result':u'form_blank_generated_document_not_exist'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'form_blank_blank_document_need_specify_in_list_json'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'form_blank_form_id_required'}, ensure_ascii=True, indent=4)
return ret, content_type, filename
def form_fill(querydict):
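        # Look up the workflow, render its form_data for form_id into the py3o document template
        # (booleans as ballot boxes, lists as rows, images fetched from the file service, dates in
        # Chinese form), then export the result in the requested format.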
global gFormTemplate
def check_is_bool(form, fld):
ret = False
if form.has_key('bool') and isinstance(form['bool'], list):
for i in form['bool']:
if i == fld:
ret = True
break
return ret
def chinese_date(value):
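            # Convert a 'YYYY-MM-DD HH:MM:SS' timestamp into 'YYYY年MM月DD日' (Chinese year/month/day).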
ret = value
if len(ret) == 19 :
if ret[4] == u'-' and ret[7] == u'-' and ret[10] == u' ':
ret1 = ret[:4]
ret1 += u'年'
ret1 += ret[5:7]
ret1 += u'月'
ret1 += ret[8:10]
ret1 += u'日'
ret = ret1
return ret
def check_is_image(form, fld):
ret = False
if form.has_key('image') and isinstance(form['image'], list):
for i in form['image']:
if i == fld:
ret = True
break
return ret
def check_is_list(form, fld):
ret = False
if form.has_key('list') and isinstance(form['list'], list):
for i in form['list']:
if i == fld:
ret = True
break
return ret
def fill_tpl(form, form_data):
template_document = os.path.join(STATICRESOURCE_DIR, form['template_document'])
dirname = os.path.dirname(template_document)
basename = os.path.basename(template_document)
basename = basename.replace('_template', '')
out_dir = get_out_tmp_dir(dirname)
out_path = os.path.join(out_dir, basename)
t = Template(template_document, out_path)
data = {}
document = Py3oItem()
file_service_url = '%s://%s:%s/fileservice/rest/file/' % (gConfig['combiz_platform']['proxy_file']['protocol'], gConfig['combiz_platform']['proxy_file']['host'], gConfig['combiz_platform']['proxy_file']['port'])
for k in form_data.keys():
#listobj = check_is_list(form, k)
if check_is_bool(form, k):
if form_data[k] is True:
setattr(document, k, u'\u2611')
if form_data[k] is False:
setattr(document, k, u'\u2610')
elif check_is_list(form, k):
data[k] = []
for i in form_data[k]:
item = Py3oItem()
for kk in i.keys():
setattr(item, kk, chinese_date(i[kk]))
data[k].append(item)
elif check_is_image(form, k):
out_path1 = os.path.join(out_dir, form_data[k])
url = URL(file_service_url + form_data[k])
client = HTTPClient.from_url(url)
try:
response = client.get(url.request_uri)
if hasattr(response, 'status_code') and (response.status_code == 200 or response.status_code == 304):
with open(out_path1, 'wb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'wb')
f1.write(response.read())
if os.path.exists(out_path1):
t.set_image_path(k, out_path1)
except Exception,e:
print(e)
out_path1 = os.path.join(STATICRESOURCE_DIR, 'form_templates', 'document', 'no-photo.jpg')
t.set_image_path(k, out_path1)
else:
setattr(document, k, chinese_date(form_data[k]))
data['document'] = document
#print(dir(data))
t.render(data)
return out_path
ret = ''
content_type = 'text/json'
filename = None
if len(gFormTemplate) == 0:
p = os.path.join(STATICRESOURCE_DIR, 'form_templates', 'list.json')
if os.path.exists(p):
try:
with open(p, 'r') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'r')
gFormTemplate = json.loads(f1.read())
except:
ret = json.dumps({'result':u'form_fill_list_json_parse_error'}, ensure_ascii=True, indent=4)
return ret, content_type, filename
else:
ret = json.dumps({'result':u'form_fill_list_json_not_exist'}, ensure_ascii=True, indent=4)
return ret, content_type, filename
o = json.loads(workflow_query(querydict))
if o.has_key('result'):
ret = json.dumps(o, ensure_ascii=True, indent=4)
else:
if querydict.has_key('form_id'):
if o.has_key('form_data') and isinstance(o['form_data'], dict):
if querydict['form_id'] in o['form_data'].keys():
form_data = o['form_data'][querydict['form_id']]
form = get_form(querydict['form_id'])
if form and form.has_key('template_document'):
out_path = fill_tpl(form, form_data)
if os.path.exists(out_path):
ext = 'pdf'
if querydict.has_key('format'):
ext = querydict['format']
ret, content_type = form_export(out_path, ext)
if querydict.has_key('attachmentdownload') and querydict['attachmentdownload'] is True:
filename = os.path.basename(form['template_document']).replace('_template', '')
filename = filename[:filename.rindex('.')]
filename = '%s%s.%s' % (filename , u'(已填)', ext)
else:
ret = json.dumps({'result':u'form_fill_generated_document_not_exist'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'form_fill_template_document_need_specify_in_list_json'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'form_fill_form_id_not_exist'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'form_fill_form_data_is_none'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'form_fill_form_id_required'}, ensure_ascii=True, indent=4)
return ret, content_type, filename
def form_export(src, ext):
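        # Convert src with headless LibreOffice to pdf/doc/docx/html and return
        # (file bytes or error JSON, content type).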
dirname = os.path.dirname(src)
out_dir = get_out_tmp_dir(dirname)
out_path = os.path.basename(src)
idx = out_path.rindex('.')
out_path = out_path[:idx+1] + ext
out_path = os.path.join(out_dir, out_path)
        ret = json.dumps({'result':'unsupported export format.'}, ensure_ascii=True, indent=4)
content_type = 'text/json'
format = 'pdf'
if ext == 'pdf':
#format = 'pdf:writer pdf Export'
format = 'pdf'
content_type = 'application/pdf'
elif ext == 'doc':
format = 'doc:MS Word 97'
content_type = 'application/msword'
elif ext == 'docx':
format = 'docx:MS Word 2007 XML'
content_type = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
elif ext == 'html':
format = 'html:XHTML Writer File'
content_type = 'text/html'
encfunc = enc
if sys.platform == 'win32':
encfunc = enc1
cmd = [
encfunc(gConfig['combiz_platform']['libreoffice']['executable_path']),
'--headless',
'--convert-to',
format,
'--outdir',
encfunc(out_dir),
encfunc(src)
]
output = check_output(cmd)
print(output)
#if len(output.strip())>0:
#ret = json.dumps({'result':output}, ensure_ascii=True, indent=4)
#content_type = 'text/json'
if not os.path.exists(out_path):
            ret = json.dumps({'result':'export failed: file does not exist.'}, ensure_ascii=True, indent=4)
content_type = 'text/json'
if os.path.exists(out_path):
with open(out_path, 'rb') as f:
f1 = gevent.fileobject.FileObjectThread(f, 'rb')
ret = f1.read()
return ret, content_type
def check_url_token(querydict):
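        # When enable_url_md5_check is on, require _token == md5(md5prefix + '_|_' + current hour);
        # otherwise the check always passes.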
is_token_pass = False
enable_url_md5_check = False
md5prefix = ''
if gConfig['combiz_platform'].has_key('security') \
and gConfig['combiz_platform']['security'].has_key('md5prefix'):
md5prefix = str(gConfig['combiz_platform']['security']['md5prefix'])
if gConfig['combiz_platform'].has_key('security') \
and gConfig['combiz_platform']['security'].has_key('enable_url_md5_check') \
and gConfig['combiz_platform']['security']['enable_url_md5_check'].lower() == 'true':
enable_url_md5_check = True
else:
is_token_pass = True
if enable_url_md5_check:
print('checking token...')
if querydict.has_key('_token'):
plain = '%s_|_%s' % (md5prefix, time.strftime('%Y%m%d%H'))
token = md5.new(plain).hexdigest()
if token == str(querydict['_token']):
is_token_pass = True
return is_token_pass
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
statuscode = '200 OK'
body = ''
isnew = False
urls = gUrlMap.bind_to_environ(environ)
querydict, buf = get_querydict_by_GET_POST(environ)
try:
endpoint, args = urls.match()
if args.has_key('_id'):
querydict['_id'] = args['_id']
if endpoint not in []:
if not check_url_token(querydict):
body = json.dumps({'result': u'invalid_token'}, ensure_ascii=True, indent=4)
return statuscode, headers, body
if querydict.has_key('_token'):
del querydict['_token']
if endpoint == 'workflow_add':
body = workflow_add(querydict)
elif endpoint == 'workflow_query':
body = workflow_query(querydict)
elif endpoint == 'workflow_update':
body = workflow_update(querydict)
elif endpoint == 'workflow_delete':
body = workflow_delete(querydict)
elif endpoint == 'workflow_template_add':
body = workflow_template_add(querydict)
elif endpoint == 'workflow_template_query':
body = workflow_template_query(querydict)
elif endpoint == 'workflow_template_update':
body = workflow_template_update(querydict)
elif endpoint == 'workflow_template_delete':
body = workflow_template_delete(querydict)
elif endpoint == 'workflow_form_fill':
body, content_type, filename = form_fill(querydict)
headers['Content-Type'] = content_type
if filename:
headers['Content-Disposition'] = 'attachment;filename="' + enc(filename) + '"'
elif endpoint == 'workflow_form_blank':
body, content_type, filename = form_blank(querydict)
headers['Content-Type'] = content_type
if filename:
headers['Content-Disposition'] = 'attachment;filename="' + enc(filename) + '"'
else:
body = json.dumps({'result':u'access_deny'}, ensure_ascii=True, indent=4)
except HTTPException, e:
body = json.dumps({'result':u'access_deny'}, ensure_ascii=True, indent=4)
return statuscode, headers, body
def handle_chat_platform(environ, session):
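    # Chat-platform entry point: user / group / contact queries and user registration backed by
    # MongoDB, with online status taken from gWebSocketsMap.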
global ENCODING
global gConfig, gRequest, gSessionStore, gUrlMap, gSecurityConfig, gWebSocketsMap, gJoinableQueue
def get_collection(collection):
ret = None
db_util.mongo_init_client('chat_platform')
db = db_util.gClientMongo['chat_platform'][gConfig['chat_platform']['mongodb']['database']]
if not collection in db.collection_names(False):
ret = db.create_collection(collection)
else:
ret = db[collection]
return ret
def user_query(session, querydict):
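        # Query chat users by username / _id (string, list, or raw Mongo condition), with optional
        # online-status annotation from gWebSocketsMap.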
ret = []
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_users'])
q = {}
limit = 0
skip = 0
user_detail = False
if querydict.has_key('user_detail') and querydict['user_detail'] is True:
user_detail = True
del querydict['user_detail']
if querydict.has_key('limit'):
limit = int(querydict['limit'])
del querydict['limit']
if querydict.has_key('skip'):
skip = int(querydict['skip'])
del querydict['skip']
if querydict.has_key('username'):
if isinstance(querydict['username'], str) or isinstance(querydict['username'], unicode):
q['username'] = querydict['username']
if isinstance(querydict['username'], list):
q['username'] = {'$in': querydict['username']}
if isinstance(querydict['username'], dict):
q['username'] = querydict['username']
if querydict.has_key('_id'):
if isinstance(querydict['_id'], str) or isinstance(querydict['_id'], unicode):
q['_id'] = db_util.add_mongo_id(querydict['_id'])
if isinstance(querydict['_id'], list):
q['_id'] = {'$in': [db_util.add_mongo_id(i) for i in querydict['_id']]}
if isinstance(querydict['_id'], dict):
q['_id'] = querydict['_id']
rec = list(collection.find(q).limit(limit).skip(skip))
keys = gWebSocketsMap.keys()
for i in rec:
if user_detail:
if str(i['_id']) in keys:
i['online_status'] = 'online'
else:
i['online_status'] = 'offline'
ret.append(i)
return ret
def group_query(session, querydict={}):
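        # Query chat groups by group_name / _id; with user_detail, expand member ids into sanitized
        # user records annotated with online status.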
ret = []
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_groups'])
q = {}
limit = 0
skip = 0
if querydict.has_key('limit'):
limit = int(querydict['limit'])
del querydict['limit']
if querydict.has_key('skip'):
skip = int(querydict['skip'])
del querydict['skip']
if querydict.has_key('group_name'):
if isinstance(querydict['group_name'], str) or isinstance(querydict['group_name'], unicode):
q['group_name'] = querydict['group_name']
if isinstance(querydict['group_name'], list):
q['group_name'] = {'$in': querydict['group_name']}
if isinstance(querydict['group_name'], dict):
q['group_name'] = querydict['group_name']
if querydict.has_key('_id'):
if isinstance(querydict['_id'], str) or isinstance(querydict['_id'], unicode):
q['_id'] = querydict['_id']
if isinstance(querydict['_id'], list):
q['_id'] = {'$in': querydict['_id']}
ret = list(collection.find(db_util.add_mongo_id(q)).limit(limit).skip(skip))
if querydict.has_key('user_detail') and querydict['user_detail'] is True:
keys = gWebSocketsMap.keys()
for i in ret:
idx = ret.index(i)
detail = []
userlist = user_query(session, {'_id':i['members']})
for j in userlist:
if j.has_key('contacts'):
del j['contacts']
if j.has_key('password'):
del j['password']
if str(j['_id']) in keys:
j['online_status'] = 'online'
else:
j['online_status'] = 'offline'
detail.append(j)
ret[idx]['members'] = detail
return ret
def group_get(session, querydict):
rec = group_query(session, querydict)
if len(rec)>0:
ret = json.dumps(db_util.remove_mongo_id(rec), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'query_no_record'}, ensure_ascii=True, indent=4)
return ret
def user_group_get(session, querydict):
ret = []
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_groups'])
q = {}
if querydict.has_key('username'):
if isinstance(querydict['username'], str) or isinstance(querydict['username'], unicode) or isinstance(querydict['username'], dict):
q['username'] = querydict['username']
if querydict.has_key('_id'):
if isinstance(querydict['_id'], str) or isinstance(querydict['_id'], unicode):
q['_id'] = db_util.add_mongo_id(querydict['_id'])
if len(q.keys())>0:
users = user_query(session, querydict)
if len(users)>0:
user0 = users[0]
_id = user0['_id']
grps = group_query(session)
for i in grps:
if i.has_key('members') and _id in i['members']:
ret.append(i)
ret = json.dumps(db_util.remove_mongo_id(ret), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_group_get_user_not_exist'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_group_get_one_user_required'}, ensure_ascii=True, indent=4)
return ret
def all_user_get(session, querydict):
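        # List users matching an optional username filter, trimmed to _id/username/display_name/avatar
        # plus online status; returns JSON.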
limit = 0
skip = 0
filter_str = ''
if querydict.has_key('user_detail') and querydict['user_detail'] is True:
user_detail = True
del querydict['user_detail']
if querydict.has_key('limit'):
try:
limit = int(querydict['limit'])
except:
pass
del querydict['limit']
if querydict.has_key('skip'):
try:
skip = int(querydict['skip'])
except:
pass
del querydict['skip']
if querydict.has_key('filter'):
filter_str = querydict['filter']
del querydict['filter']
contactlist = user_query(session, {'username':{'$regex': '^.*' + filter_str + '.*$'}, 'limit':limit, 'skip':skip})
ret = []
keys = gWebSocketsMap.keys()
for i in contactlist:
for k in i.keys():
if not k in ['_id', 'username', 'display_name', 'avatar']:
del i[k]
if str(i['_id']) in keys:
i['online_status'] = 'online'
else:
i['online_status'] = 'offline'
ret.append(i)
ret = json.dumps(db_util.remove_mongo_id(ret), ensure_ascii=True, indent=4)
return ret
def user_contact_get(session, querydict):
ret = []
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_users'])
q = {}
if querydict.has_key('username'):
if isinstance(querydict['username'], str) or isinstance(querydict['username'], unicode):
q['username'] = querydict['username']
del querydict['username']
if querydict.has_key('_id'):
if isinstance(querydict['_id'], str) or isinstance(querydict['_id'], unicode):
q['_id'] = db_util.add_mongo_id(querydict['_id'])
del querydict['_id']
if len(q.keys())>0:
contacts = []
selfid = None
rec = collection.find_one(q)
if rec and rec.has_key('contacts'):
# contacts = rec['contacts']
contacts = [db_util.add_mongo_id(i) for i in rec['contacts']]
ret = contacts
selfid = rec['_id']
limit = 0
skip = 0
user_detail = False
if querydict.has_key('user_detail') and querydict['user_detail'] is True:
user_detail = True
del querydict['user_detail']
if querydict.has_key('limit'):
try:
limit = int(querydict['limit'])
except:
pass
del querydict['limit']
if querydict.has_key('skip'):
try:
skip = int(querydict['skip'])
except:
pass
del querydict['skip']
if user_detail:
if querydict.has_key('filter'):
contactlist = user_query(session, {'username':{'$regex': '^.*' + querydict['filter'] + '.*$'}, '_id': {'$in':contacts, '$ne':selfid}, 'limit':limit, 'skip':skip})
del querydict['filter']
else:
contactlist = user_query(session, {'_id':contacts, 'limit':limit, 'skip':skip})
ret = []
keys = gWebSocketsMap.keys()
for i in contactlist:
if i.has_key('contacts'):
del i['contacts']
if i.has_key('password'):
del i['password']
if str(i['_id']) in keys:
i['online_status'] = 'online'
else:
i['online_status'] = 'offline'
ret.append(i)
ret = json.dumps(db_util.remove_mongo_id(ret), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_contact_query_one_user_required'}, ensure_ascii=True, indent=4)
return ret
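    # user_get: like user_query(), but strips the sensitive 'contacts' and
    # 'password' fields before serializing the result to JSON.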
def user_get(session, querydict):
ret = ''
rec = user_query(session, querydict)
        for i in rec:
            if i.has_key('contacts'):
                del i['contacts']
            if i.has_key('password'):
                del i['password']
if len(rec)>0:
ret = json.dumps(db_util.remove_mongo_id(rec), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'query_no_record'}, ensure_ascii=True, indent=4)
return ret
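    # user_add: create a user document; username and password are required and
    # the username must not already exist. Optional person_info/contacts/avatar
    # are copied in, and register_date/update_date are stamped.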
def user_add(session, querydict):
ret = ''
if querydict.has_key('username') and querydict.has_key('password') and len(querydict['username'])>0 and len(querydict['password'])>0:
try:
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_users'])
existone = collection.find_one({'username':querydict['username']})
if existone:
ret = json.dumps({'result':u'user_add_fail_username_already_exist'}, ensure_ascii=True, indent=4)
else:
obj = {}
obj['username'] = querydict['username']
obj['display_name'] = querydict['username']
obj['password'] = querydict['password']
ts = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
obj['register_date'] = ts
obj['update_date'] = ts
obj['description'] = ''
obj['person_info'] = {}
obj['contacts'] = []
obj['avatar'] = None
if querydict.has_key('person_info') :
obj['person_info'] = querydict['person_info']
if querydict.has_key('contacts') and isinstance(querydict['contacts'], list):
obj['contacts'] = querydict['contacts']
if querydict.has_key('avatar') and len(querydict['avatar']) > 0:
obj['avatar'] = querydict['avatar']
_id = collection.save(db_util.add_mongo_id(obj))
rec = collection.find_one({'_id':_id})
ret = json.dumps(db_util.remove_mongo_id(rec), ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'user_add_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_add_fail' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_add_fail_username_password_required'}, ensure_ascii=True, indent=4)
return ret
def user_update(session, querydict):
ret = ''
if querydict.has_key('_id') and len(querydict['_id'])>0:
try:
_id = db_util.add_mongo_id(querydict['_id'])
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_users'])
existone = collection.find_one({'_id':_id})
if existone:
del querydict['_id']
querydict['update_date'] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
collection.update({'_id':existone['_id']}, {'$set': db_util.add_mongo_id(querydict)}, multi=False, upsert=False)
one = collection.find_one({'_id':_id})
ret = json.dumps(db_util.remove_mongo_id(one), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_update_user_not_exist'}, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'user_update_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_update_fail' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_update_fail_user_id_required'}, ensure_ascii=True, indent=4)
return ret
def user_delete(session, querydict):
ret = ''
if querydict.has_key('_id') and len(querydict['_id'])>0:
try:
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_users'])
existone = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
if existone:
collection.remove({'_id':existone['_id']})
ret = json.dumps(db_util.remove_mongo_id(existone), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_remove_user_not_exist'}, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'user_remove_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_remove_fail' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_remove_fail_user_id_required'}, ensure_ascii=True, indent=4)
return ret
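    # group_add: create a group owned by 'owner_id' with a unique 'group_name';
    # the owner becomes the first member and found_date/update_date are stamped.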
def group_add(session, querydict):
ret = ''
if querydict.has_key('owner_id')\
and len(querydict['owner_id']) > 0\
and querydict.has_key('group_name')\
and len(querydict['group_name']) > 0:
try:
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_groups'])
existone = collection.find_one({'group_name':querydict['group_name']})
if existone:
ret = json.dumps({'result':u'group_add_fail_group_name_already_exist'}, ensure_ascii=True, indent=4)
else:
obj = {}
obj['owner_id'] = querydict['owner_id']
obj['group_name'] = querydict['group_name']
ts = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
obj['found_date'] = ts
obj['update_date'] = ts
obj['members'] = [db_util.add_mongo_id(obj['owner_id']), ]
if querydict.has_key('avatar') and len(querydict['avatar']) > 0:
obj['avatar'] = querydict['avatar']
if querydict.has_key('description') and len(querydict['description']) > 0:
obj['description'] = querydict['description']
_id = collection.save(db_util.add_mongo_id(obj))
rec = collection.find_one({'_id':_id})
ret = json.dumps(db_util.remove_mongo_id(rec), ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'group_add_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'group_add_fail' }, ensure_ascii=True, indent=4)
else:
if not querydict.has_key('owner_id') or len(querydict['owner_id']) == 0:
ret = json.dumps({'result':u'group_add_fail_owner_id_required'}, ensure_ascii=True, indent=4)
if not querydict.has_key('group_name') or len(querydict['group_name']) == 0:
ret = json.dumps({'result':u'group_add_fail_group_name_required'}, ensure_ascii=True, indent=4)
return ret
def group_update(session, querydict):
ret = ''
if querydict.has_key('_id') and len(querydict['_id'])>0:
try:
_id = db_util.add_mongo_id(querydict['_id'])
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_groups'])
existone = collection.find_one({'_id':_id})
if existone:
del querydict['_id']
querydict['update_date'] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
collection.update({'_id':existone['_id']}, {'$set': db_util.add_mongo_id(querydict)}, multi=False, upsert=False)
one = collection.find_one({'_id':_id})
ret = json.dumps(db_util.remove_mongo_id(one), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'group_update_group_not_exist'}, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'group_update_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'group_update_fail' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'group_update_fail_group_id_required'}, ensure_ascii=True, indent=4)
return ret
def group_remove(session, querydict):
ret = ''
if querydict.has_key('_id') and len(querydict['_id']) > 0:
try:
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_groups'])
existone = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
if existone:
collection.remove({'_id':existone['_id']})
ret = json.dumps(db_util.remove_mongo_id(existone), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'group_remove_fail_group_not_exist'}, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'group_remove_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'group_remove_fail' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'group_remove_fail_found_group_id_required'}, ensure_ascii=True, indent=4)
return ret
def check_contact_exist(_id, alist):
ret = None
for i in alist:
if i['_id'] == _id:
ret = i
break
return ret
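    # online()/offline(): maintain gWebSocketsMap, which maps a user id to the
    # list of live websockets for that user; offline() also closes the sockets
    # and writes a 'chat/offline' entry via chat_save_log().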
def online(user_id, websocket):
if user_id and websocket and not websocket.closed:
if not gWebSocketsMap.has_key(user_id):
gWebSocketsMap[user_id] = []
if not websocket in gWebSocketsMap[user_id]:
gWebSocketsMap[user_id].append(websocket)
def offline(user_id):
if user_id and gWebSocketsMap.has_key(user_id):
for i in gWebSocketsMap[user_id]:
i.close()
del gWebSocketsMap[user_id]
chat_save_log({
'op':'chat/offline',
'from':user_id,
'timestamp':time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
})
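    # get_destination: given a sender and a target (an id/username or a list of
    # ids), return only those targets that are in the sender's contact list.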
def get_destination(session, from_id, _id):
ret = []
if isinstance(_id, str) or isinstance(_id, unicode):
userlist = user_query(session, {'_id':from_id})
if len(userlist)==0:
userlist = user_query(session, {'username':from_id})
if len(userlist)>0:
user0 = userlist[0]
if user0.has_key('contacts'):
toid = _id
try:
toid = ObjectId(_id)
except:
ul = user_query(session, {'username':_id})
if len(ul)>0:
toid = ul[0]['_id']
if db_util.add_mongo_id(str(toid)) in user0['contacts']:
ret.append(str(toid))
elif isinstance(_id, list):
userlist = user_query(session, {'_id':from_id})
if len(userlist)==0:
userlist = user_query(session, {'username':from_id})
if len(userlist)>0:
user0 = userlist[0]
if user0.has_key('contacts'):
for id in _id:
if db_util.add_mongo_id(id) in user0['contacts']:
ret.append(id)
return ret
def get_destination_group(session, from_id, _id):
ret = []
userset = set()
grps = group_query(session, {'_id':_id})
for grp in grps:
if grp.has_key('members') and len(grp['members'])>0:
if db_util.add_mongo_id(from_id) in grp['members']:
userset = userset.union(set(grp['members']))
userlist = list(userset)
for id in userlist:
ret.append(id)
return ret
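    # resend_offline_msg: pop up to 'limit' stored offline messages addressed to
    # to_id from the offline-log collection and requeue them for delivery.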
def resend_offline_msg(session, to_id, limit=10):
offlinecol = 'chat_log_offline'
if gConfig['chat_platform']['mongodb'].has_key('collection_chat_log_offline'):
offlinecol = gConfig['chat_platform']['mongodb']['collection_chat_log_offline']
collection = get_collection(offlinecol)
arr = list(collection.find({'to':db_util.add_mongo_id(to_id)}).limit(limit).sort('timestamp', pymongo.DESCENDING))
ids = [i['_id'] for i in arr]
collection.remove({'_id':{'$in': ids}})
for i in arr:
gJoinableQueue.put(db_util.remove_mongo_id(i))
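    # chat: fan a 'chat/chat' message out to the sender's contacts ('to') or to
    # the members of a group ('to_group') by pushing one queue entry per
    # recipient onto gJoinableQueue.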
def chat(session, websocket, obj={}):
tolist = []
if obj.has_key('from') and len(obj['from'])>0 and obj.has_key('msg') and len(obj['msg'])>0:
if obj.has_key('to') and len(obj['to'])>0:
tolist = get_destination(session, obj['from'], obj['to'])
if obj.has_key('to_group') and len(obj['to_group']) > 0:
tolist = get_destination_group(session, obj['from'], obj['to_group'])
for k in tolist:
try:
d = {'op': 'chat/chat', 'from': obj['from'], 'to': k, 'timestamp': time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), 'msg': obj['msg']}
gJoinableQueue.put(d)
except gevent.queue.Full:
print('chat queue is full')
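    # request_response: handle the contact/group request and response ops listed
    # below. Contact accept/remove updates both users' contact lists, group
    # join/quit/accept updates the group's member list, and the results are
    # pushed to the affected users via gJoinableQueue / broadcast().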
def request_response(session, websocket, obj={}):
#'chat/request/contact/add',
#'chat/request/contact/remove',
#'chat/response/contact/add/accept',
#'chat/response/contact/add/reject'
#'chat/request/group/join'
#'chat/request/group/quit'
#'chat/response/group/join/accept',
#'chat/response/group/join/reject',
tolist = []
try:
if obj['op'] == 'chat/response/contact/add/accept':
if obj.has_key('from') and len(obj['from'])>0 and obj.has_key('to') and len(obj['to'])>0:
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_users'])
userlist = user_query(session, {'_id':[obj['from'], obj['to']]})
for user in userlist:
if str(user['_id']) == obj['from'] and not db_util.add_mongo_id(obj['to']) in user['contacts']:
user['contacts'].append(db_util.add_mongo_id(obj['to']))
if str(user['_id']) == obj['to'] and not db_util.add_mongo_id(obj['from']) in user['contacts']:
user['contacts'].append(db_util.add_mongo_id(obj['from']))
user['update_date'] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
collection.save(user)
fromuser = {}
fromuser['op'] = obj['op']
fromuser['_id'] = obj['from']
fromuser['from'] = obj['to']
fromuser['to'] = obj['from']
fromuser['contacts'] = json.loads(user_contact_get(session, {'_id':obj['from'],'user_detail':True}))
gJoinableQueue.put(db_util.remove_mongo_id(fromuser))
touser = {}
touser['op'] = obj['op']
touser['_id'] = obj['to']
touser['from'] = obj['from']
touser['to'] = obj['to']
touser['contacts'] = json.loads(user_contact_get(session, {'_id':obj['to'],'user_detail':True}))
gJoinableQueue.put(db_util.remove_mongo_id(touser))
elif obj['op'] == 'chat/response/contact/add/reject':
if obj.has_key('from') and len(obj['from'])>0 and obj.has_key('to') and len(obj['to'])>0:
userlist = user_query(session, {'_id':obj['from']})
if len(userlist)>0:
user0 = userlist[0]
user0['op'] = obj['op']
user0['from'] = obj['from']
user0['to'] = obj['to']
if user0.has_key('password'):
del user0['password']
if user0.has_key('contacts'):
del user0['contacts']
if obj.has_key('reject_reason') and len(obj['reject_reason'])>0:
user0['reject_reason'] = obj['reject_reason']
gJoinableQueue.put(db_util.remove_mongo_id(user0))
elif obj['op'] == 'chat/request/contact/add':
if obj.has_key('from') and len(obj['from'])>0 and obj.has_key('to') and len(obj['to'])>0:
userlist = user_query(session, {'_id':obj['from']})
if len(userlist)>0:
user0 = userlist[0]
user0['op'] = obj['op']
user0['from'] = obj['from']
user0['to'] = obj['to']
if user0.has_key('password'):
del user0['password']
if user0.has_key('contacts'):
del user0['contacts']
gJoinableQueue.put(db_util.remove_mongo_id(user0))
elif obj['op'] == 'chat/request/contact/remove':
if obj.has_key('from') and len(obj['from'])>0 and obj.has_key('to') and len(obj['to'])>0:
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_users'])
userlist = user_query(session, {'_id':[obj['from'], obj['to']]})
remover, removee = None, None
for user in userlist:
if str(user['_id']) == obj['from'] and db_util.add_mongo_id(obj['to']) in user['contacts']:
user['contacts'].remove(db_util.add_mongo_id(obj['to']))
remover = user['display_name']
if str(user['_id']) == obj['to'] and db_util.add_mongo_id(obj['from']) in user['contacts']:
user['contacts'].remove(db_util.add_mongo_id(obj['from']))
removee = user['display_name']
user['update_date'] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
collection.save(user)
fromuser = {}
fromuser['op'] = obj['op']
fromuser['_id'] = obj['from']
fromuser['from'] = obj['to']
fromuser['to'] = obj['from']
fromuser['remover'] = remover
fromuser['removee'] = removee
fromuser['remove_type'] = 'remover'
fromuser['contacts'] = json.loads(user_contact_get(session, {'_id':obj['from'], 'user_detail':True}))
gJoinableQueue.put(db_util.remove_mongo_id(fromuser))
touser = {}
touser['op'] = obj['op']
touser['_id'] = obj['to']
touser['from'] = obj['from']
touser['to'] = obj['to']
touser['remover'] = remover
touser['removee'] = removee
touser['remove_type'] = 'removee'
touser['contacts'] = json.loads(user_contact_get(session, {'_id':obj['to'], 'user_detail':True}))
gJoinableQueue.put(db_util.remove_mongo_id(touser))
elif obj['op'] == 'chat/request/group/join':
if obj.has_key('from') and len(obj['from'])>0 and obj.has_key('to_group') and len(obj['to_group'])>0:
grps = group_query(session, {'_id':obj['to_group']})
if len(grps)>0:
grp0 = grps[0]
userlist = user_query(session, {'_id':obj['from']})
if len(userlist)>0:
user0 = userlist[0]
user0['op'] = obj['op']
user0['from'] = obj['from']
user0['request_src'] = obj['from']
user0['to_group'] = obj['to_group']
user0['to'] = grp0['owner_id']
if user0.has_key('password'):
del user0['password']
if user0.has_key('contacts'):
del user0['contacts']
gJoinableQueue.put(db_util.remove_mongo_id(user0))
elif obj['op'] == 'chat/request/group/quit':
if obj.has_key('from') and len(obj['from'])>0 and obj.has_key('to_group') and len(obj['to_group'])>0:
grps = group_query(session, {'_id':obj['to_group']})
if len(grps)>0:
grp0 = grps[0]
members = []
if db_util.add_mongo_id(obj['from']) in grp0['members']:
grp0['members'].remove(db_util.add_mongo_id(obj['from']))
members = [str(i) for i in grp0['members']]
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_groups'])
collection.save(grp0)
broadcast(session, websocket, members, {'op':obj['op'], 'from':obj['from'], 'to_group':obj['to_group']} )
elif obj['op'] == 'chat/response/group/join/accept':
if obj.has_key('to_group') and len(obj['to_group'])>0 and obj.has_key('request_src') and len(obj['request_src'])>0:
grps = group_query(session, {'_id': obj['to_group']})
if len(grps)>0:
grp0 = grps[0]
if not db_util.add_mongo_id(obj['request_src']) in grp0['members']:
grp0['members'].append(db_util.add_mongo_id(obj['request_src']))
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_groups'])
collection.save(grp0)
members = [str(i) for i in grp0['members']]
broadcast(session, websocket, members, obj)
elif obj['op'] == 'chat/response/group/join/reject':
if obj.has_key('from') and len(obj['from'])>0 and obj.has_key('to') and len(obj['to'])>0 and obj.has_key('to_group') and len(obj['to_group'])>0:
userlist = user_query(session, {'_id':obj['from']})
if len(userlist)>0:
user0 = userlist[0]
user0['op'] = obj['op']
user0['from'] = obj['from']
user0['to'] = obj['to']
user0['to_group'] = obj['to_group']
if user0.has_key('password'):
del user0['password']
if user0.has_key('contacts'):
del user0['contacts']
if obj.has_key('reject_reason') and len(obj['reject_reason'])>0:
user0['reject_reason'] = obj['reject_reason']
gJoinableQueue.put(db_util.remove_mongo_id(user0))
#else:
#d = {'op': obj['op'], 'from':obj['from'], 'to':k, 'timestamp':time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),}
#gJoinableQueue.put(d)
except gevent.queue.Full:
print('chat queue is full')
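    # broadcast: copy 'obj' once per recipient in 'alist' (ids or user dicts),
    # set its 'to' field and enqueue it for delivery.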
def broadcast(session, websocket, alist, obj={}):
for i in alist:
d = {}
#d['timestamp'] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
for k in obj.keys():
d[k] = obj[k]
if isinstance(i, str) or isinstance(i, unicode):
d['to'] = i
elif isinstance(i, dict):
if i.has_key('_id'):
d['to'] = i['_id']
try:
gJoinableQueue.put(d)
except:
pass
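    # handle_websocket: per-connection loop for the chat websocket. Dispatches
    # on obj['op']: queue_size, chat/online, chat/offline, chat/chat and the
    # chat/request|response/* ops; otherwise it sends a keep-alive and marks the
    # peer offline if the socket turns out to be dead.
    # Example client messages (sketch, keys as read below):
    #   {"op": "chat/online",  "_id": "<user id>", "inform_contact": true}
    #   {"op": "chat/chat",    "from": "<user id>", "to": "<contact id>", "msg": "hi"}
    #   {"op": "chat/offline", "_id": "<user id>", "inform_contact": true}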
def handle_websocket(environ):
ws = get_websocket(environ)
app = gConfig['wsgi']['application']
#session_id = None
#channel = ''
#if environ.has_key('HTTP_COOKIE'):
#arr = environ['HTTP_COOKIE'].split('=')
#if len(arr)>1:
#session_id = arr[1]
interval = 1.0
try:
interval = float(gConfig[app]['websocket']['interval_poll'])
except:
interval = 1.0
while ws and not ws.closed:
obj = ws_recv(environ)
if obj and isinstance(obj, dict) and obj.has_key('op'):
if obj['op'] == 'queue_size':
qsize = 0
if gJoinableQueue:
qsize = gJoinableQueue.qsize()
ws.send(json.dumps({'queue_size':qsize}, ensure_ascii=True, indent=4))
elif obj['op'] == 'chat/online':
rec = []
if obj.has_key('_id') and len(obj['_id'])>0:
rec = user_query(session, {'_id':obj['_id']})
elif obj.has_key('username') and len(obj['username'])>0:
rec = user_query(session, {'username':obj['username']})
if len(rec)>0:
r0 = rec[0]
_id = str(r0['_id'])
online(_id, ws)
r0['contacts'] = json.loads(user_contact_get(session, {'_id':_id,'user_detail':True}))
r0['groups'] = json.loads(user_group_get(session, {'_id':_id,'user_detail':True}))
d = db_util.remove_mongo_id(r0)
d['op'] = obj['op']
d['from'] = _id
d['to'] = _id
d['timestamp'] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
gJoinableQueue.put(d)
#ws.send(json.dumps(d, ensure_ascii=True, indent=4))
if obj.has_key('inform_contact') and obj['inform_contact'] is True:
other_contacts = gWebSocketsMap.keys()[:]
if _id in other_contacts:
other_contacts.remove(_id)
broadcast(session, ws, other_contacts, {'op':'chat/info/online','from':_id})
limit = 10
if gConfig['chat_platform'].has_key('resend') and gConfig['chat_platform']['resend'].has_key('max_resend_record_num'):
try:
limit = int(gConfig['chat_platform']['resend']['max_resend_record_num'])
except:
pass
resend_offline_msg(session, _id, limit)
else:
ws.send(json.dumps({'result':'chat_online_user_not_exist'}, ensure_ascii=True, indent=4))
elif obj['op'] == 'chat/offline':
if obj.has_key('_id'):
_id = obj['_id']
if obj.has_key('inform_contact') and obj['inform_contact'] is True:
other_contacts = gWebSocketsMap.keys()[:]
if _id in other_contacts:
other_contacts.remove(_id)
broadcast(session, ws, other_contacts, {'op':'chat/info/offline','from':_id, 'timestamp': time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())})
offline(_id)
elif obj.has_key('username'):
rec = user_query(session, {'username':obj['username']})
if len(rec)>0:
_id = str(rec[0]['_id'])
if obj.has_key('inform_contact') and obj['inform_contact'] is True:
other_contacts = gWebSocketsMap.keys()[:]
if _id in other_contacts:
other_contacts.remove(_id)
broadcast(session, ws, other_contacts, {'op':'chat/info/offline','from':_id})
offline(_id)
else:
ws.send(json.dumps({'result':'chat_offline_user_not_exist'}, ensure_ascii=True, indent=4))
else:
ws.send(json.dumps({'result':'chat_offline_username_or_id_required'}, ensure_ascii=True, indent=4))
elif obj['op'] == 'chat/chat':
chat(session, ws, obj)
elif 'chat/request' in obj['op'] or 'chat/response' in obj['op']:
request_response(session, ws, obj)
else:
try:
ws.send('')
except:
_id = None
for k in gWebSocketsMap.keys():
if ws in gWebSocketsMap[k] :
_id = k
break
if _id:
print('websocket[%s] is closed2' % _id)
offline(_id)
broadcast(session, None, gWebSocketsMap.keys(), {'op':'chat/info/offline', 'from':_id})
gevent.sleep(interval)
if ws and ws.closed:
del ws
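    # check_url_token: when chat_platform.security.enable_url_md5_check is
    # 'true', non-websocket endpoints must carry a '_token' query parameter equal
    # to md5(md5prefix + '_|_' + current 'YYYYMMDDHH'), so tokens rotate hourly.
    # A client would compute it roughly as (sketch):
    #   md5.new('%s_|_%s' % (md5prefix, time.strftime('%Y%m%d%H'))).hexdigest()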
def check_url_token(querydict):
is_token_pass = False
enable_url_md5_check = False
md5prefix = ''
if gConfig['chat_platform'].has_key('security') \
and gConfig['chat_platform']['security'].has_key('md5prefix'):
md5prefix = str(gConfig['chat_platform']['security']['md5prefix'])
if gConfig['chat_platform'].has_key('security') \
and gConfig['chat_platform']['security'].has_key('enable_url_md5_check') \
and gConfig['chat_platform']['security']['enable_url_md5_check'].lower() == 'true':
enable_url_md5_check = True
else:
is_token_pass = True
if enable_url_md5_check:
print('checking token...')
if querydict.has_key('_token'):
plain = '%s_|_%s' % (md5prefix, time.strftime('%Y%m%d%H'))
token = md5.new(plain).hexdigest()
if token == str(querydict['_token']):
is_token_pass = True
return is_token_pass
def chat_broadcast(session, querydict):
ret = '{}'
tolist = []
if querydict.has_key('from') and len(querydict['from'])>0:
if querydict.has_key('to'):
if isinstance(querydict['to'], str) or isinstance(querydict['to'], unicode):
tolist.append(querydict['to'])
if isinstance(querydict['to'], list):
tolist.extend(querydict['to'])
else:
ret = json.dumps({'result':u'chat_broadcast_to_required'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'chat_broadcast_from_required'}, ensure_ascii=True, indent=4)
if querydict.has_key('msg') and len(querydict['msg'])>0:
for k in tolist:
try:
d = {'op': 'chat/chat', 'from': querydict['from'], 'to': k, 'timestamp': time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), 'msg': querydict['msg']}
gJoinableQueue.put(d)
except gevent.queue.Full:
print('chat queue is full')
ret = json.dumps({'result':u'chat queue is full'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'chat_broadcast_msg_required'}, ensure_ascii=True, indent=4)
return ret
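    # chat_log_query: return the chat-log entries exchanged between 'from' and
    # 'to' (in both directions), newest first, honouring limit/skip.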
def chat_log_query(session, querydict):
limit = 0
skip = 0
filter_str = ''
from_id, to_id = None, None
if querydict.has_key('from') and (isinstance(querydict['from'], str) or isinstance(querydict['from'], unicode)) and len(querydict['from'])>0:
from_id = querydict['from']
if querydict.has_key('to') and (isinstance(querydict['to'], str) or isinstance(querydict['to'], unicode)) and len(querydict['to'])>0:
to_id = querydict['to']
if from_id is None or to_id is None:
return json.dumps({'result':u'chat_log_query_from_and_to_required'}, ensure_ascii=True, indent=4)
if querydict.has_key('limit'):
try:
limit = int(querydict['limit'])
except:
pass
del querydict['limit']
if querydict.has_key('skip'):
try:
skip = int(querydict['skip'])
except:
pass
del querydict['skip']
if querydict.has_key('filter'):
filter_str = querydict['filter']
del querydict['filter']
# offlinecol = 'chat_log_offline'
# if gConfig['chat_platform']['mongodb'].has_key('collection_chat_log_offline'):
# offlinecol = gConfig['chat_platform']['mongodb']['collection_chat_log_offline']
collection1 = get_collection(gConfig['chat_platform']['mongodb']['collection_chat_log'])
# collection2 = get_collection(offlinecol)
ret = list(collection1.find({'$or':[{'from':db_util.add_mongo_id(from_id), 'to':db_util.add_mongo_id(to_id)}, {'to':db_util.add_mongo_id(from_id), 'from':db_util.add_mongo_id(to_id)}]}).limit(limit).skip(skip).sort('timestamp', pymongo.DESCENDING))
# arr2 = list(collection2.find({'$or':[{'from':db_util.add_mongo_id(from_id), 'to':db_util.add_mongo_id(to_id)}, {'to':db_util.add_mongo_id(from_id), 'from':db_util.add_mongo_id(to_id)}]}).limit(limit).skip(skip).sort('timestamp', pymongo.DESCENDING))
ret = json.dumps(db_util.remove_mongo_id(ret), ensure_ascii=True, indent=4)
return ret
def chat_log_remove(session, querydict):
return ''
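    # ---- request dispatch for the chat platform ----
    # Match the URL against gUrlMap, enforce the URL token for non-websocket and
    # non-gridfs endpoints, and route to the handlers defined above.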
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
statuscode = '200 OK'
body = ''
isnew = False
urls = gUrlMap.bind_to_environ(environ)
querydict, buf = get_querydict_by_GET_POST(environ)
endpoint = ''
try:
endpoint, args = urls.match()
if endpoint not in ['handle_websocket', 'gridfs_upload', 'gridfs_get', 'gridfs_delete', 'gridfs_query']:
if not check_url_token(querydict):
body = json.dumps({'result':u'invalid_token'}, ensure_ascii=True, indent=4)
return statuscode, headers, body
if querydict.has_key('_token'):
del querydict['_token']
if endpoint == 'user_add':
body = user_add(session, querydict)
elif endpoint == 'user_remove':
body = user_delete(session, querydict)
elif endpoint == 'user_get':
body = user_get(session, querydict)
elif endpoint == 'all_user_get':
body = all_user_get(session, querydict)
elif endpoint == 'user_update':
body = user_update(session, querydict)
elif endpoint == 'group_add':
body = group_add(session, querydict)
elif endpoint == 'group_get':
body = group_get(session, querydict)
elif endpoint == 'user_group_get':
body = user_group_get(session, querydict)
elif endpoint == 'user_contact_get':
body = user_contact_get(session, querydict)
elif endpoint == 'group_update':
body = group_update(session, querydict)
elif endpoint == 'group_remove':
body = group_remove(session, querydict)
elif endpoint == 'handle_websocket':
handle_websocket(environ)
elif endpoint == 'chat_broadcast':
body = chat_broadcast(session, querydict)
elif endpoint == 'chat_log_query':
body = chat_log_query(session, querydict)
elif endpoint == 'chat_log_remove':
body = chat_log_remove(session, querydict)
elif endpoint == 'gridfs_upload':
body = gridfs_upload(environ, querydict, buf)
elif endpoint == 'gridfs_get':
if args.has_key('_id'):
querydict['_id'] = args['_id']
if args.has_key('width'):
try:
querydict['width'] = int(args['width'])
except:
querydict['width'] = 64
if args.has_key('height'):
try:
querydict['height'] = int(args['height'])
except:
querydict['height'] = 64
statuscode, headers, body = gridfs_get(environ, querydict)
elif endpoint == 'gridfs_delete':
if args.has_key('_id'):
querydict['_id'] = args['_id']
statuscode, headers, body = gridfs_delete(environ, querydict)
elif endpoint == 'gridfs_query':
if querydict.has_key('_id'):
if isinstance(querydict['_id'], str) or isinstance(querydict['_id'], unicode):
if ',' in querydict['_id']:
querydict['_id'] = querydict['_id'].split(',')
else:
querydict['_id'] = [querydict['_id'],]
if args.has_key('width'):
try:
querydict['width'] = int(args['width'])
except:
querydict['width'] = 64
if args.has_key('height'):
try:
querydict['height'] = int(args['height'])
except:
querydict['height'] = 64
if args.has_key('limit'):
try:
querydict['limit'] = int(args['limit'])
except:
querydict['limit'] = 10
if args.has_key('skip'):
try:
querydict['skip'] = int(args['skip'])
except:
querydict['skip'] = 0
statuscode, headers, body = gridfs_query(environ, querydict)
else:
body = json.dumps({'result':u'access_deny'}, ensure_ascii=True, indent=4)
except HTTPException, e:
body = json.dumps({'result':u'access_deny'}, ensure_ascii=True, indent=4)
if session:
gSessionStore.save(session)
return statuscode, headers, body
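# gridfs_get: stream one GridFS file by '_id'. If width/height are given, a
# thumbnail is returned instead; non-image types fall back to a per-mimetype
# placeholder image from the static thumbnail directory.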
def gridfs_get(environ, querydict):
global gConfig, ENCODING, STATICRESOURCE_DIR
def thumbnail(fp, size, use_base64=False):
ret = None
if 'image/' in fp.mimetype:
im = Image.open(fp)
im.thumbnail(size)
buf = StringIO.StringIO()
#print(im.format)
im.save(buf, im.format)
ret = buf.getvalue()
if use_base64:
ret = base64.b64encode(ret)
if 'application/' in fp.mimetype or 'text/' in fp.mimetype:
thumpath = gConfig['web']['thumbnail']['application/octet-stream']
if gConfig['web']['thumbnail'].has_key(fp.mimetype):
thumpath = gConfig['web']['thumbnail'][fp.mimetype]
thumpath = os.path.join(STATICRESOURCE_DIR, 'img', 'thumbnail', thumpath)
im = Image.open(thumpath)
im.thumbnail(size)
buf = StringIO.StringIO()
im.save(buf, im.format)
ret = buf.getvalue()
if use_base64:
ret = base64.b64encode(ret)
return ret
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
body = ''
statuscode = '200 OK'
if querydict.has_key('_'):
del querydict['_']
if querydict.has_key('_random'):
del querydict['_random']
if not querydict.has_key('_id'):
body = json.dumps({'result': u'gridfs_get_id_required'}, ensure_ascii=True, indent=4)
return statuscode, headers, body
app = gConfig['wsgi']['application']
if gConfig.has_key(app):
collection = 'fs'
if gConfig[app].has_key('mongodb') and gConfig[app]['mongodb'].has_key('gridfs_collection'):
collection = str(gConfig[app]['mongodb']['gridfs_collection'])
if len(collection) == 0:
collection = 'fs'
db_util.mongo_init_client(app)
dbname = gConfig[app]['mongodb']['database']
db = db_util.gClientMongo[app][dbname]
fs = gridfs.GridFS(db, collection=collection)
_id = db_util.add_mongo_id(querydict['_id'])
try:
f = fs.get(_id)
headers['Content-Type'] = str(f.content_type)
if querydict.has_key('width') and querydict.has_key('height') \
and querydict['width']>0 and querydict['width']<8192 \
and querydict['height']>0 and querydict['height']<8192 :
if 'image/' in f.content_type:
body = thumbnail(f, (querydict['width'], querydict['height']), False)
else:
body = thumbnail(f, (128, 128), False)
headers['Content-Type'] = 'image/png'
if body is None:
body = json.dumps({'result': u'gridfs_get_error_invalid_image_format'}, ensure_ascii=True, indent=4)
else:
body = f.read()
if querydict.has_key('attachmentdownload'):
headers['Content-Disposition'] = 'attachment;filename="' + enc(f.filename) + '"'
except gridfs.errors.NoFile:
body = json.dumps({'result': u'gridfs_get_file_not_exist'}, ensure_ascii=True, indent=4)
except Exception,e:
headers['Content-Type'] = 'text/json;charset=' + ENCODING
body = json.dumps({'result': u'gridfs_get_error:%s' % e.message}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result': u'gridfs_get_cannot_find_wsgi_app [%s]' % app}, ensure_ascii=True, indent=4)
return statuscode, headers, body
def gridfs_delete(environ, querydict):
global gConfig, ENCODING
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
body = ''
statuscode = '200 OK'
if querydict.has_key('_'):
del querydict['_']
if querydict.has_key('_random'):
del querydict['_random']
if not querydict.has_key('_id'):
body = json.dumps({'result': u'gridfs_delete_id_required'}, ensure_ascii=True, indent=4)
return statuscode, headers, body
app = gConfig['wsgi']['application']
if gConfig.has_key(app):
collection = 'fs'
if gConfig[app].has_key('mongodb') and gConfig[app]['mongodb'].has_key('gridfs_collection'):
collection = str(gConfig[app]['mongodb']['gridfs_collection'])
if len(collection) == 0:
collection = 'fs'
db_util.mongo_init_client(app)
dbname = gConfig[app]['mongodb']['database']
db = db_util.gClientMongo[app][dbname]
fs = gridfs.GridFS(db, collection=collection)
arr = querydict['_id'].split(',')
ids = []
for i in arr:
ids.append(db_util.add_mongo_id(i))
try:
for i in ids:
fs.delete(i)
body = json.dumps(querydict, ensure_ascii=True, indent=4)
except Exception,e:
body = json.dumps({'result': u'gridfs_delete_error:%s' % e.message}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result': u'gridfs_delete_cannot_find_wsgi_app [%s]' % app}, ensure_ascii=True, indent=4)
return statuscode, headers, body
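# gridfs_query: query GridFS (by an '_id' list or arbitrary filters) and return
# a JSON array of {_id, mimetype, filename, data}, where 'data' is a base64
# thumbnail scaled to the requested width/height.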
def gridfs_query(environ, querydict):
global gConfig, ENCODING, STATICRESOURCE_DIR
def thumbnail(fp, size, use_base64=False):
ret = None
if 'image/' in fp.mimetype:
im = Image.open(fp)
im.thumbnail(size)
buf = StringIO.StringIO()
#print(im.format)
im.save(buf, im.format)
ret = buf.getvalue()
if use_base64:
ret = base64.b64encode(ret)
if 'application/' in fp.mimetype or 'text/' in fp.mimetype:
thumpath = gConfig['web']['thumbnail']['application/octet-stream']
if gConfig['web']['thumbnail'].has_key(fp.mimetype):
thumpath = gConfig['web']['thumbnail'][fp.mimetype]
thumpath = os.path.join(STATICRESOURCE_DIR, 'img', 'thumbnail', thumpath)
im = Image.open(thumpath)
im.thumbnail(size)
buf = StringIO.StringIO()
im.save(buf, im.format)
ret = buf.getvalue()
if use_base64:
ret = base64.b64encode(ret)
return ret
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
body = '[]'
statuscode = '200 OK'
app = gConfig['wsgi']['application']
if querydict.has_key('_'):
del querydict['_']
if querydict.has_key('_random'):
del querydict['_random']
if gConfig.has_key(app):
collection = 'fs'
if gConfig[app].has_key('mongodb') and gConfig[app]['mongodb'].has_key('gridfs_collection'):
collection = str(gConfig[app]['mongodb']['gridfs_collection'])
if len(collection) == 0:
collection = 'fs'
db_util.mongo_init_client(app)
dbname = gConfig[app]['mongodb']['database']
db = db_util.gClientMongo[app][dbname]
fs = gridfs.GridFS(db, collection=collection)
limit = 10
skip = 0
if querydict.has_key('limit'):
limit = querydict['limit']
del querydict['limit']
if querydict.has_key('skip'):
skip = querydict['skip']
del querydict['skip']
try:
if querydict.has_key('width') and querydict.has_key('height') \
and querydict['width']>0 and querydict['width']<8192 \
and querydict['height']>0 and querydict['height']<8192 :
w, h = querydict['width'], querydict['height']
del querydict['width']
del querydict['height']
cur = None
if querydict.has_key('_id'):
ids = db_util.add_mongo_id(querydict['_id'])
cur = fs.find({'_id':{'$in':ids}}).limit(limit).skip(skip)
else:
cur = fs.find(db_util.add_mongo_id(querydict)).limit(limit).skip(skip)
arr = []
for f in cur:
b64str = thumbnail(f, (w, h), True)
if 'application/' in f.content_type:
f.mimetype = 'image/png'
arr.append({'_id':db_util.remove_mongo_id(f._id), 'mimetype':f.mimetype,'filename':enc(f.filename), 'data': b64str})
body = json.dumps(arr, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result': u'gridfs_query_size_required'}, ensure_ascii=True, indent=4)
except gridfs.errors.NoFile:
body = json.dumps({'result': u'gridfs_query_file_not_exist'}, ensure_ascii=True, indent=4)
except Exception,e:
body = json.dumps({'result': u'gridfs_query_error:%s' % e.message}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result': u'gridfs_query_cannot_find_wsgi_app [%s]' % app}, ensure_ascii=True, indent=4)
return statuscode, headers, body
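# gridfs_upload: store the request body in GridFS with the query parameters as
# metadata. An optional '_uniqueIndex' (comma-separated field names) makes the
# upload replace any existing files whose metadata matches those fields.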
def gridfs_upload(environ, querydict, buf):
global gConfig
app = gConfig['wsgi']['application']
body = ''
if gConfig.has_key(app):
collection = 'fs'
if gConfig[app].has_key('mongodb') and gConfig[app]['mongodb'].has_key('gridfs_collection'):
collection = str(gConfig[app]['mongodb']['gridfs_collection'])
if len(collection) == 0:
collection = 'fs'
db_util.mongo_init_client(app)
dbname = gConfig[app]['mongodb']['database']
db = db_util.gClientMongo[app][dbname]
if querydict.has_key('file_id'):
del querydict['file_id']
fs = gridfs.GridFS(db, collection=collection)
_id = None
try:
querydict = db_util.add_mongo_id(querydict);
if querydict.has_key('_uniqueIndex'):
uniqueIndex = querydict['_uniqueIndex']
cond = {}
if (isinstance(uniqueIndex, unicode) or isinstance(uniqueIndex, str)) and len(uniqueIndex)>0:
arr = uniqueIndex.split(',')
for indexName in arr:
indexName = indexName.strip()
if querydict.has_key(indexName):
cond[indexName] = querydict[indexName]
if len(cond.keys())>1:
idlist = []
cur = fs.find(cond)
for i in cur:
idlist.append(i._id)
for i in idlist:
fs.delete(i)
del querydict['_uniqueIndex']
_id = fs.put(buf, **querydict)
except gridfs.errors.FileExists:
if querydict.has_key('_id'):
_id = db_util.add_mongo_id(querydict['_id'])
fs.delete(_id)
_id = fs.put(buf, **querydict)
except:
raise
body = json.dumps({'_id':db_util.remove_mongo_id(_id)}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':u'cannot find wsgi app [%s]' % app}, ensure_ascii=True, indent=4)
return body
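# handle_authorize_platform: WSGI sub-handler for the authorization platform:
# user accounts, roles, function trees and session management, stored in the
# authorize_platform MongoDB database.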
def handle_authorize_platform(environ, session):
global ENCODING
global gConfig, gRequest, gSessionStore, gUrlMap, gSecurityConfig, gWebSocketsMap, gJoinableQueue
def get_collection(collection):
ret = None
db_util.mongo_init_client('authorize_platform')
db = db_util.gClientMongo['authorize_platform'][gConfig['authorize_platform']['mongodb']['database']]
if not collection in db.collection_names(False):
ret = db.create_collection(collection)
else:
ret = db[collection]
return ret
def get_all_functions():
ret = []
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_functions'])
ret = list(collection.find({}))
#for i in cur:
#ret.append(i)
return ret
def get_all_roles(exclude_template=False):
ret = []
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_roles'])
if exclude_template:
ret = list(collection.find({'name':{'$not':re.compile("template")}}))
else:
ret = list(collection.find({}))
#for i in cur:
#ret.append(i)
return ret
def check_role_can_be_delete(_id):
def get_id_list(node):
reet = []
if node.has_key('roles'):
for i in node['roles']:
reet.append(i)
return reet
ret = True
for i in get_user():
idlist = get_id_list(i)
if _id in idlist:
ret = False
break
return ret
def check_function_can_be_delete(_id):
def get_id_list(node):
reet = []
if node.has_key('_id'):
reet.append(node['_id'])
if node.has_key('children'):
for i in node['children']:
reet.extend(get_id_list(i))
return reet
ret = True
for i in get_all_roles():
idlist = get_id_list(i)
if _id in idlist:
ret = False
break
return ret
def check_valid_user(session, user=None):
ret = False
if session and session.has_key('username') and len(session['username'])>0:
if user:
ret = session['username'] == user
else:
ret = True
return ret
def function_add(session, querydict):
if not check_valid_user(session, 'admin'):
return json.dumps({'result':u'admin_permission_required'}, ensure_ascii=True, indent=4)
ret = ''
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_functions'])
if querydict.has_key('name'):
existone = collection.find_one({'name':querydict['name']})
if existone:
ret = json.dumps({'result':u'function_add_fail_name_exist'}, ensure_ascii=True, indent=4)
else:
_id = collection.save(db_util.add_mongo_id(querydict))
rec = collection.find_one({'_id':_id})
ret = db_util.remove_mongo_id(rec)
ret = json.dumps(ret, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'function_add_fail_name_required'}, ensure_ascii=True, indent=4)
return ret
def function_query(session, querydict):
#if not check_valid_user(session, 'admin'):
#return json.dumps({'result':u'admin_permission_required'}, ensure_ascii=True, indent=4)
l = get_all_functions()
ret = json.dumps(db_util.remove_mongo_id(l), ensure_ascii=True, indent=4)
return ret
def function_update(session, querydict):
if not check_valid_user(session, 'admin'):
return json.dumps({'result':u'admin_permission_required'}, ensure_ascii=True, indent=4)
ret = ''
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_functions'])
if querydict.has_key('_id'):
wr = collection.update({'_id':db_util.add_mongo_id(querydict['_id'])}, db_util.add_mongo_id(querydict), multi=False, upsert=False)
rec = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
ret = db_util.remove_mongo_id(rec)
ret = json.dumps(ret, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'function_update_fail_id_required'}, ensure_ascii=True, indent=4)
return ret
def function_delete(session, querydict):
if not check_valid_user(session, 'admin'):
return json.dumps({'result':u'admin_permission_required'}, ensure_ascii=True, indent=4)
ret = ''
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_functions'])
if querydict.has_key('_id'):
existone = collection.find_one({'_id': db_util.add_mongo_id(querydict['_id'])})
if existone:
if check_function_can_be_delete(existone['_id']):
wr = collection.remove({'_id':db_util.add_mongo_id(existone['_id'])})
ret = json.dumps(db_util.remove_mongo_id(existone), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'function_delete_fail_need_deleted_in_role_first'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'function_delete_fail_not_exist'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'function_delete_fail_id_required'}, ensure_ascii=True, indent=4)
return ret
def role_add(session, querydict):
if not check_valid_user(session, 'admin'):
return json.dumps({'result':u'admin_permission_required'}, ensure_ascii=True, indent=4)
ret = ''
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_roles'])
if querydict.has_key('name'):
existone = collection.find_one({'name':querydict['name']})
if existone:
ret = json.dumps({'result':u'role_add_fail_name_already_exist'}, ensure_ascii=True, indent=4)
else:
_id = collection.save(db_util.add_mongo_id(querydict))
rec = collection.find_one({'_id':_id})
ret = db_util.remove_mongo_id(rec)
ret = json.dumps(ret, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'role_add_fail_name_required'}, ensure_ascii=True, indent=4)
return ret
def role_query(session, querydict):
if not check_valid_user(session, 'admin'):
return json.dumps({'result':u'admin_permission_required'}, ensure_ascii=True, indent=4)
l = get_all_roles(True)
ret = json.dumps(db_util.remove_mongo_id(l), ensure_ascii=True, indent=4)
return ret
def role_update(session, querydict):
if not check_valid_user(session, 'admin'):
return json.dumps({'result':u'admin_permission_required'}, ensure_ascii=True, indent=4)
ret = ''
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_roles'])
if querydict.has_key('_id'):
wr = collection.update({'_id':db_util.add_mongo_id(querydict['_id'])}, db_util.add_mongo_id(querydict), multi=False, upsert=False)
rec = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
ret = db_util.remove_mongo_id(rec)
ret = json.dumps(ret, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'role_update_fail_id_required'}, ensure_ascii=True, indent=4)
return ret
def role_delete(session, querydict):
if not check_valid_user(session, 'admin'):
return json.dumps({'result':u'admin_permission_required'}, ensure_ascii=True, indent=4)
ret = ''
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_roles'])
if querydict.has_key('_id'):
existone = collection.find_one({'_id': db_util.add_mongo_id(querydict['_id'])})
if existone:
if check_role_can_be_delete(existone['_id']):
wr = collection.remove({'_id':db_util.add_mongo_id(existone['_id'])})
ret = json.dumps(db_util.remove_mongo_id(existone), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'role_delete_fail_need_delete_in_user_first'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'role_delete_fail_not_exist'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'role_delete_fail_id_required'}, ensure_ascii=True, indent=4)
return ret
def role_template_get(session, querydict):
if not check_valid_user(session, 'admin'):
return json.dumps({'result':u'admin_permission_required'}, ensure_ascii=True, indent=4)
ret = ''
l = get_all_roles()
for i in l:
if i['name'] == 'template':
ret = json.dumps(db_util.remove_mongo_id(i), ensure_ascii=True, indent=4)
break
if len(ret) == 0:
ret = json.dumps({}, ensure_ascii=True, indent=4)
return ret
def role_template_save(session, querydict):
if not check_valid_user(session, 'admin'):
return json.dumps({'result':u'admin_permission_required'}, ensure_ascii=True, indent=4)
ret = ''
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_roles'])
if querydict.has_key('_id'):
wr = collection.update({'_id':db_util.add_mongo_id(querydict['_id'])}, db_util.add_mongo_id(querydict), multi=False, upsert=False)
rec = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
ret = db_util.remove_mongo_id(rec)
ret = json.dumps(ret, ensure_ascii=True, indent=4)
else:
#ret = json.dumps({'result':u'role_template_save_fail_id_required'}, ensure_ascii=True, indent=4)
_id = collection.save(db_util.add_mongo_id(querydict))
if _id:
querydict['_id'] = _id
ret = json.dumps(db_util.remove_mongo_id(querydict), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'role_template_save_fail'}, ensure_ascii=True, indent=4)
return ret
def get_user(user=None):
ret = []
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_user_account'])
if user:
ret = list(collection.find({'username':user}))
else:
ret = list(collection.find({}))
#for i in cur:
#ret.append(i)
return ret
def get_funclist_by_roles(roles):
def get_func_list(node):
ret = []
if node.has_key('_id'):
if node.has_key('checked') and node['checked'] is True:
ret.append(node['_id'])
if node.has_key('children'):
for i in node['children']:
ret.extend(get_func_list(i))
return ret
ret = []
rolelist = get_all_roles(True)
for node in rolelist:
if node.has_key('_id') and node['_id'] in roles:
ret.extend(get_func_list(node))
return ret
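    # check_user_has_function: for the given username and list of function ids,
    # report which of those functions are enabled by the roles assigned to the user.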
def check_user_has_function(session, querydict):
ret = ''
if not check_valid_user(session):
return json.dumps({'result':u'username_required'}, ensure_ascii=True, indent=4)
if querydict.has_key('username') :
if querydict.has_key('functions') :
if len(querydict['functions'])>0:
userlist = get_user(querydict['username'])
if len(userlist)>0:
if userlist[0].has_key('roles') and isinstance(userlist[0]['roles'], list) and len(userlist[0]['roles'])>0:
roles = userlist[0]['roles']
funclist = get_funclist_by_roles(roles)
retlist = []
for f in querydict['functions']:
o = {}
o['_id'] = f
if ObjectId(f) in funclist:
o['enable'] = True
else:
o['enable'] = False
retlist.append(o)
ret = json.dumps(retlist, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'this_user_has_no_role'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'username_not_exist'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'function_id_list_required'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'functions_required'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'username_required'}, ensure_ascii=True, indent=4)
return ret
def user_query(session, querydict):
if not check_valid_user(session, 'admin'):
return json.dumps({'result':u'admin_permission_required'}, ensure_ascii=True, indent=4)
ret = ''
if querydict.has_key('username') and len(querydict['username'])>0:
l = get_user(querydict['username'])
if len(l)>0:
ret = json.dumps(db_util.remove_mongo_id(l[0]), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'username_not_found'}, ensure_ascii=True, indent=4)
else:
l = get_user()
ret = json.dumps(db_util.remove_mongo_id(l), ensure_ascii=True, indent=4)
return ret
def user_add(session, querydict):
ret = ''
if querydict.has_key('username') and querydict.has_key('password') and len(querydict['username'])>0 and len(querydict['password'])>0:
try:
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_user_account'])
existone = collection.find_one({'username':querydict['username']})
if existone:
ret = json.dumps({'result':u'register_fail_username_already_exist'}, ensure_ascii=True, indent=4)
else:
_id = collection.save(db_util.add_mongo_id(querydict))
rec = collection.find_one({'_id':_id})
ret = json.dumps(db_util.remove_mongo_id(rec), ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'register_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'register_fail' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'register_fail_username_password_required'}, ensure_ascii=True, indent=4)
return ret
def user_delete(session, querydict):
ret = ''
if querydict.has_key('username') and len(querydict['username'])>0:
try:
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_user_account'])
existone = collection.find_one({'username':querydict['username']})
if existone:
collection.remove({'_id':existone['_id']})
ret = json.dumps(db_util.remove_mongo_id(existone), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'unregister_fail_not_exist' }, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'unregister_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'unregister_fail' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'unregister_fail_username_required'}, ensure_ascii=True, indent=4)
return ret
def reset_password(session, querydict):
ret = ''
if querydict.has_key('username') and len(querydict['username'])>0 and querydict.has_key('password') and len(querydict['password'])>0:
try:
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_user_account'])
one = collection.find_one({'username':querydict['username']})
if one:
collection.update({'username':querydict['username']}, {'$set':{'password':querydict['password']}}, multi=False, upsert=False)
ret = json.dumps(db_util.remove_mongo_id(one), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'reset_password_fail_not_exist'}, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'reset_password_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'reset_password_fail' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'reset_password_fail_username_password_required'}, ensure_ascii=True, indent=4)
return ret
def user_update(session, querydict):
ret = ''
if querydict.has_key('username') and len(querydict['username'])>0 :
try:
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_user_account'])
one = collection.find_one({'username':querydict['username']})
if one:
collection.update({'username':querydict['username']}, db_util.add_mongo_id(querydict), multi=False, upsert=False)
ret = json.dumps(db_util.remove_mongo_id(one), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_update_fail_not_exist'}, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'user_update_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_update_fail' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'user_update_fail_username_required'}, ensure_ascii=True, indent=4)
return ret
def login(session, querydict):
ok = False
ret = ''
if querydict.has_key('username') and querydict.has_key('password') and len(querydict['username'])>0 and len(querydict['password'])>0:
try:
check, ip = session_check_user_ip(environ, querydict['username'])
if gSessionStore and not check:
ret = json.dumps({'result':u'other_ip_already_login:%s' % ip }, ensure_ascii=True, indent=4)
return ret, ok
if gSessionStore and session:
collection = get_collection(gConfig['authorize_platform']['mongodb']['collection_user_account'])
one = collection.find_one({'username':querydict['username'], 'password':querydict['password']})
if one:
ret = json.dumps(db_util.remove_mongo_id(one), ensure_ascii=True, indent=4)
ok = True
else:
ret = json.dumps({'result':u'login_fail_wrong_username_or_password' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'login_fail_session_expired' }, ensure_ascii=True, indent=4)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret = json.dumps({'result':u'login_fail:%s' % sys.exc_info()[1].message}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'login_fail' }, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'login_fail_username_password_required'}, ensure_ascii=True, indent=4)
return ret, ok
def auth_check(session, querydict, isnew):
ret = ''
if session :
if querydict.has_key('username') and len(querydict['username'])>0:
if isnew is True:
session['username'] = querydict['username']
gSessionStore.save(session)
ret = json.dumps({'result':u'auth_check_ok_session_saved'}, ensure_ascii=True, indent=4)
else:
if session.sid:
user = gSessionStore.get_data_by_username(session.sid, querydict['username'])
if user:
ret = json.dumps({'result':u'auth_check_ok_user_exist'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'auth_check_fail_session_expired'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'auth_check_fail_session_expired'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'auth_check_fail_username_require'}, ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'auth_check_fail_session_expired'}, ensure_ascii=True, indent=4)
return ret
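    # sub()/unsub(): register or drop a websocket in gWebSocketsMap keyed by
    # '<session id>|<channel>', used to push session-list updates to subscribers.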
def sub(uid, channel, websocket):
if uid and websocket and not websocket.closed:
if not gWebSocketsMap.has_key(uid + '|' + channel):
gWebSocketsMap[uid + '|' + channel] = websocket
    def unsub(uid, channels):
        # Iterate over a copy so the caller's channel list is not emptied as a side effect.
        for key in list(channels):
            if uid and gWebSocketsMap.has_key(uid + '|' + key):
                del gWebSocketsMap[uid + '|' + key]
def handle_websocket(environ):
ws = get_websocket(environ)
app = gConfig['wsgi']['application']
session_id = None
channel = ''
if environ.has_key('HTTP_COOKIE'):
arr = environ['HTTP_COOKIE'].split('=')
if len(arr)>1:
session_id = arr[1]
interval = 1.0
try:
interval = float(gConfig[app]['websocket']['interval_poll'])
except:
interval = 1.0
while ws and not ws.closed:
obj = ws_recv(environ)
if obj and isinstance(obj, dict) and obj.has_key('op'):
#print(obj)
if obj['op'] == 'session_list':
ws.send(ws_session_query())
elif obj['op'] == 'subscribe/session_list':
sub(session_id, 'session_list', ws)
elif obj['op'] == 'unsubscribe/session_list':
unsub(session_id, ['session_list',])
elif obj['op'] == 'session_remove':
if obj.has_key('id') and len(obj['id'])>0:
print('remove session from client:')
print(obj['id'])
gSessionStore.delete_by_id(obj['id'])
elif obj['op'] == 'queue_size':
qsize = 0
if gJoinableQueue:
qsize = gJoinableQueue.qsize()
ws.send(json.dumps({'queue_size':qsize}, ensure_ascii=True, indent=4))
else:
try:
ws.send('')
except:
for k in gWebSocketsMap.keys():
if gWebSocketsMap[k] is ws:
gWebSocketsMap[k].close()
del gWebSocketsMap[k]
break
gevent.sleep(interval)
if ws and ws.closed:
del ws
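    # ---- request dispatch for the authorize platform ----
    # Match the URL, copy any username/password path arguments into the query
    # dict and route to the account/role/function handlers defined above.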
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
statuscode = '200 OK'
body = ''
isnew = False
urls = gUrlMap.bind_to_environ(environ)
querydict, buf = get_querydict_by_GET_POST(environ)
endpoint = ''
try:
endpoint, args = urls.match()
if args.has_key('username'):
querydict['username'] = args['username']
if args.has_key('password'):
querydict['password'] = args['password']
if endpoint == 'auth_check':
body = auth_check(session, querydict, False)
elif endpoint == 'handle_websocket':
handle_websocket(environ)
elif endpoint == 'get_salt':
if len(gSecurityConfig.keys())>0:
body = json.dumps({'result':'get_salt_ok','salt':gSecurityConfig['password_salt']}, ensure_ascii=True, indent=4)
else:
body = json.dumps({'result':'get_salt_fail'}, ensure_ascii=True, indent=4)
elif endpoint == 'user_add':
body = user_add(session, querydict)
elif endpoint == 'user_check':
body = check_user_has_function(session, querydict)
elif endpoint == 'user_delete':
body = user_delete(session, querydict)
elif endpoint == 'user_query':
body = user_query(session, querydict)
elif endpoint == 'user_update':
body = user_update(session, querydict)
elif endpoint == 'reset_password':
body = reset_password(session, querydict)
elif endpoint == 'login':
body, loginok = login(session, querydict)
if loginok:
if querydict.has_key('username') and len(querydict['username'])>0:
session['username'] = querydict['username']
elif endpoint == 'logout':
if gSessionStore and session:
gSessionStore.delete(session)
session = None
body = json.dumps({'result':u'logout_ok'}, ensure_ascii=True, indent=4)
elif endpoint == 'function_add':
body = function_add(session, querydict)
elif endpoint == 'function_query':
body = function_query(session, querydict)
elif endpoint == 'function_update':
body = function_update(session, querydict)
elif endpoint == 'function_delete':
body = function_delete(session, querydict)
elif endpoint == 'role_add':
body = role_add(session, querydict)
elif endpoint == 'role_update':
body = role_update(session, querydict)
elif endpoint == 'role_query':
body = role_query(session, querydict)
elif endpoint == 'role_delete':
body = role_delete(session, querydict)
elif endpoint == 'role_template_save':
body = role_template_save(session, querydict)
elif endpoint == 'role_template_get':
body = role_template_get(session, querydict)
else:
body = json.dumps({'result':u'access_deny'}, ensure_ascii=True, indent=4)
except HTTPException, e:
body = json.dumps({'result':u'access_deny'}, ensure_ascii=True, indent=4)
if session:
gSessionStore.save(session)
return statuscode, headers, body
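# Merge CORS headers into the response when enable_cors is set: the per-application 'cors'
# block takes precedence over the global web.cors block; default_header() supplies permissive
# values when the configuration is missing or malformed.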
def CORS_header(h={}):
global gConfig
def default_header(h={}):
ret = {}
for k in h.keys():
ret[k] = h[k]
ret['Access-Control-Allow-Origin'] = '*'
ret['Access-Control-Allow-Credentials'] = 'true'
ret['Access-Control-Expose-Headers'] = 'true'
ret['Access-Control-Max-Age'] = '3600'
ret['Access-Control-Allow-Methods'] = 'POST,GET,OPTIONS'
return ret
headers = {}
for k in h.keys():
headers[k] = h[k]
if gConfig['web']['cors']['enable_cors'].lower() == 'true':
app = gConfig['wsgi']['application']
if gConfig.has_key(app) and gConfig[app].has_key('cors'):
try:
if gConfig[app]['cors'].has_key('Access-Control-Allow-Origin'):
headers['Access-Control-Allow-Origin'] = str(gConfig[app]['cors']['Access-Control-Allow-Origin'])
if gConfig[app]['cors'].has_key('Access-Control-Allow-Credentials'):
headers['Access-Control-Allow-Credentials'] = str(gConfig[app]['cors']['Access-Control-Allow-Credentials'])
if gConfig[app]['cors'].has_key('Access-Control-Expose-Headers'):
headers['Access-Control-Expose-Headers'] = str(gConfig[app]['cors']['Access-Control-Expose-Headers'])
if gConfig[app]['cors'].has_key('Access-Control-Max-Age'):
headers['Access-Control-Max-Age'] = str(gConfig[app]['cors']['Access-Control-Max-Age'])
if gConfig[app]['cors'].has_key('Access-Control-Allow-Methods'):
s = gConfig[app]['cors']['Access-Control-Allow-Methods']
if isinstance(s, list):
s = ','.join(s)
headers['Access-Control-Allow-Methods'] = str(s)
except:
headers = default_header(h)
else:
try:
if gConfig['web']['cors'].has_key('Access-Control-Allow-Origin'):
headers['Access-Control-Allow-Origin'] = str(gConfig['web']['cors']['Access-Control-Allow-Origin'])
if gConfig['web']['cors'].has_key('Access-Control-Allow-Credentials'):
headers['Access-Control-Allow-Credentials'] = str(gConfig['web']['cors']['Access-Control-Allow-Credentials'])
if gConfig['web']['cors'].has_key('Access-Control-Expose-Headers'):
headers['Access-Control-Expose-Headers'] = str(gConfig['web']['cors']['Access-Control-Expose-Headers'])
if gConfig['web']['cors'].has_key('Access-Control-Max-Age'):
headers['Access-Control-Max-Age'] = str(gConfig['web']['cors']['Access-Control-Max-Age'])
if gConfig['web']['cors'].has_key('Access-Control-Allow-Methods'):
s = gConfig['web']['cors']['Access-Control-Allow-Methods']
if isinstance(s, list):
s = ','.join(s)
headers['Access-Control-Allow-Methods'] = str(s)
except:
headers = default_header(h)
return headers
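# A request is treated as static content when its file extension is listed in gConfig['mime_type'].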
def check_is_static(aUrl):
global STATICRESOURCE_DIR
global gConfig
ret = False
surl = dec(aUrl)
if surl[0:2] == '//':
surl = surl[2:]
if surl[0] == '/':
surl = surl[1:]
p = os.path.join(STATICRESOURCE_DIR , surl)
if '.' in surl:
ext = surl[surl.rindex('.'):]
else:
ext = os.path.splitext(p)[1]
if len(ext)>0 and gConfig['mime_type'].has_key(ext):
ret = True
return ret
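# whitelist_check / blacklist_check / ip_check: match REMOTE_ADDR against the regex (or list of
# regexes) configured under listen_port to decide whether the client may connect.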
def whitelist_check(environ, start_response):
global gConfig
ret = True
if gConfig['listen_port'].has_key('whitelist') and len(gConfig['listen_port']['whitelist'])>0:
if isinstance(gConfig['listen_port']['whitelist'], unicode):
s = str(gConfig['listen_port']['whitelist'])
rere = re.compile(s)
if environ.has_key('REMOTE_ADDR') and len(rere.findall(environ['REMOTE_ADDR']))==0:
ret = False
elif isinstance(gConfig['listen_port']['whitelist'], list):
cnt = 0
ret = False
for i in gConfig['listen_port']['whitelist']:
s = str(i)
rere = re.compile(s)
if environ.has_key('REMOTE_ADDR') and len(rere.findall(environ['REMOTE_ADDR']))>0:
cnt += 1
if cnt>0:
ret = True
return ret
def blacklist_check(environ, start_response):
global gConfig
ret = True
if gConfig['listen_port'].has_key('blacklist') and len(gConfig['listen_port']['blacklist'])>0:
if isinstance(gConfig['listen_port']['blacklist'], unicode):
s = str(gConfig['listen_port']['blacklist'])
rere = re.compile(s)
if environ.has_key('REMOTE_ADDR') and len(rere.findall(environ['REMOTE_ADDR']))>0:
ret = False
elif isinstance(gConfig['listen_port']['blacklist'], list):
cnt = 0
ret = True
for i in gConfig['listen_port']['blacklist']:
s = str(i)
rere = re.compile(s)
if environ.has_key('REMOTE_ADDR') and len(rere.findall(environ['REMOTE_ADDR']))>0:
cnt += 1
if cnt>0:
ret = False
return ret
def ip_check(environ, start_response):
ret = False
if whitelist_check(environ, start_response) and blacklist_check(environ, start_response):
ret = True
return ret
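# Reject a login when session_check_ip is enabled and the username already has a session bound
# to a different client IP.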
def session_check_user_ip(environ, username):
global gConfig, gSessionStore
ret = True
ip = environ['REMOTE_ADDR']
if gConfig['authorize_platform']['session']['session_check_ip'].lower() == 'true':
l = gSessionStore.get_list_by_username(username)
for i in l:
if i.has_key('ip') and i['ip'] != environ['REMOTE_ADDR']:
ret = False
break
return ret, ip
def get_websocket(environ):
ret = None
if environ.has_key("wsgi.websocket") and environ['wsgi.websocket']:
ret = environ['wsgi.websocket']
return ret
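# Broadcast a string to every open websocket (or only those subscribed to 'channel' when given);
# closed sockets are removed from the map.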
def ws_send(channel=None, string=''):
global gWebSocketsMap
for k in gWebSocketsMap.keys():
ws = None
if channel:
if '|' + channel in k:
ws = gWebSocketsMap[k]
else:
ws = gWebSocketsMap[k]
if ws and not ws.closed:
try:
ws.send(string)
except geventwebsocket.WebSocketError, e:
print('ws_send exception:%s' % str(e))
elif ws and ws.closed:
del gWebSocketsMap[k]
def ws_session_query():
ret = json.dumps(db_util.remove_mongo_id(gSessionStore.list()), ensure_ascii=True, indent=4)
return ret
def ws_recv(environ):
ret = None
ws = get_websocket(environ)
if ws and not ws.closed:
msg = None
try:
msg = ws.receive()
except geventwebsocket.WebSocketError, e:
print('ws_recv exception:%s' % str(e))
if msg:
try:
ret = json.loads(msg)
except:
ret = msg
return ret
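# WSGI entry point for the combiz platform: IP filtering, static files, /proxy_* forwarding,
# everything else is handled by handle_combiz_platform.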
def application_combiz_platform(environ, start_response):
global STATICRESOURCE_DIR
global gConfig, gRequest, gSessionStore
def proxy(environ):
connection_timeout, network_timeout = 5.0, 10.0
proxy_type = ''
if '/proxy_platform' in path_info:
proxy_type = 'proxy_platform'
if '/proxy_file' in path_info:
proxy_type = 'proxy_file'
if '/proxy_pay' in path_info:
proxy_type = 'proxy_pay'
try:
connection_timeout = float(gConfig['combiz_platform'][proxy_type]['www_connection_timeout'])
except:
pass
try:
network_timeout = float(gConfig['combiz_platform'][proxy_type]['www_network_timeout'])
except:
pass
return handle_http_proxy(environ, proxy_type, gConfig['combiz_platform'][proxy_type]['protocol'], gConfig['combiz_platform'][proxy_type]['host'], gConfig['combiz_platform'][proxy_type]['port'], '', connection_timeout, network_timeout)
headers = {}
headerslist = []
cookie_header = None
body = ''
statuscode = '200 OK'
if not ip_check(environ, start_response):
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
body = json.dumps({'result':u'your_ip_access_deny'}, ensure_ascii=True, indent=4)
start_response(statuscode, headerslist)
return [body]
path_info = environ['PATH_INFO']
statuscode = '200 OK'
if path_info[-1:] == '/':
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
body = json.dumps({'result':u'access_deny'}, ensure_ascii=True, indent=4)
elif check_is_static(path_info):
statuscode, headers, body = handle_static(environ, path_info)
elif len(path_info)>7 and path_info[:7] == '/proxy_':
statuscode, headers, body = proxy(environ)
else:
statuscode, headers, body = handle_combiz_platform(environ)
headers = CORS_header(headers)
for k in headers:
headerslist.append((k, headers[k]))
#print(headerslist)
start_response(statuscode, headerslist)
return [body]
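# WSGI entry point for the authorize platform: IP filter, static files, Mongo-backed sessions,
# then handle_authorize_platform for the API endpoints.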
def application_authorize_platform(environ, start_response):
global STATICRESOURCE_DIR
global gConfig, gRequest, gSessionStore
headers = {}
headerslist = []
cookie_header = None
body = ''
statuscode = '200 OK'
if not ip_check(environ, start_response):
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
body = json.dumps({'result':u'your_ip_access_deny'}, ensure_ascii=True, indent=4)
start_response(statuscode, headerslist)
return [body]
path_info = environ['PATH_INFO']
if gSessionStore is None:
gSessionStore = MongodbSessionStore(host=gConfig['authorize_platform']['mongodb']['host'],
port=int(gConfig['authorize_platform']['mongodb']['port']),
replicaset=gConfig['authorize_platform']['mongodb']['replicaset'],
db = gConfig['authorize_platform']['mongodb']['database'],
collection = gConfig['authorize_platform']['mongodb']['collection_session'],
)
is_expire = False
statuscode = '200 OK'
if path_info[-1:] == '/':
#path_info += gConfig['web']['indexpage']
#statuscode, headers, body = handle_static(environ, path_info)
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
body = json.dumps({'result':u'access_deny'}, ensure_ascii=True, indent=4)
elif check_is_static(path_info):
statuscode, headers, body = handle_static(environ, path_info)
else:
with session_manager(environ):
sess, cookie_header, is_expire = check_session(environ, gRequest, gSessionStore)
if is_expire:
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
statuscode = '200 OK'
body = json.dumps({'result':u'session_expired'}, ensure_ascii=True, indent=4)
if sess:
if not sess.has_key('ip'):
sess['ip'] = environ['REMOTE_ADDR']
gSessionStore.save_if_modified(sess)
else:
statuscode, headers, body = handle_authorize_platform(environ, sess)
headers = CORS_header(headers)
if cookie_header:
headerslist.append(cookie_header)
for k in headers:
headerslist.append((k, headers[k]))
#print(headerslist)
start_response(statuscode, headerslist)
return [body]
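# WSGI entry point for the chat platform: same skeleton as the authorize platform, but the
# session handling is currently commented out and requests go straight to handle_chat_platform.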
def application_chat_platform(environ, start_response):
global STATICRESOURCE_DIR
global gConfig, gRequest, gSessionStore
headers = {}
headerslist = []
cookie_header = None
body = ''
statuscode = '200 OK'
if not ip_check(environ, start_response):
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
body = json.dumps({'result':u'your_ip_access_deny'}, ensure_ascii=True, indent=4)
start_response(statuscode, headerslist)
return [body]
path_info = environ['PATH_INFO']
#if gSessionStore is None:
#gSessionStore = MongodbSessionStore(host=gConfig['chat_platform']['mongodb']['host'],
#port=int(gConfig['chat_platform']['mongodb']['port']),
#replicaset=gConfig['chat_platform']['mongodb']['replicaset'],
#db = gConfig['chat_platform']['mongodb']['database'],
#collection = gConfig['chat_platform']['mongodb']['collection_session'],
#)
#is_expire = False
statuscode = '200 OK'
if path_info[-1:] == '/':
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
body = json.dumps({'result':u'access_deny'}, ensure_ascii=True, indent=4)
elif check_is_static(path_info):
statuscode, headers, body = handle_static(environ, path_info)
else:
#with session_manager(environ):
#sess, cookie_header, is_expire = check_session(environ, gRequest, gSessionStore)
#if is_expire:
#headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
#statuscode = '200 OK'
#body = json.dumps({'result':u'session_expired'}, ensure_ascii=True, indent=4)
#if sess:
#if not sess.has_key('ip'):
#sess['ip'] = environ['REMOTE_ADDR']
#gSessionStore.save_if_modified(sess)
#else:
statuscode, headers, body = handle_chat_platform(environ, None)
headers = CORS_header(headers)
if cookie_header:
headerslist.append(cookie_header)
for k in headers:
headerslist.append((k, headers[k]))
#print(headerslist)
start_response(statuscode, headerslist)
return [body]
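# Alipay helpers: build the query string, append sign/sign_type when required, and send it to
# the gateway from a spawned greenlet.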
def sign_and_send(thirdpay, method, href, data, need_sign=True):
ret = None
if thirdpay == 'alipay':
ret = sign_and_send_alipay(method, href, data, need_sign)
return ret
def sign_and_send_alipay(method, href, data, need_sign=True):
global gConfig
qs = build_query_string(data)
if need_sign:
signed = get_sign_alipay(qs)
qs += '&sign=%s' % signed
qs += '&sign_type=%s' % gConfig['pay_platform']['alipay']['sign_type']
text = qs
text = enc_by_code(gConfig['pay_platform']['alipay']['input_charset'], text)
connection_timeout, network_timeout = float(gConfig['pay_platform']['alipay']['connection_timeout']), float(gConfig['pay_platform']['alipay']['network_timeout'])
client = HTTPClient.from_url(href, concurrency=1, connection_timeout=connection_timeout, network_timeout=network_timeout, )
g = None
if method == 'get':
if not href[-1:] == '?':
href += '?'
href += urllib.quote(text)
g = gevent.spawn(client.get, href)
if method == 'post':
postdata = urllib.quote(text)
headers = {}
headers['Content-Type'] = 'application/x-www-form-urlencoded; text/html; charset=%s' % str(gConfig['pay_platform']['alipay']['input_charset'])
g = gevent.spawn(client.post, href, body=postdata, headers=headers)
return g
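# Simulate Alipay's synchronous return callback (return_url) for the fake gateway.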
def fake_gateway_alipay_return(querydict):
global gConfig
sign_data = {}
if querydict['service'] == 'refund_fastpay_by_platform_pwd':
sign_data['is_success'] = 'T'
#sign_data['refund_result'] = 'TRADE_PENDING'
elif querydict['service'] == 'create_direct_pay_by_user':
if querydict.has_key('out_trade_no'):
sign_data['is_success'] = 'T'
sign_data['notify_id'] = str(ObjectId())
sign_data['notify_time'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
sign_data['notify_type'] = 'trade_status_sync'
sign_data['out_trade_no'] = querydict['out_trade_no']
sign_data['partner'] = gConfig['fake_gateway_alipay']['alipay']['partner_id']
if querydict.has_key('seller_email'):
sign_data['seller_email'] = querydict['seller_email']
if querydict.has_key('subject'):
sign_data['subject'] = querydict['subject']
if querydict.has_key('buyer_email'):
sign_data['buyer_email'] = querydict['buyer_email']
if querydict.has_key('total_fee'):
sign_data['total_fee'] = querydict['total_fee']
#sign_data['trade_no'] = ''
sign_data['trade_status'] = 'TRADE_PENDING'
href = str(gConfig['pay_platform']['alipay']['return_url'])
if querydict.has_key('return_url'):
href = querydict['return_url']
sign_and_send_alipay('get', href, sign_data)
else:
print('fake_gateway_alipay_return out_trade_no required')
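# Simulate Alipay's asynchronous notify callbacks: batch refund notifications and trade status
# updates for direct pay.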
def fake_gateway_alipay_notify(querydict):
global gConfig
def get_pay_log_rec_by_trade_no(trade_no):
ret = None
db_util.mongo_init_client('pay_platform')
client = db_util.gClientMongo['pay_platform']
db = client['pay']
if 'pay_log' in db.collection_names(False):
collection = db['pay_log']
ret = collection.find_one({"trade_no":trade_no})
return ret
data = {}
if querydict['service'] == 'refund_fastpay_by_platform_pwd':
data['notify_time'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
data['notify_type'] = 'batch_refund_notify'
data['notify_id'] = str(ObjectId())
data['batch_no'] = querydict['batch_no']
data['success_num'] = '1'
detail_data = querydict['detail_data']
arr = detail_data.split('^')
trade_no = arr[0]
refund_fee = float(arr[1])
result_details = '%s^%s^%s' % (arr[0], arr[1], 'SUCCESS')
data['result_details'] = result_details
href = str(gConfig['pay_platform']['alipay']['notify_url'])
sign_and_send_alipay('post', href, data)
rec = get_pay_log_rec_by_trade_no(trade_no)
if rec:
data = {}
data['notify_type'] = 'trade_status_sync'
data['out_trade_no'] = rec['out_trade_no']
data['refund_status'] = 'REFUND_SUCCESS'
if refund_fee < rec['total_fee']:
data['trade_status'] = 'TRADE_SUCCESS'
else:
data['trade_status'] = 'TRADE_CLOSED'
sign_and_send_alipay('post', href, data)
elif querydict['service'] == 'create_direct_pay_by_user':
if querydict.has_key('out_trade_no'):
data['out_trade_no'] = querydict['out_trade_no']
data['notify_id'] = str(ObjectId())
data['notify_time'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
data['notify_type'] = 'trade_status_sync'
data['partner'] = gConfig['fake_gateway_alipay']['alipay']['partner_id']
if querydict.has_key('buyer_email'):
data['buyer_email' ] = querydict['buyer_email']
if querydict.has_key('seller_email'):
data['seller_email'] = querydict['seller_email']
if querydict.has_key('subject'):
data['subject'] = querydict['subject']
if querydict.has_key('total_fee'):
data['total_fee'] = querydict['total_fee']
if querydict.has_key('paymethod') and querydict['paymethod'] == 'bankPay':
data['bank_seq_no'] = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
data['trade_no'] = datetime.datetime.now().strftime("%Y%m%d%H%M%S") + str(ObjectId())
data['trade_status'] = 'TRADE_SUCCESS'
href = str(gConfig['pay_platform']['alipay']['notify_url'])
sign_and_send_alipay('post', href, data)
else:
print('fake_gateway_alipay_notify out_trade_no required')
def fake_gateway_alipay_error_notify(querydict, error_code):
global gConfig
data = {}
if querydict['service'] == 'refund_fastpay_by_platform_pwd':
data['notify_time'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
data['notify_type'] = 'batch_refund_notify'
data['notify_id'] = str(ObjectId())
data['batch_no'] = querydict['batch_no']
data['success_num'] = '0'
detail_data = querydict['detail_data']
arr = detail_data.split('^')
result_details = '%s^%s^%s' % (arr[0], arr[1], error_code)
data['result_details'] = result_details
href = str(gConfig['pay_platform']['alipay']['notify_url'])
if querydict.has_key('notify_url'):
href = str(querydict['notify_url'])
sign_and_send_alipay('post', href, data)
elif querydict['service'] == 'create_direct_pay_by_user':
data['partner'] = gConfig['fake_gateway_alipay']['alipay']['partner_id']
if querydict.has_key('out_trade_no'):
data['out_trade_no'] = querydict['out_trade_no']
data['error_code'] = error_code
if querydict.has_key('buyer_email'):
data['buyer_email'] = querydict['buyer_email']
if querydict.has_key('seller_email'):
data['seller_email'] = querydict['seller_email']
href = str(gConfig['pay_platform']['alipay']['error_notify_url'])
sign_and_send_alipay('post', href, data, need_sign=False)
else:
print('fake_gateway_alipay_error_notify out_trade_no required')
def dec_by_code(code, string):
encode, decode, reader, writer = codecs.lookup(str(code))
text = string
text, length = decode(text, 'replace')
return text
def enc_by_code(code, string):
encode, decode, reader, writer = codecs.lookup(str(code))
text = string
text, length = encode(text, 'replace')
return text
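# Fake gateway entry: decode the request with the configured charset, verify the signature, then
# fire the matching fake return/notify (or an error notify when an error code is configured).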
def handle_fake_gateway_alipay(environ, error_code_pay=None, error_code_refund=None):
global ENCODING
global gConfig
headers = {}
headers['Content-Type'] = 'text/json;charset=' + ENCODING
statuscode = '200 OK'
body = ''
d = {}
querydict = {}
querystring = ''
querystring1 = ''
if environ.has_key('QUERY_STRING'):
querystring = environ['QUERY_STRING']
querydict = urlparse.parse_qs(querystring)
for key in querydict.keys():
d[key] = querydict[key][0]
querydict = d
if not environ.has_key('QUERY_STRING') or len(environ['QUERY_STRING'])==0:
buf = environ['wsgi.input'].read()
querystring = urllib.unquote_plus(buf)
querystring = dec_by_code(gConfig['pay_platform']['alipay']['input_charset'], querystring)
querydict = urlparse.parse_qs(querystring)
d = {}
for key in querydict.keys():
d[key] = querydict[key][0]
querydict = d
try:
querystring1 = querystring[:querystring.index('&sign=')]
except:
pass
try:
querystring1 = querystring1[:querystring1.index('&sign_type=')]
except:
pass
signed1 = None
if querydict['service'] == 'create_direct_pay_by_user':
fake_gateway_alipay_return(querydict)
if querydict['service'] == 'refund_fastpay_by_platform_pwd':
headers['Content-Type'] = 'text/xml;charset=' + ENCODING
body = '<?xml version="1.0" encoding="UTF-8"?><IS_SUCCESS>T</IS_SUCCESS>'
gevent.sleep(float(gConfig['fake_gateway_alipay']['alipay']['process_second']))
#print(querydict)
if querydict.has_key('sign') and querydict.has_key('sign_type') and querydict.has_key('_input_charset'):
ok = check_sign_alipay(querydict['_input_charset'], querydict['sign'], querydict['sign_type'], querystring1)
if ok:
error_code = error_code_pay
if error_code is None:
error_code = error_code_refund
if error_code:
fake_gateway_alipay_error_notify(querydict, error_code)
else:
fake_gateway_alipay_notify(querydict)
else:
print('signature check error')
fake_gateway_alipay_error_notify(querydict, 'ILLEGAL_SIGN')
else:
print('need sign or sign_type or _input_charset')
return statuscode, headers, body
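# WSGI entry point for the fake Alipay gateway: serves /gateway.do and injects the configured
# pay/refund error codes when they are set and known to gSecurityConfig.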
def application_fake_gateway_alipay(environ, start_response):
global STATICRESOURCE_DIR
global gConfig, gSecurityConfig
headers = {}
headerslist = []
body = ''
statuscode = '200 OK'
path_info = environ['PATH_INFO']
statuscode = '200 OK'
if path_info[-1:] == '/':
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
body = json.dumps({'result':u'access_deny'}, ensure_ascii=True, indent=4)
elif check_is_static(path_info):
statuscode, headers, body = handle_static(environ, path_info)
elif path_info == '/gateway.do':
error_code_pay = gConfig['fake_gateway_alipay']['alipay']['error_code_pay']
error_code_refund = gConfig['fake_gateway_alipay']['alipay']['error_code_refund']
if len(error_code_pay) == 0:
error_code_pay = None
if error_code_pay and not gSecurityConfig['alipay']['error_code'].has_key(error_code_pay):
error_code_pay = None
if len(error_code_refund) == 0:
error_code_refund = None
if error_code_refund and not gSecurityConfig['alipay']['error_code'].has_key(error_code_refund):
error_code_refund = None
statuscode, headers, body = handle_fake_gateway_alipay(environ, error_code_pay, error_code_refund)
headers = CORS_header(headers)
for k in headers:
headerslist.append((k, headers[k]))
start_response(statuscode, headerslist)
return [body]
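# WSGI entry point for the pay platform: static files, /pay /refund /query endpoints, the Alipay
# callback URLs, and a /websocket queue monitor.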
def application_pay_platform(environ, start_response):
global STATICRESOURCE_DIR
global gConfig, gWebSocketsMap, gJoinableQueue
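# local copy of the module-level check_is_static helper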
def check_is_static(aUrl):
ret = False
surl = dec(aUrl)
if surl[0:2] == '//':
surl = surl[2:]
if surl[0] == '/':
surl = surl[1:]
p = os.path.join(STATICRESOURCE_DIR , surl)
if '.' in surl:
ext = surl[surl.rindex('.'):]
else:
ext = os.path.splitext(p)[1]
if len(ext)>0 and gConfig['mime_type'].has_key(ext):
ret = True
return ret
def handle_websocket(environ):
ws = get_websocket(environ)
app = gConfig['wsgi']['application']
interval = 1.0
try:
interval = float(gConfig[app]['websocket']['interval_poll'])
except:
interval = 1.0
while ws and not ws.closed:
obj = ws_recv(environ)
if obj and isinstance(obj, dict) and obj.has_key('op'):
if obj['op'] == 'queue_size':
qsize = 0
if gJoinableQueue:
qsize = gJoinableQueue.qsize()
ws.send(json.dumps({'queue_size':qsize}, ensure_ascii=True, indent=4))
else:
try:
ws.send('')
except:
for k in gWebSocketsMap.keys():
if gWebSocketsMap[k] is ws:
gWebSocketsMap[k].close()
del gWebSocketsMap[k]
break
gevent.sleep(interval)
if ws and ws.closed:
del ws
headers = {}
headerslist = []
body = ''
statuscode = '200 OK'
if not ip_check(environ, start_response):
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
body = json.dumps({'result':u'your_ip_access_deny'}, ensure_ascii=True, indent=4)
start_response(statuscode, headerslist)
return [body]
path_info = environ['PATH_INFO']
headerslist = []
statuscode = '200 OK'
#print('path_info=%s' % path_info)
if path_info[-1:] == '/':
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
body = json.dumps({'result':u'access_deny'}, ensure_ascii=True, indent=4)
elif check_is_static(path_info):
statuscode, headers, body = handle_static(environ, path_info)
elif path_info == '/pay':
statuscode, headers, body = handle_pay(environ)
elif path_info == '/refund':
statuscode, headers, body = handle_refund(environ)
elif path_info == '/query':
statuscode, headers, body = handle_pay_getinfo(environ)
elif path_info == '/alipay_return_url':
headerslist.append(('Content-Type', 'text/plain;charset=' + ENCODING))
handle_alipay_return_url(environ)
elif path_info == '/alipay_notify_url':
headerslist.append(('Content-Type', 'text/plain;charset=' + ENCODING))
handle_alipay_notify_url(environ)
body = 'success'
elif path_info == '/alipay_error_notify_url':
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
handle_alipay_error_notify_url(environ)
headers = CORS_header(headers)
for k in headers:
headerslist.append((k, headers[k]))
#print(headerslist)
start_response(statuscode, headerslist)
if path_info == '/websocket':
handle_websocket(environ)
return [body]
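# Generic reverse proxy: rebuild the request for the real backend, forward it with
# geventhttpclient, and retry up to 4 times on Winsock errors 10053/10054.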
def handle_http_proxy(environ, proxy_placeholder='proxy', real_protocol='http', real_host='localhost', real_port='80', token='', connection_timeout=5.0, network_timeout=10.0, request_headers={}):
global ENCODING, gHttpClient, gRequest, gProxyRequest
path_info = environ['PATH_INFO']
if environ.has_key('QUERY_STRING') and len(environ['QUERY_STRING'])>0:
path_info += '?' + environ['QUERY_STRING']
request = None
if gProxyRequest is None:
request = Request(environ)
else:
request = gProxyRequest
method = request.method.lower()
data = request.get_data()
headers = {}
for i in request.headers:
headers[i[0]] = enc(i[1])
for k in request_headers.keys():
headers[k] = request_headers[k]
headers['Host'] = real_host
#for k in headers.keys():
#print('%s=%s' % (k, headers[k]))
href = '%s://%s:%s%s' % (real_protocol, real_host, real_port, path_info.replace('/%s/' % proxy_placeholder, '/'))
if '?' in href:
href += '&'
else:
href += '?'
href += 'token=%s&random=%d' % ( token, random.randint(0,100000) )
print('proxy to %s' % href)
header = {'Content-Type': 'application/json;charset=' + ENCODING, 'Cache-Control': 'no-cache'}
ret = ''
url = URL(href)
if not gHttpClient.has_key('http_proxy'):
gHttpClient['http_proxy'] = HTTPClient(url.host, port=url.port, connection_timeout=connection_timeout, network_timeout=network_timeout, concurrency=200)
client = gHttpClient['http_proxy']
response = None
try:
if method == 'get':
response = client.get(url.request_uri, headers)
elif method == 'put':
response = client.put(url.request_uri, data, headers)
elif method == 'delete':
response = client.delete(url.request_uri, data, headers)
elif method == 'post':
response = client.post(url.request_uri, data, headers)
except Exception,e:
idx = 0
e1 = e
while getattr(e1, 'errno', None) in (10053, 10054) and idx < 4:
idx += 1
print('encountered winsock error %s, reconnect attempt %d...' % (e1.errno, idx))
try:
if method == 'get':
response = client.get(url.request_uri, headers)
elif method == 'put':
response = client.put(url.request_uri, data, headers)
elif method == 'delete':
response = client.delete(url.request_uri, data, headers)
elif method == 'post':
response = client.post(url.request_uri, data, headers)
break
except Exception,e2:
e1 = e2
if idx >= 4:
raise e1
if response:
if hasattr(response, 'status_code'):
if response.status_code == 200 or response.status_code == 304:
ret = response.read()
# print(ret)
header = {}
for k in response._headers_index.keys():
if not k in ['transfer-encoding', ]:
v = response._headers_index[k]
if '-' in k:
k = '-'.join([i.capitalize() for i in k.split('-')])
else:
k = k.capitalize()
header[k] = v
else:
msg = 'handle_http_proxy response error:%d' % response.status_code
ret = json.dumps({'result':msg}, ensure_ascii=True, indent=4)
#raise Exception(msg)
else:
raise Exception('handle_http_proxy error: response has no status_code')
else:
raise Exception('handle_http_proxy error')
return '200 OK', header, ret
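# WSGI entry point for the webgis application: tiles/terrain/WMTS handlers, /proxy forwarding,
# cookie-based sessions, plus the state_examination, bayesian and antibird sub-APIs.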
def application_webgis(environ, start_response):
global ENCODING
global gConfig, gRequest, gSessionStore, gWebSocketsMap
def handle_websocket(environ):
key = str(gevent.getcurrent().__hash__())
ws = get_websocket(environ)
if not gWebSocketsMap.has_key(key):
gWebSocketsMap[key] = ws
app = gConfig['wsgi']['application']
interval = 1.0
try:
interval = float(gConfig[app]['websocket']['interval_poll'])
except:
interval = 1.0
while ws and not ws.closed:
obj = ws_recv(environ)
if obj and isinstance(obj, dict) and obj.has_key('op'):
if obj['op'] == 'queue_size':
qsize = 0
if gJoinableQueue:
qsize = gJoinableQueue.qsize()
ws.send(json.dumps({'queue_size':qsize}, ensure_ascii=True, indent=4))
if obj['op'] == 'turn_on_sound':
ws.send('')
else:
try:
ws.send('')
except:
for k in gWebSocketsMap.keys():
if gWebSocketsMap[k] is ws:
gWebSocketsMap[k].close()
del gWebSocketsMap[k]
break
gevent.sleep(interval)
if ws and ws.closed:
del ws
return '200 OK', {}, ''
def proxy(environ, request_headers={}):
global gConfig
connection_timeout, network_timeout = 5.0, 10.0
try:
connection_timeout = float(gConfig['webgis']['anti_bird']['www_connection_timeout'])
except:
pass
try:
network_timeout = float(gConfig['webgis']['anti_bird']['www_network_timeout'])
except:
pass
token = md5.new('bird%s' % time.strftime('%Y%m%d')).hexdigest()
path_info = environ['PATH_INFO']
if '/hasBird' in path_info:
request_headers['Content-Type'] = 'application/json'
return handle_http_proxy(environ, 'proxy', 'http', gConfig['webgis']['anti_bird']['tcp_host'], gConfig['webgis']['anti_bird']['http_port'], token, connection_timeout, network_timeout, request_headers)
# def get_anti_bird_list_from_cache():
# ret = '{"result":"get_anti_bird_list_from_cache_error:cannot connect to db"}'
# arr = []
# if gConfig['webgis'].has_key('anti_bird') and gConfig['webgis']['anti_bird'].has_key('mongodb'):
# db_util.mongo_init_client('anti_bird')
# db = db_util.gClientMongo['anti_bird'][gConfig['webgis']['anti_bird']['mongodb']['database']]
# collection = db[gConfig['webgis']['anti_bird']['mongodb']['detector_collection']]
# arr = db_util.remove_mongo_id(list(collection.find({})))
# ret = json.dumps(arr, ensure_ascii=True, indent=4)
# return ret
#
# def get_latest_records_from_cache():
# ret = '{"result":"get_latest_records_from_cache_error:cannot connect to db"}'
# arr = []
# if gConfig['webgis'].has_key('anti_bird') and gConfig['webgis']['anti_bird'].has_key('mongodb'):
# db_util.mongo_init_client('anti_bird')
# db = db_util.gClientMongo['anti_bird'][gConfig['webgis']['anti_bird']['mongodb']['database']]
# collection = db[gConfig['webgis']['anti_bird']['mongodb']['detector_collection']]
# arr = db_util.remove_mongo_id(list(collection.find({})))
# ret = json.dumps(arr, ensure_ascii=True, indent=4)
# return ret
def set_cookie(key, value):
secure = False
if gConfig['listen_port']['enable_ssl'].lower() == 'true':
secure = True
session_age = 60
try:
session_age = int(gConfig['webgis']['session']['session_age'])
except:
pass
# cookie = ('Set-Cookie', dump_cookie(key, value, domain=str(gConfig['webgis']['session']['session_domain']), max_age=session_age, secure=secure))
cookie = ('Set-Cookie', dump_cookie(key, value, max_age=session_age, secure=secure))
return cookie
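# get_cookie_data/set_cookie_data keep selected session fields in a client-side 'session_data'
# JSON cookie; session_handle reconciles that cookie with the server-side session store.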
def get_cookie_data(request, key=None):
string = '{}'
if request:
string = request.cookies.get('session_data')
ret = None
if string and len(string)>0:
try:
ret = json.loads(string)
if key and ret.has_key(key):
ret = ret[key]
else:
ret = None
except:
pass
return ret
def set_cookie_data(request, data):
string = '{}'
ret = None
if request:
string = request.cookies.get('session_data')
if string and len(string)>0:
try:
obj = json.loads(string)
if isinstance(obj, dict) and isinstance(data, dict):
for key in data.keys():
obj[key] = data[key]
string = json.dumps(obj)
ret = set_cookie('session_data', string)
except:
pass
return ret
def session_handle(environ, request, session_store):
sid = get_cookie_data(request, 'session_id')
sess = None
cookie = None
is_expire = False
if sid is None or len(sid)==0:
request.session = session_store.new()
# session_store.save(request.session)
sess = request.session
cookie = set_cookie_data(None, {'session_id': request.session.sid})
is_expire = True
else:
request.session = session_store.get(sid)
if request.session:
o = {'session_id': request.session.sid}
for k in request.session.keys():
if not k in [u'password',]:
o[k] = request.session[k]
cookie = set_cookie_data(request, o)
session_store.save_if_modified(request.session)
else:
cookie = set_cookie('session_data', '{}')
is_expire = True
# if request.session.should_save:
# session_store.save(request.session)
sess = request.session
return sess, cookie, is_expire
def handle_login(environ):
ret = None
querydict, buf = get_querydict_by_GET_POST(environ)
if querydict.has_key('db') and querydict.has_key('collection') and querydict.has_key('username') and querydict.has_key('password'):
ret = db_util.mongo_find_one(querydict['db'],
querydict['collection'],
{'username':querydict['username'],
'password':querydict['password']})
return ret
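# CRUD for the 'state_examination' collection; line names and state grades are normalised by
# modifier() before saving.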
def handle_state_examination(environ):
def get_collection(collection):
ret = None
db_util.mongo_init_client('webgis')
db = db_util.gClientMongo['webgis'][gConfig['webgis']['mongodb']['database']]
if not collection in db.collection_names(False):
ret = db.create_collection(collection)
else:
ret = db[collection]
return ret
def state_examination_save(querydict):
def modifier(adict = {}):
for k in adict.keys():
if not k in ['_id', 'check_year']:
adict[k] = adict[k].strip()
if k == 'line_name':
adict[k] = adict[k].replace('-', '')\
.replace('500kV', '').replace('220kV', '').replace('110kV', '').replace('35kV', '').replace('10kV', '')\
.replace(u'Ⅱ', 'II').replace(u'Ⅰ', 'I')
if adict[k][-1] == u'回':
adict[k] = adict[k].replace( u'回', u'回线')
if not adict[k][-1] == u'线':
adict[k] = adict[k] + u'线'
if k == 'line_state' or 'unit_' in k:
adict[k] = adict[k].replace(u'正常', 'I').replace(u'注意', 'II').replace(u'异常', 'III').replace(u'严重', 'IV')
return adict
ret = []
collection = get_collection('state_examination')
if isinstance(querydict, dict) and querydict.has_key('line_name') and querydict.has_key('check_year'):
querydict['line_name'] = querydict['line_name'].strip()
existone = collection.find_one({'line_name':querydict['line_name'].strip(), 'check_year':querydict['check_year']})
if existone:
querydict['_id'] = str(existone['_id'])
querydict = modifier(querydict)
_id = collection.save(db_util.add_mongo_id(querydict))
ret = collection.find_one({'_id':_id})
if ret:
ret = db_util.remove_mongo_id(ret)
if isinstance(querydict, list):
for i in querydict:
i = modifier(i)
existone = collection.find_one({'line_name':i['line_name'], 'check_year':i['check_year']})
if existone:
i['_id'] = str(existone['_id'])
collection.save(db_util.add_mongo_id(i))
return json.dumps(ret, ensure_ascii=True, indent=4)
def state_examination_query_line_names(querydict):
ret = []
collection = get_collection('state_examination')
pipeline = [
# {'$unwind':'$line_name'},
{"$group": {"_id": "$line_name", "count": {"$sum": 1}}},
]
ret = list(collection.aggregate(pipeline))
ret = map(lambda x:x['_id'], ret)
return json.dumps(db_util.remove_mongo_id(ret), ensure_ascii=True, indent=4)
def state_examination_query(querydict):
ret = []
collection = get_collection('state_examination')
if isinstance(querydict, dict):
# print(querydict)
ret = list(collection.find(db_util.add_mongo_id(querydict)))
return json.dumps(db_util.remove_mongo_id(ret), ensure_ascii=True, indent=4)
def state_examination_delete(querydict):
ret = []
collection = get_collection('state_examination')
if isinstance(querydict, dict):
if querydict.has_key('_id'):
if isinstance(querydict['_id'], str) or isinstance(querydict['_id'], unicode):
existone = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
if existone:
collection.remove({'_id':existone['_id']})
ret = json.dumps(db_util.remove_mongo_id(existone), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'record_not_exist' }, ensure_ascii=True, indent=4)
if isinstance(querydict['_id'], list):
ids = db_util.add_mongo_id(querydict['_id'])
cond = {'_id':{'$in':ids}}
collection.remove(cond)
ret = json.dumps(db_util.remove_mongo_id(querydict['_id']), ensure_ascii=True, indent=4)
return json.dumps(ret, ensure_ascii=True, indent=4)
statuscode, headers, body = '200 OK', {}, ''
urls = gUrlMap.bind_to_environ(environ)
querydict, buf = get_querydict_by_GET_POST(environ)
endpoint, args = urls.match()
if args.has_key('_id') and isinstance(querydict, dict):
querydict['_id'] = args['_id']
if endpoint == 'state_examination_save':
body = state_examination_save(querydict)
elif endpoint == 'state_examination_query':
body = state_examination_query(querydict)
elif endpoint == 'state_examination_delete':
body = state_examination_delete(querydict)
elif endpoint == 'state_examination_query_line_names':
body = state_examination_query_line_names(querydict)
return statuscode, headers, body
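# Anti-bird device API: device list, latest records and device-to-tower mapping, fetched through
# the /proxy backend and the webgis feature collection.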
def handle_antibird(environ):
global gConfig, gUrlMap, ENCODING
def init_list(environ):
ret = []
s = '{"result":"unknown how to get anti bird list"}'
# if gConfig['webgis'].has_key('anti_bird') and gConfig['webgis']['anti_bird'].has_key('fetch_from_www') and gConfig['webgis']['anti_bird']['fetch_from_www'].lower() == 'true':
if True:
environ['PATH_INFO'] = '/proxy/api/detector'
environ['QUERY_STRING'] = ''
code, header, s = proxy(environ)
# if gConfig['webgis'].has_key('anti_bird') and gConfig['webgis']['anti_bird'].has_key('fetch_from_www') and gConfig['webgis']['anti_bird']['fetch_from_www'].lower() == 'false':
# if False:
# s = get_anti_bird_list_from_cache()
try:
if len(s)>0:
obj = json.loads(s)
if isinstance(obj, dict) :
if obj.has_key('result'):
print('antibird/init_list error:%s' % obj['result'])
else:
if obj.has_key('_id'):
if obj.has_key('imei'):
obj['label'] = obj['imei']
obj['value'] = obj['imei']
ret = [obj, ]
else:
print('antibird/init_list error: unknown error')
ret = []
elif isinstance(obj, list) :
for i in obj:
idx = obj.index(i)
if i.has_key('imei'):
i['label'] = i['imei']
i['value'] = i['imei']
obj[idx] = i
ret = obj
except Exception,e:
raise
return ret
def get_latest_records(environ, querydict):
ret = []
objstr = ''
if querydict.has_key('imei') and len(querydict['imei'])>0:
records_num = 1
if querydict.has_key('records_num') and len(querydict['records_num'])>0:
records_num = int(querydict['records_num'])
href = '/proxy/api/detector/%s/log/%d' % (querydict['imei'], records_num)
environ['PATH_INFO'] = href
environ['QUERY_STRING'] = ''
status, header, objstr = proxy(environ)
if len(objstr)>0:
try:
obj = json.loads(objstr)
if isinstance(obj, dict) :
if obj.has_key('result'):
print('antibird/get_latest_records error:%s' % obj['result'])
else:
if obj.has_key('_id'):
ret = [obj, ]
else:
print('antibird/get_latest_records error: unknown error')
ret = []
elif isinstance(obj, list) :
ret = obj
except:
e = sys.exc_info()[1]
if hasattr(e, 'message'):
print('antibird/get_latest_records error:%s' % e.message)
else:
print('antibird/get_latest_records error:%s' % str(e))
for item in ret:
idx = ret.index(item)
if item.has_key('picture') and isinstance(item['picture'], list):
for i in item['picture']:
idx1 = item['picture'].index(i)
item['picture'][idx1] = '/proxy/api/image/%s' % i
ret[idx] = item
return ret
def get_latest_records_by_imei(environ, querydict):
ret = get_latest_records(environ, querydict)
return json.dumps(ret, ensure_ascii=True, indent=4)
def get_equip_list(environ, querydict):
ret = ''
is_filter_used=False
if querydict.has_key('is_filter_used') and querydict['is_filter_used'] is True:
is_filter_used = True
equip_list = init_list(environ)
if not is_filter_used:
ret = json.dumps(equip_list, ensure_ascii=True, indent=4)
else:
exist = []
l = db_util.mongo_find(
gConfig['webgis']['mongodb']['database'],
'features',
{
"properties.webgis_type":"point_tower",
"properties.metals":{
"$elemMatch":{
"type":u"多功能驱鸟装置"
}
}
},
0,
'webgis'
)
for i in l:
for j in i['properties']['metals']:
if isinstance(j, dict) and j.has_key('imei'):
if not j['imei'] in exist:
exist.append(j['imei'])
while len(exist)>0:
i0 = exist[0]
for i in equip_list:
if i['imei'] == i0:
equip_list.remove(i)
exist.remove(i0)
break
ret = json.dumps(equip_list, ensure_ascii=True, indent=4)
return ret
def equip_tower_mapping(querydict):
ret = {}
if querydict.has_key('imei'):
l = db_util.mongo_find(
gConfig['webgis']['mongodb']['database'],
'features',
{
"properties.webgis_type":"point_tower",
"properties.metals":{
"$elemMatch":{
"type":u"多功能驱鸟装置",
"imei":querydict['imei']
}
}
},
0,
'webgis'
)
if len(l)>0:
obj = {}
obj['tower_id'] = l[0]['_id']
obj['name'] = l[0]['properties']['name']
obj['lng'] = l[0]['geometry']['coordinates'][0]
obj['lat'] = l[0]['geometry']['coordinates'][1]
obj['alt'] = l[0]['geometry']['coordinates'][2]
ret[querydict['imei']] = obj
else:
l = db_util.mongo_find(
gConfig['webgis']['mongodb']['database'],
'features',
{
"properties.webgis_type":"point_tower",
"properties.metals":{
"$elemMatch":{
"type":u"多功能驱鸟装置",
}
}
},
0,
'webgis'
)
for i in l:
for j in i['properties']['metals']:
if j.has_key('type') and j['type'] == u'多功能驱鸟装置' and j.has_key('imei') and len(j['imei'])>0:
obj = {}
obj['tower_id'] = i['_id']
obj['name'] = i['properties']['name']
obj['lng'] = i['geometry']['coordinates'][0]
obj['lat'] = i['geometry']['coordinates'][1]
obj['alt'] = i['geometry']['coordinates'][2]
ret[j['imei']] = obj
ret = json.dumps(ret, ensure_ascii=True, indent=4)
return ret
statuscode, headers, body = '200 OK', {}, ''
urls = gUrlMap.bind_to_environ(environ)
querydict, buf = get_querydict_by_GET_POST(environ)
endpoint, args = urls.match()
if args.has_key('_id') and isinstance(querydict, dict):
querydict['_id'] = args['_id']
if args.has_key('imei') and isinstance(querydict, dict):
querydict['imei'] = args['imei']
if args.has_key('records_num') and isinstance(querydict, dict):
querydict['records_num'] = args['records_num']
if endpoint == 'get_equip_list':
body = get_equip_list(environ, querydict)
elif endpoint == 'get_latest_records_by_imei':
body = get_latest_records_by_imei(environ, querydict)
elif endpoint == 'equip_tower_mapping':
body = equip_tower_mapping(querydict)
return statuscode, headers, body
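# Bayesian-network API: CRUD for nodes and domain ranges, Graphviz source export, unit
# probability reset, and prediction through bayes_util.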
def handle_bayesian(environ):
def get_collection(collection):
ret = None
db_util.mongo_init_client('webgis')
db = db_util.gClientMongo['webgis'][gConfig['webgis']['mongodb']['database']]
if not collection in db.collection_names(False):
ret = db.create_collection(collection)
else:
ret = db[collection]
return ret
# def convert_strkey_to_bool(obj):
# if isinstance(obj, list):
# for i in range(0, len(obj)):
# obj[i] = convert_strkey_to_bool(obj[i])
# if isinstance(obj, dict):
# for k in obj.keys():
# if k in ['true', u'true']:
# obj[True] = obj[k]
# del obj['true']
# del obj[u'true']
# elif k in ['false', u'false']:
# obj[False] = obj[k]
# del obj['false']
# del obj[u'false']
# obj[k] = convert_strkey_to_bool(obj[k])
#
# return obj
def save_by_id(querydict, collection_name):
ret = []
collection = get_collection(collection_name)
if isinstance(querydict, list):
ids = []
for i in querydict:
if i['_id'] is None:
del i['_id']
id = collection.save(db_util.add_mongo_id(i))
if id:
ids.append(id)
ret = list(collection.find({'_id':{'$in':ids}}))
elif isinstance(querydict, dict):
id = collection.save(db_util.add_mongo_id(querydict))
ret = collection.find_one({'_id':id})
ret = json.dumps(db_util.remove_mongo_id(ret), ensure_ascii=True, indent=4)
return ret
def delete_by_id(querydict, collection_name):
ret = ''
collection = get_collection(collection_name)
if querydict.has_key('_id'):
if isinstance(querydict['_id'], str) or isinstance(querydict['_id'], unicode):
existone = collection.find_one({'_id':db_util.add_mongo_id(querydict['_id'])})
if existone:
collection.remove({'_id':existone['_id']})
ret = json.dumps(db_util.remove_mongo_id(existone), ensure_ascii=True, indent=4)
else:
ret = json.dumps({'result':u'record_not_exist' }, ensure_ascii=True, indent=4)
if isinstance(querydict['_id'], list):
ids = db_util.add_mongo_id(querydict['_id'])
cond = {'_id':{'$in':ids}}
collection.remove(cond)
ret = json.dumps(db_util.remove_mongo_id(querydict['_id']), ensure_ascii=True, indent=4)
return ret
def bayesian_query_domains_range(querydict):
ret = []
collection = get_collection('bayesian_domains_range')
ret = list(collection.find({}))
ret = json.dumps(db_util.remove_mongo_id(ret), ensure_ascii=True, indent=4)
return ret
def bayesian_save_domains_range(querydict):
return save_by_id(querydict, 'bayesian_domains_range')
def bayesian_delete_domains_range(querydict):
return delete_by_id(querydict, 'bayesian_domains_range')
def bayesian_query_node(querydict):
ret = []
if querydict.has_key('line_name') and len(querydict['line_name']):
collection = get_collection('bayesian_nodes')
ret = list(collection.find({'line_name':querydict['line_name']}))
ret = json.dumps(db_util.remove_mongo_id(ret), ensure_ascii=True, indent=4)
return ret
def bayesian_query_graphiz(querydict):
ret = ''
if querydict.has_key('line_name') and len(querydict['line_name']):
g = create_bbn_by_line_name(querydict['line_name'])
dpi = 100
rankdir = 'LL'
if querydict.has_key('dpi') and len(querydict['dpi']):
dpi = int(querydict['dpi'])
if querydict.has_key('rankdir') and len(querydict['rankdir']):
rankdir = querydict['rankdir']
ret = g.get_graphviz_source(dpi, rankdir)
return enc(ret)
def bayesian_save_node(querydict):
return save_by_id(querydict, 'bayesian_nodes')
def bayesian_delete_node(querydict):
ret = '[]'
delete_by_id(querydict, 'bayesian_nodes')
collection = get_collection('bayesian_nodes')
if querydict.has_key('names'):
if isinstance(querydict['names'], list):
# names = [str(i) for i in querydict['names']]
names = querydict['names']
l = list(collection.find({'conditions': {'$elemMatch': {'$elemMatch': {'$elemMatch': {'$elemMatch':{'$in': names}}}}}}))
for i in l:
existlist = []
conditions = []
for ii in i['conditions']:
idx = i['conditions'].index(ii)
tmp = []
for iii in ii[0]:
# idx1 = ii[0].index(iii)
if not iii[0] in names:
tmp.append(iii)
ii[0] = tmp
i['conditions'][idx] = ii
for ii in i['conditions']:
key = ''
for iii in ii[0]:
key += iii[0] + ':' + iii[1] + '|'
if not key in existlist:
existlist.append(key)
conditions.append(ii)
i['conditions'] = conditions
collection.save(i)
if querydict.has_key('line_name') and len(querydict['line_name'])>0:
ret = bayesian_query_node(querydict)
return ret
def bayesian_query_predict(querydict):
ret = []
if querydict.has_key('line_name') and len(querydict['line_name']):
g = create_bbn_by_line_name(querydict['line_name'])
del querydict['line_name']
qd = {}
querymulti = False
for k in querydict.keys():
if isinstance(querydict[k], unicode):
qd[str(k)] = str(querydict[k])
elif isinstance(querydict[k], list) and k == u'line_state':
querymulti = True
else:
qd[str(k)] = querydict[k]
if querymulti:
for i in querydict['line_state']:
qd['line_state'] = str(i)
ret.append({'line_state':i, 'result':bayes_util.query_bbn_condition(g, **qd)})
else:
ret = bayes_util.query_bbn_condition(g, **qd)
ret = json.dumps(ret, ensure_ascii=True, indent=4)
return ret
def reset_unit_by_line_name(line_name):
collection = get_collection('bayesian_nodes')
units = list(collection.find({'line_name':line_name, 'name':{'$regex':'^unit_[0-9]$'}}))
data = bayes_util.get_state_examination_data_by_line_name(line_name)
o = bayes_util.calc_probability_unit(data)
for unit in units:
if o.has_key(unit['name']):
unit['conditions'] = o[unit['name']]
# print(unit['name'])
# print(unit['conditions'])
collection.save(unit)
ret = list(collection.find({'line_name':line_name}).sort('name', pymongo.ASCENDING))
return ret
def bayesian_reset_unit(querydict):
ret = []
if querydict.has_key('line_name') and len(querydict['line_name']):
ret = reset_unit_by_line_name(querydict['line_name'])
ret = json.dumps(db_util.remove_mongo_id(ret), ensure_ascii=True, indent=4)
return ret
def build_additional_condition(line_name, cond):
ret = cond
collection = get_collection('bayesian_nodes')
l = list(collection.find({'line_name':line_name}))
for node in l:
ret[node['name']] = node['conditions']
return ret
def create_bbn_by_line_name(line_name):
cond = bayes_util.build_state_examination_condition(line_name)
cond = build_additional_condition(line_name, cond)
g = None
if bayes_util.USE_C_MODULE:
print('using c-accelerate module...')
g = bayes_util.build_bbn_from_conditionals(cond)
else:
print('using pure-python module...')
g = bayes_util.build_bbn_from_conditionals_plus(cond)
return g
statuscode, headers, body = '200 OK', {}, ''
urls = gUrlMap.bind_to_environ(environ)
querydict, buf = get_querydict_by_GET_POST(environ)
endpoint, args = urls.match()
if args.has_key('_id') and isinstance(querydict, dict):
querydict['_id'] = args['_id']
if endpoint == 'bayesian_query_node':
body = bayesian_query_node(querydict)
elif endpoint == 'bayesian_save_node':
body = bayesian_save_node(querydict)
elif endpoint == 'bayesian_query_predict':
body = bayesian_query_predict(querydict)
elif endpoint == 'bayesian_reset_unit':
body = bayesian_reset_unit(querydict)
elif endpoint == 'bayesian_query_graphiz':
body = bayesian_query_graphiz(querydict)
headers['Content-Type'] = 'text/plain'
elif endpoint == 'bayesian_delete_node':
body = bayesian_delete_node(querydict)
elif endpoint == 'bayesian_save_domains_range':
body = bayesian_save_domains_range(querydict)
elif endpoint == 'bayesian_delete_domains_range':
body = bayesian_delete_domains_range(querydict)
elif endpoint == 'bayesian_query_domains_range':
body = bayesian_query_domains_range(querydict)
return statuscode, headers, body
headers = {}
headerslist = []
cookie_header = None
statuscode = '200 OK'
body = ''
path_info = environ['PATH_INFO']
if 'proxy.cgi' in path_info:
statuscode, headers, body = handle_proxy_cgi(environ)
elif path_info == '/test':
statuscode, headers, body = handle_test(environ)
elif path_info == '/get':
statuscode, headers, body = handle_get_method(environ)
elif path_info == '/post':
statuscode, headers, body = handle_post_method(environ)
elif path_info == '/wmts':
statuscode, headers, body = handle_wmts(environ)
elif path_info == '/tiles':
statuscode, headers, body = handle_tiles(environ)
elif '/arcgistile' in path_info:
statuscode, headers, body = handle_arcgistile(environ)
elif path_info == '/terrain/layer.json' or path_info[-8:] == '.terrain':
statuscode, headers, body = handle_terrain(environ)
#elif path_info[-8:] == '.terrain':
#return handle_terrain1(environ)
# elif path_info == '/wfs':
# statuscode, headers, body = handle_wfs(environ)
elif path_info =='/create_cluster' or path_info =='/kill_cluster':
statuscode, headers, body = handle_cluster(environ)
elif path_info == '/websocket':
statuscode, headers, body = handle_websocket(environ)
elif len(path_info)>6 and path_info[:6] == '/proxy':
statuscode, headers, body = proxy(environ)
headers['Cache-Control'] = 'no-cache'
# elif path_info == '/anti_bird_equip_list':
# statuscode, headers, body = anti_bird_equip_list(environ)
# elif path_info == '/anti_bird_equip_tower_mapping':
# statuscode, headers, body = anti_bird_equip_tower_mapping(environ)
# elif path_info == '/anti_bird_get_latest_records_by_imei':
# statuscode, headers, body = anti_bird_get_latest_records_by_imei(environ)
else:
if path_info[-1:] == '/':
path_info = gConfig['web']['indexpage']
if str(gConfig['webgis']['session']['enable_session'].lower()) == 'true' :
# and path_info in ['/login', '/logout', gConfig['web']['loginpage'], gConfig['web']['indexpage'], gConfig['web']['mainpage']]:
if gSessionStore is None:
gSessionStore = FilesystemSessionStore()
is_expire = False
with session_manager(environ):
sess, cookie_header, is_expire = session_handle(environ, gRequest, gSessionStore)
if path_info == str(gConfig['web']['unauthorizedpage']):
if not sess.has_key('ip'):
sess['ip'] = environ['REMOTE_ADDR']
gSessionStore.save_if_modified(sess)
headerslist.append(('Content-Type', str(gConfig['mime_type']['.html'])))
headerslist.append(cookie_header)
statuscode, headers, body = handle_static(environ, gConfig['web']['unauthorizedpage'])
start_response('401 Unauthorized', headerslist)
return [body]
if path_info == '/logout':
gSessionStore.delete(sess)
sess, cookie_header, is_expire = session_handle(environ, gRequest, gSessionStore)
headerslist.append(cookie_header)
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
start_response('200 OK', headerslist)
return [json.dumps({'result':u'ok'}, ensure_ascii=True, indent=4)]
if is_expire:
if not sess.has_key('ip'):
sess['ip'] = environ['REMOTE_ADDR']
gSessionStore.save_if_modified(sess)
headerslist.append(('Content-Type', str(gConfig['mime_type']['.html'])))
headerslist.append(cookie_header)
statuscode, headers, body = handle_static(environ, gConfig['web']['unauthorizedpage'])
start_response('401 Unauthorized', headerslist)
return [body]
# headerslist.append(('Location', str(gConfig['web']['expirepage'])))
# start_response('302 Redirect', headerslist)
# return ['']
# headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
# statuscode = '200 OK'
# body = json.dumps({'result':u'session_expired'}, ensure_ascii=True, indent=4)
if path_info == '/login':
user = handle_login(environ)
if user:
sess = gSessionStore.session_class(user, sess.sid, False)
sess['username'] = user['username']
cookie_header = set_cookie_data(gRequest, {'_id':user['_id'], 'username': user['username'], 'displayname': user['displayname']})
gSessionStore.save_if_modified(sess)
headerslist.append(cookie_header)
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
start_response('200 OK', headerslist)
return [json.dumps(sess, ensure_ascii=True, indent=4)]
else:
headerslist.append(cookie_header)
headerslist.append(('Content-Type', 'text/json;charset=' + ENCODING))
start_response('200 OK', headerslist)
return [json.dumps({'result':u'用户名或密码错误'}, ensure_ascii=True, indent=4)]
if path_info == str(gConfig['web']['mainpage']):
#401 Unauthorized
#if session_id is None or token is None:
headerslist.append(('Content-Type', str(gConfig['mime_type']['.html'])))
headerslist.append(cookie_header)
if sess is None or len(sess.keys())==0 or len(sess.sid)==0 or not sess.has_key('username'):
statuscode, headers, body = handle_static(environ, gConfig['web']['unauthorizedpage'])
statuscode = '401 Unauthorized'
start_response(statuscode, headerslist)
return [body]
if not is_expire and len(sess.sid)>0:
if 'state_examination' in path_info:
statuscode, headers, body = handle_state_examination(environ)
elif 'bayesian/' in path_info:
statuscode, headers, body = handle_bayesian(environ)
elif 'antibird/' in path_info:
statuscode, headers, body = handle_antibird(environ)
else:
statuscode, headers, body = handle_static(environ, path_info)
else:
if path_info == '/login' and str(gConfig['webgis']['session']['enable_session'].lower()) != 'true':
path_info = gConfig['web']['mainpage']
if 'state_examination/' in path_info:
statuscode, headers, body = handle_state_examination(environ)
elif 'antibird/' in path_info:
statuscode, headers, body = handle_antibird(environ)
elif 'bayesian/' in path_info:
statuscode, headers, body = handle_bayesian(environ)
else:
statuscode, headers, body = handle_static(environ, path_info)
#headkeys = set([i[0] for i in headerslist])
headers = CORS_header(headers)
if cookie_header:
headerslist.append(cookie_header)
for k in headers:
headerslist.append((k, headers[k]))
#print(headerslist)
# headerslist = add_to_headerlist(headerslist, 'Cache-Control', 'no-cache')
# print(headerslist)
start_response(statuscode, headerslist)
return [body]
def add_to_headerlist(headerslist, key, value):
ret = headerslist
existidx = -1
for i in ret:
if i[0] == key:
existidx = ret.index(i)
break
if existidx < 0:
ret.append((key, value))
else:
ret[existidx] = (key, value)
return ret
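# Example usage of add_to_headerlist (illustrative values, not part of the original handlers):
# setting an already-present key overwrites the earlier tuple instead of appending a duplicate.
if 0:
example_headers = [('Content-Type', 'text/html')]
example_headers = add_to_headerlist(example_headers, 'Cache-Control', 'no-cache')
example_headers = add_to_headerlist(example_headers, 'Content-Type', 'text/json;charset=' + ENCODING)
print(example_headers)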
def application_markdown(environ, start_response):
global gConfig, gRequest, gSessionStore
headers = {}
headerslist = []
path_info = environ['PATH_INFO']
if path_info == '/get':
statuscode, headers, body = handle_get_method(environ)
elif path_info == '/post':
statuscode, headers, body = handle_post_method(environ)
else:
if path_info[-1:] == '/':
path_info += gConfig['web']['indexpage']
statuscode, headers, body = handle_static(environ, path_info)
headers = CORS_header(headers)
for k in headers:
headerslist.append((k, headers[k]))
start_response(statuscode, headerslist)
return [body]
def handle_proxy_cgi(environ):
global gConfig, gHttpClient
method = environ['REQUEST_METHOD']
post_data = ''
if method == "POST":
qs = environ['PATH_INFO']
buf = environ['wsgi.input'].read()
post_data = urllib.unquote_plus(buf)
d = cgi.parse(None, environ)
if d.has_key("url"):
url = d["url"][0]
else:
url = 'http://XIEJUN-DESKTOP:88'
else:
fs = cgi.FieldStorage()
url = fs.getvalue('url', "http://XIEJUN-DESKTOP:88")
s = ''
headers = {'Content-Type': 'text/plain;charset=' + ENCODING}
try:
if url.startswith("http://") or url.startswith("https://"):
request = None
response = None
http = None
urlobj = URL(url)
if not gHttpClient.has_key('proxy_cgi'):
gHttpClient['proxy_cgi'] = HTTPClient(urlobj.host, port=urlobj.port, concurrency=100)
client = gHttpClient['proxy_cgi']
if method == "POST":
#length = int(environ["CONTENT_LENGTH"])
headers["Content-Type"] = environ["CONTENT_TYPE"]
response = client.post(urlobj.request_uri, post_data, headers)
else:
response = client.get(urlobj.request_uri)
if response:
h = str(response.info())
#if i.has_key("Content-Type"):
#print("Content-Type: %s" % (i["Content-Type"]))
hh = eval(h)
responseh = []
for i in hh:
if i[0] in ['Content-Type', 'Date', 'Server', ]:
responseh.append(i)
s = response.read()
client.close()
headers['Content-Length'] = str(len(s))
else:
s += "Illegal request."
except Exception, E:
s += "Status: 500 Unexpected Error"
s += "Content-Type: text/plain"
s += "Some unexpected error occurred. Error text was:%s" % E.message
return '200 OK', headers, s
def get_host_ip():
ret = []
if sys.platform == 'win32':
ret.append('127.0.0.1')
localIP = socket.gethostbyname(socket.gethostname())
#print ("local ip:%s " % localIP)
ipList = socket.gethostbyname_ex(socket.gethostname())
for i in ipList:
if i != localIP:
#if isinstance(i, str):
#print(re.findall('\d+\.\d+\.\d+\.\d+',i))
if isinstance(i, list):
for ii in i:
if len(re.findall('\d+\.\d+\.\d+\.\d+',ii))>0:
ret.append(ii)
#print("external IP:%s" % i )
elif 'linux' in sys.platform:
import commands
ips = commands.getoutput("/sbin/ifconfig | grep -i \"inet\" | grep -iv \"inet6\" | awk {'print $2'} | sed -ne 's/addr\:/ /p'")
arr = ips.split('\n')
for i in arr:
ret.append(i.strip())
return ret
def clear_tmp():
tmp_dir = r'C:\Users\Jeffrey\AppData\Local\ESRI\Local Caches\MapCacheV1'
if os.path.exists(tmp_dir):
shutil.rmtree(tmp_dir)
tmp_dir = r'C:\Users\Jeffrey\AppData\Local\ESRI\Local Caches\GlobeCache'
if os.path.exists(tmp_dir):
shutil.rmtree(tmp_dir)
def get_scaleDenominator(zoomlist):
#tileMatrixMaxX = tileMatrixMinX + tileWidth * (scaleDenominator * pixelSize / metersPerUnit) * matrixWidth
#tileMatrixMinY = tileMatrixMaxY - tileHeight * (scaleDenominator * pixelSize / metersPerUnit) * matrixHeight
#tileWidth * (scaleDenominator * pixelSize / metersPerUnit) * matrixWidth = tileMatrixMaxX - tileMatrixMinX
#tileHeight * (scaleDenominator * pixelSize / metersPerUnit) * matrixHeight = tileMatrixMaxY - tileMatrixMinY
#scaleDenominator * pixelSize / metersPerUnit = (tileMatrixMaxX - tileMatrixMinX)/(tileWidth * matrixWidth)
#scaleDenominator * pixelSize / metersPerUnit = (tileMatrixMaxY - tileMatrixMinY)/(tileHeight * matrixHeight)
#scaleDenominator * pixelSize = metersPerUnit * (tileMatrixMaxX - tileMatrixMinX)/(tileWidth * matrixWidth)
#scaleDenominator * pixelSize = metersPerUnit * (tileMatrixMaxY - tileMatrixMinY)/(tileHeight * matrixHeight)
#scaleDenominator = metersPerUnit/pixelSize * (tileMatrixMaxX - tileMatrixMinX)/(tileWidth * matrixWidth)
#scaleDenominator = metersPerUnit/pixelSize * (tileMatrixMaxY - tileMatrixMinY)/(tileHeight * matrixHeight)
metersPerUnit = float(gConfig['wmts']['metersPerUnit'])
pixelSize = float(gConfig['wmts']['pixelSize'])
tileWidth,tileHeight = 256.0, 256.0
tileMatrixMinX, tileMatrixMaxX = (26.0, 102.0), (26.0, 104.0)
tileMatrixMinY, tileMatrixMaxY = (24.0, 102.0), (26.0, 102.0)
for i in zoomlist:
#print('%d=%d' % (i , mapUtils.tiles_on_level(i)))
#mapUtils.countDistanceFromLatLon()
matrixHeight = matrixWidth = mapUtils.tiles_on_level(i)
print('%d=%d' % (i , matrixHeight))
#scaleDenominatorX = metersPerUnit/pixelSize * mapUtils.countDistanceFromLatLon(tileMatrixMaxX , tileMatrixMinX) * 1000./(tileWidth * matrixWidth)
#scaleDenominatorY = metersPerUnit/pixelSize * mapUtils.countDistanceFromLatLon(tileMatrixMaxY , tileMatrixMinY) * 1000./(tileHeight * matrixHeight)
#print('scaleDenominatorX=%f, scaleDenominatorY=%f' % (scaleDenominatorX, scaleDenominatorY))
#scaleDenominator = metersPerUnit/pixelSize * mapUtils.countDistanceFromLatLon(tileMatrixMaxY , tileMatrixMinY) * 1000. /(tileHeight * matrixHeight)
scaleDenominator = metersPerUnit/pixelSize * mapUtils.countDistanceFromLatLon(tileMatrixMaxY , tileMatrixMinY) /(tileHeight * matrixHeight)
print('scaleDenominator=%f' % scaleDenominator)
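# Typical call (illustrative, not executed): print the WMTS scale denominator for zoom levels 0..18.
# The metersPerUnit / pixelSize values come from gConfig['wmts']; the usual OGC defaults are
# a 0.00028 m rendering pixel and roughly 111319.49 m per degree for geographic CRSs.
if 0:
get_scaleDenominator(range(0, 19))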
def ToGeographic(mercatorX_lon, mercatorY_lat):
if abs(mercatorX_lon) < 180 and abs(mercatorY_lat) < 90:
return 0, 0
if abs(mercatorX_lon) > 20037508.3427892 or abs(mercatorY_lat) > 20037508.3427892 :
return 0, 0
x = mercatorX_lon;
y = mercatorY_lat;
num3 = x / 6378137.0;
num4 = num3 * 57.295779513082323;
num5 = math.floor(float(num4 + 180.0) / 360.0)
num6 = num4 - (num5 * 360.0)
num7 = 1.5707963267948966 - (2.0 * math.atan(math.exp((-1.0 * y) / 6378137.0)))
lon = num6
lat = num7 * 57.295779513082323
return lon, lat
def ToWebMercator(lon, lat):
if abs(lon) > 180 or abs(lat) > 90:
return 0, 0
num = lon * 0.017453292519943295
x = 6378137.0 * num
a = lat * 0.017453292519943295
mercatorX_lon = x
mercatorY_lat = 3189068.5 * math.log((1.0 + math.sin(a)) / (1.0 - math.sin(a)))
return mercatorX_lon, mercatorY_lat
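# Round-trip sanity check for the two projections above (illustrative coordinates, not executed):
# a lon/lat pair pushed through ToWebMercator and back through ToGeographic should reproduce
# the input to within floating-point error.
if 0:
mx, my = ToWebMercator(102.7, 25.0)
lon, lat = ToGeographic(mx, my)
print('mercator=(%.1f, %.1f) -> lon/lat=(%.6f, %.6f)' % (mx, my, lon, lat))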
def handle_requset_sync(obj):
ret = {'result':''}
if obj.has_key('area') and obj['area'] and len(obj['area'])>0:
kmgd, kmgdgeo, kmgdgeotmp = db_util.create_sde_conn(obj['area'])
if obj.has_key('odbc'):
if obj['odbc'] == 'TABLE_LINE':
l = db_util.odbc_get_records('TABLE_LINE', '1=1', obj['area'])
ret['result']= l
elif obj['odbc'] == 'TABLE_TOWER':
l = db_util.odbc_get_records('TABLE_TOWER', '1=1', obj['area'])
ret['result']= l
elif obj.has_key('op'):
if obj['op']=='download_task':
condition = '1=1'
if obj.has_key('team_id'):
condition += " AND team_id='%s'" % obj['team_id']
l = db_util.odbc_get_records('VIEW_TASK_ITEM', condition, obj['area'])
ret['result']= l
#elif obj['op']=='get_latest_stamp':
#f = '%Y-%m-%d %H:%M:%S'
#if obj.has_key('format'):
#f = obj['format']
#ret['result']= db_util.get_latest_stamp(f, obj['area'])
#elif obj['op']=='get_latest_3dd_stamp':
#f = '%Y-%m-%d %H:%M:%S'
#if obj.has_key('format'):
#f = obj['format']
#ret['result']= db_util.get_latest_3dd_stamp(f, obj['area'])
else:
print('unknown area')
ret['result'] = []
return ret
def soap_login():
client = SoapClient(wsdl='%s?wsdl' % gConfig['webservice']['location'], namespace = gConfig['webservice']['namespace'], timeout=int(gConfig['webservice']['timeout']))
response = client.login(username='', password='')
result = response['Result']
return result
def parse_thunder_counter_xml(xml):
ret = []
root = etree.fromstring(xml)
if root:
for Flash in root:
obj = {}
for child in Flash:
obj[child.tag] = child.text
ret.append(obj)
return ret
def soap_GetFlashofDate(start_time, end_time):
ret = {}
try:
client = SoapClient(wsdl='%s?wsdl' % gConfig['webservice']['location'], namespace = gConfig['webservice']['namespace'], timeout=int(gConfig['webservice']['timeout']))
response = client.GetFlashofDate(in0=start_time, in1=end_time)
result = response['Result']
ret = parse_thunder_counter_xml(result)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret['err'] = sys.exc_info()[1].message
if hasattr(sys.exc_info()[1], 'reason'):
ret['err'] = str(sys.exc_info()[1].reason)
return ret
def soap_GetFlashofEnvelope(start_time, end_time, lng1, lng2, lat1, lat2):
ret = {}
try:
client = SoapClient(wsdl='%s?wsdl' % gConfig['webservice']['location'], namespace = gConfig['webservice']['namespace'], timeout=int(gConfig['webservice']['timeout']))
response = client.GetFlashofEnvelope(in0=start_time, in1=end_time, in2=lng1, in3=lng2, in4=lat1, in5=lat2)
result = response['Result']
ret = parse_thunder_counter_xml(result)
except:
if hasattr(sys.exc_info()[1], 'message'):
ret['err'] = sys.exc_info()[1].message
if hasattr(sys.exc_info()[1], 'reason'):
ret['err'] = str(sys.exc_info()[1].reason)
return ret
def delete_expired_session(interval):
global gSessionStore
while 1:
gevent.sleep(interval)
if gSessionStore:
#print('session recycle checking')
gSessionStore.delete_expired_list()
ws_send('session_list', ws_session_query())
def joinedqueue_consumer_pay():
global gConfig, gJoinableQueue
interval = float(gConfig['pay_platform']['queue']['queue_consume_interval'])
while 1:
gevent.sleep(interval)
item = None
try:
item = gJoinableQueue.get()
except:
item = None
if item:
try:
sign_and_send(item['thirdpay'], item['method'], item['url'], item['data'])
finally:
gJoinableQueue.task_done()
def chat_offline_save_log(obj):
global gConfig, gClientMongo
def get_collection(collection):
ret = None
db_util.mongo_init_client('chat_platform')
db = db_util.gClientMongo['chat_platform'][gConfig['chat_platform']['mongodb']['database']]
if not collection in db.collection_names(False):
ret = db.create_collection(collection)
else:
ret = db[collection]
return ret
id = None
if obj['op'] not in ['chat/online', 'chat/offline', 'chat/info/online', 'chat/info/offline', 'chat/request/contact/remove', 'chat/request/group/quit'] and obj.has_key('to'):
offlinecol = 'chat_log_offline'
if gConfig['chat_platform']['mongodb'].has_key('collection_chat_log_offline'):
offlinecol = gConfig['chat_platform']['mongodb']['collection_chat_log_offline']
collection = get_collection(offlinecol)
id = collection.save(db_util.add_mongo_id(obj))
return id
def chat_save_log(obj):
global gConfig, gClientMongo
def get_collection(collection):
ret = None
db_util.mongo_init_client('chat_platform')
db = db_util.gClientMongo['chat_platform'][gConfig['chat_platform']['mongodb']['database']]
if not collection in db.collection_names(False):
ret = db.create_collection(collection)
else:
ret = db[collection]
return ret
id = None
if obj.has_key('op') and obj['op'] in ['chat/chat', 'chat/online', 'chat/offline']:
collection = get_collection(gConfig['chat_platform']['mongodb']['collection_chat_log'])
# if obj.has_key('timestamp'):
# obj['timestamp'] = datetime.datetime.fromtimestamp(obj['timestamp']/1000).strftime('%Y-%m-%d %H:%M:%S')
if obj['op'] in ['chat/online', 'chat/offline']:
obj1 = copy.deepcopy(obj)
for k in obj1.keys():
if not k in ['from', 'timestamp', 'op', 'to']:
del obj1[k]
if obj1.has_key('_id'):
del obj1['_id']
id = collection.save(db_util.add_mongo_id(obj1))
else:
id = collection.save(db_util.add_mongo_id(obj))
return id
def joinedqueue_consumer_chat():
global gConfig, gJoinableQueue, gWebSocketsMap
interval = float(gConfig['chat_platform']['queue']['queue_consume_interval'])
while 1:
gevent.sleep(interval)
item = None
try:
item = gJoinableQueue.get()
except:
item = None
if item:
try:
g = gevent.spawn(chat_save_log, item)
k = item['to']
if gWebSocketsMap.has_key(k):
for ws in gWebSocketsMap[k]:
if not ws.closed:
ws.send(json.dumps(item, ensure_ascii=True, indent=4))
else:
gevent.spawn(chat_offline_save_log, item)
finally:
gJoinableQueue.task_done()
def tcp_reconnect_check(interval=1):
global gConfig, gTcpReconnectCounter, gTcpSock
tcp_reconnect_threshold = int(gConfig['webgis']['anti_bird']['tcp_reconnect_threshold'])
gTcpReconnectCounter = tcp_reconnect_threshold
while 1:
gTcpReconnectCounter += interval
#print(gTcpReconnectCounter)
if gTcpReconnectCounter > tcp_reconnect_threshold:
gTcpReconnectCounter = 0
print('[%s]Trying to reconnect to anti-bird tcpserver [%s:%s]...' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), gConfig['webgis']['anti_bird']['tcp_host'], gConfig['webgis']['anti_bird']['tcp_port']))
if gTcpSock:
if not gTcpSock.closed:
gTcpSock.close()
del gTcpSock
gTcpSock = None
gevent.sleep(interval)
def tcp_print_exception():
e = sys.exc_info()[1]
message = ''
if hasattr(e, 'strerror'):
message = e.strerror
if message is None and hasattr(e, 'message'):
message = e.message
elif hasattr(e, 'message'):
message = e.message
else:
message = str(e)
print('connecting anti-bird server fail:%s' % message)
def tcp_connect():
global gConfig
tcp_host = gConfig['webgis']['anti_bird']['tcp_host']
tcp_port = int(gConfig['webgis']['anti_bird']['tcp_port'])
timeout = 5.0
try:
timeout = float(gConfig['webgis']['anti_bird']['tcp_timeout'])
except:
timeout = 5.0
sock = socket.create_connection((tcp_host, tcp_port), timeout=timeout)
sock.settimeout(None)
#sock = socket.create_connection((tcp_host, tcp_port))
sock.send("bird")
return sock
def tcp_recv(sock=None):
global gConfig, gWebSocketsMap, gTcpReconnectCounter, gTcpSock
def get_packet(astr):
ret = ''
rest = astr
if '###' in astr:
idx0 = astr.index('###') + 3
astr = astr[idx0:]
if '###' in astr:
idx1 = astr.index('###')
ret = astr[:idx1]
rest = astr[idx1+3:]
return ret, rest
def get_packets(astr):
ret = []
p, rest = get_packet(astr)
while len(p)>0:
ret.append(p)
p, rest = get_packet(rest)
return ret, rest
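# Framing note: packets arrive wrapped as '###<payload>###'. With an illustrative payload,
# get_packet('###860123456789###') returns ('860123456789', ''); any trailing partial data
# stays in the returned rest (and hence in recvstr) until the next recv completes it.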
def send_to_client(packets):
for imei in packets:
try:
obj = {'imei':imei}
for k in gWebSocketsMap.keys():
ws = gWebSocketsMap[k]
if not ws.closed:
ws.send(json.dumps(obj, ensure_ascii=True, indent=4))
except:
e = sys.exc_info()[1]
if hasattr(e, 'message'):
print('send_to_client error:%s' % e.message)
else:
print('send_to_client error:%s' % str(e))
def save_to_cache(astr):
pass
MAX_MSGLEN = int(gConfig['webgis']['anti_bird']['max_msg_len'])
tcp_reconnect_threshold = int(gConfig['webgis']['anti_bird']['tcp_reconnect_threshold'])
recvstr = ''
while 1:
try:
if gTcpSock is None:
gTcpSock = tcp_connect()
if gTcpSock and not gTcpSock.closed:
buf = bytearray(b"\n" * MAX_MSGLEN)
gTcpSock.recv_into(buf)
recvstr += buf.strip().decode("utf-8")
if len(recvstr)>0:
gTcpReconnectCounter = 0;
print('[%s]%s' % (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), recvstr))
packets, recvstr = get_packets(recvstr)
if gConfig['webgis'].has_key('anti_bird') and gConfig['webgis']['anti_bird'].has_key('update_to_cache') and gConfig['webgis']['anti_bird']['update_to_cache'].lower() == 'true':
save_to_cache(packets)
send_to_client(packets)
except:
recvstr = ''
tcp_print_exception()
if gTcpSock:
if not gTcpSock.closed:
gTcpSock.close()
del gTcpSock
gTcpSock = None
gevent.sleep(0.01)
def cycles_task():
global gConfig, gJoinableQueue
if gConfig['wsgi']['application'].lower() == 'authorize_platform':
gevent.spawn(delete_expired_session, int(gConfig['authorize_platform']['session']['session_cycle_check_interval']))
elif gConfig['wsgi']['application'].lower() == 'pay_platform' and gJoinableQueue:
gevent.spawn(joinedqueue_consumer_pay)
elif gConfig['wsgi']['application'].lower() == 'chat_platform' and gJoinableQueue:
gevent.spawn(joinedqueue_consumer_chat)
elif gConfig['wsgi']['application'].lower() == 'webgis':
if gConfig['webgis']['anti_bird'].has_key('enable_fetch') and gConfig['webgis']['anti_bird']['enable_fetch'].lower() == 'true':
interval = 1
if gConfig['webgis']['anti_bird'].has_key('cycle_interval'):
interval = int(gConfig['webgis']['anti_bird']['cycle_interval'])
gevent.spawn(tcp_recv, None)
gevent.spawn(tcp_reconnect_check, interval)
def mainloop_single( port=None, enable_cluster=False, enable_ssl=False):
global gConfig
gen_model_app_cache()
server = None
app = None
key = 'application_' + gConfig['wsgi']['application']
if globals().has_key(key):
print('application ready to start:%s' % gConfig['wsgi']['application'])
app = globals()[key]
else:
print('unknown application:%s' % gConfig['wsgi']['application'])
return
cycles_task()
if port and not enable_cluster:
if enable_ssl:
print('listening at host 127.0.0.1, port %d with ssl crypted' % port)
server = pywsgi.WSGIServer(('127.0.0.1', port), app, handler_class = WebSocketHandler, keyfile = gConfig['listen_port']['keyfile'], certfile = gConfig['listen_port']['certfile'])
else:
print('listening at host 127.0.0.1, port %d' % port)
server = pywsgi.WSGIServer(('127.0.0.1', port), app, handler_class = WebSocketHandler)
server.start()
server.serve_forever()
else:
if enable_ssl:
pport = port
if not pport:
pport = gConfig['listen_port']['ssl_port']
else:
pport = port
if not pport:
pport = gConfig['listen_port']['port']
host_list = get_host_ip()
admin = ''
if enable_cluster:
admin = 'cluster manager '
print('%slistening at host %s, port %s' % (admin, str(host_list), str(pport)))
servers = []
#if gConfig['webservice']['enable'] in [u'true', u'TRUE']:
#h, p = gConfig['webservice']['host'], int(gConfig['webservice']['port'])
#print('listening webservice at http://%s:%d/webservice' % (h, p))
#server = pywsgi.WSGIServer((h, p), get_wsapplication())
#servers.append(server)
#server.start()
if len(host_list)>0:
idx = 0
if isinstance(pport, int):
for i in host_list:
if enable_ssl:
server = pywsgi.WSGIServer((i, pport), app, handler_class = WebSocketHandler, keyfile = gConfig['listen_port']['keyfile'], certfile = gConfig['listen_port']['certfile'])
else:
server = pywsgi.WSGIServer((i, pport), app, handler_class = WebSocketHandler)
servers.append(server)
if idx < len(host_list)-1:
server.start()
idx += 1
servers[-1].serve_forever()
elif isinstance(pport, unicode):
for i in host_list:
if enable_ssl:
server = pywsgi.WSGIServer((i, int(pport)), app, handler_class = WebSocketHandler, keyfile = gConfig['listen_port']['keyfile'], certfile = gConfig['listen_port']['certfile'])
else:
server = pywsgi.WSGIServer((i, int(pport)), app, handler_class = WebSocketHandler)
servers.append(server)
if idx < len(host_list)-1:
server.start()
idx += 1
servers[-1].serve_forever()
elif isinstance(pport, list):
for i in host_list:
for j in pport:
if enable_ssl:
server = pywsgi.WSGIServer((i, int(j)), app, handler_class = WebSocketHandler, keyfile = gConfig['listen_port']['keyfile'], certfile = gConfig['listen_port']['certfile'])
else:
server = pywsgi.WSGIServer((i, int(j)), app, handler_class = WebSocketHandler)
servers.append(server)
if idx < len(host_list) * len(pport)-1:
server.start()
idx += 1
servers[-1].serve_forever()
else:
print('wrong host or port in %s' % db_util.CONFIGFILE)
return server
def mainloop_nginx(popen):
while True:
stdoutdata, stderrdata = popen.communicate()
#if stdoutdata:
#queue.put(stdoutdata)
gevent.sleep(0.01)
def mainloop_manager(queue):
while True:
qget = queue.get()
if qget:
print(qget)
gevent.sleep(0.01)
def create_cluster():
#global gConfig, gClusterProcess
conf = ''
with open(gConfig['cluster']['nginx_conf_template']) as f:
conf = f.read()
rg = gConfig['cluster']['port_range']
node_list = '\n'
for port in range(int(rg[0]), int(rg[1]), int(rg[2])):
node_list += ' server 127.0.0.1:%d;\n' % port
listen_port = gConfig['listen_port']['port']
access_log = gConfig['cluster']['nginx_log']
host = get_host_ip()
host.append('localhost')
server_name = ' '.join(host)
conf = conf.replace('[node_list]', str(node_list))
conf = conf.replace('[listen_port]', str(listen_port))
conf = conf.replace('[access_log]', str(access_log))
conf = conf.replace('[server_name]', str(server_name))
p = os.path.abspath(gConfig['cluster']['nginx_conf_template'])
p = os.path.join(os.path.dirname(p), 'nginx.conf')
#print(conf)
with open(p, 'w') as f:
f.write(conf)
idx = 0
for port in range(int(rg[0]), int(rg[1]), int(rg[2])):
print('process%d is starting...' % idx)
proc = Process(target=mainloop_single, args=(port, False, False))
proc.start()
#gClusterProcess[str(proc.pid)] = proc
idx += 1
print('nginx is starting...')
popen = subprocess.Popen([os.path.abspath(gConfig['cluster']['nginx_exe']), '-c', p ])
def get_pid_from_name(name):
out = subprocess.check_output(['tasklist','/SVC'])
#print(out)
l = out.split('\r\n')
findlist = []
for i in l:
arr = i.split(' ')
for j in arr:
if len(j)>0 and name in j:
for k in arr:
if arr.index(k)==0:
continue
if len(k)>0:
try:
pid = int(k)
findlist.append(pid)
break
except:
continue
break
#print(findlist)
if current_process().pid in findlist:
findlist.remove(current_process().pid)
return findlist
def kill_cluster():
#global gClusterProcess
print('kill nginx...')
for pid in get_pid_from_name('nginx'):
try:
out = subprocess.check_output(['taskkill', '/F', '/PID', str(pid), '/T'])
print(out)
except:
pass
for pid in get_pid_from_name('python'):
print('kill python.exe[%s]...' % pid)
out = subprocess.check_output(['taskkill', '/F', '/PID', str(pid), '/T'])
print(out)
#for pid in gClusterProcess.keys():
#print('kill python.exe[%s]...' % pid)
#gClusterProcess[pid].terminate()
print('kill done')
def create_self_signed_cert(cert_dir, year=10):
from OpenSSL import crypto, SSL
CERT_FILE = "ssl_certificate.crt"
KEY_FILE = "ssl_self_signed.key"
if not os.path.exists(os.path.join(cert_dir, CERT_FILE)) or not os.path.exists(os.path.join(cert_dir, KEY_FILE)):
k = crypto.PKey()
k.generate_key(crypto.TYPE_RSA, 4096)
cert = crypto.X509()
cert.get_subject().C = "AQ"
cert.get_subject().ST = "State"
cert.get_subject().L = "City"
cert.get_subject().O = "Company"
cert.get_subject().OU = "Organization"
cert.get_subject().CN = socket.gethostname()
cert.set_serial_number(1000)
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(year*365*24*60*60)
cert.set_issuer(cert.get_subject())
cert.set_pubkey(k)
cert.sign(k, 'sha1')
with open(os.path.join(cert_dir, CERT_FILE), "wt") as f:
f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
with open(os.path.join(cert_dir, KEY_FILE), "wt") as f:
f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k))
#create_self_signed_cert('.')
print('Create SSL key and cert done')
else:
print('SSL key and cert already exist')
def gen_model_app_cache():
global gConfig
if not gConfig.has_key('web_cache'):
return
s = 'CACHE MANIFEST\n'
s += '#' + gConfig['web_cache']['version'] + '\n'
if gConfig['web_cache']['gltf_cache_enable'].lower() == u'true':
modelsdir = os.path.join(STATICRESOURCE_DIR, 'gltf')
if not os.path.exists(modelsdir):
return
l = os.listdir(modelsdir)
for i in l:
s += '/gltf/' + i + '\n'
file_or_dir_cache = gConfig['web_cache']['file_or_dir_cache']
if len(file_or_dir_cache) > 0 :
for root, dirs, files in os.walk(STATICRESOURCE_DIR, topdown=False):
for name in dirs:
if name in file_or_dir_cache:
p = os.path.join(root, name)
for root1, dirs1, files1 in os.walk(p, topdown=False):
for name1 in files1:
p1 = os.path.join(root1, name1)
p1 = p1.replace(STATICRESOURCE_DIR, '').replace('\\', '/')
s += p1 + '\n'
for name in files:
if name in file_or_dir_cache:
p = os.path.join(root, name)
p = p.replace(STATICRESOURCE_DIR, '').replace('\\', '/')
s += p + '\n'
s += 'NETWORK:\n'
s += '*\n'
with open(os.path.join(STATICRESOURCE_DIR, 'kmgd.appcache'), 'w') as f:
f.write(s)
if __name__=="__main1__":
freeze_support()
options = db_util.init_global()
#print(options)
init_global()
s = get_sign_alipay(u'dsadsadsadsadsadsa')
print(s)
print(len(s))
#print(gSecurityConfig)
#key = 'application_' + gConfig['wsgi']['application']
#if globals().has_key(key):
#app = globals()[key]
#else:
#print('unknown application:%s' % gConfig['wsgi']['application'])
if __name__=="__main__":
freeze_support()
options = db_util.init_global()
init_global()
if options.signcert_enable:
create_self_signed_cert( options.signcert_directory, options.signcert_year)
elif options.batch_download_tile_enable:
db_util.command_batch_tile_download(options)
else:
if options.cluster_enable:
mainloop_single(int(gConfig['cluster']['manager_port']), True, False)
else:
if gConfig['listen_port']['enable_ssl'].lower() == u'true':
port = 443
try:
port = int(gConfig['listen_port']['ssl_port'])
except:
pass
mainloop_single(port, False, True)
else:
mainloop_single()
class Win32ServiceHandler(object):
# no parameters are permitted; all configuration should be placed in the
# configuration file and handled in the Initialize() method
def __init__(self):
pass
# called when the service is starting
def Initialize(self, configFileName):
self.server = None
self.stopEvent = threading.Event()
self.stopRequestedEvent = threading.Event()
# called when the service is starting immediately after Initialize()
# use this to perform the work of the service; don't forget to set or check
# for the stop event or the service GUI will not respond to requests to
# stop the service
def Run(self):
#self.stopRequestedEvent.wait()
self.stopEvent.set()
init_global()
self.server = mainloop_single()
# called when the service is being stopped by the service manager GUI
def Stop(self):
self.stopRequestedEvent.set()
self.stopEvent.wait()
if self.server:
self.server.stop()
|
"""Main urls.py file for project."""
from django.conf.urls import patterns, include, url
from django.contrib import admin
import accounts.urls
import core.urls
import logs.urls
import djangonumerics.urls
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'', include(core.urls)),
url(r'^logs/', include(logs.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include(accounts.urls)),
url(r'^numerics/', include(djangonumerics.urls)),
)
|
from spynnaker.pyNN.models.neuron.synapse_types.synapse_type_exponential \
import get_exponential_decay_and_init
from spynnaker.pyNN.models.neural_properties.neural_parameter \
import NeuronParameter
from spynnaker.pyNN.models.neuron.synapse_types.abstract_synapse_type \
import AbstractSynapseType
from spynnaker.pyNN.utilities import utility_calls
from data_specification.enums.data_type import DataType
class SynapseTypeDualExponential(AbstractSynapseType):
def __init__(self, n_neurons, machine_time_step, tau_syn_E, tau_syn_E2,
tau_syn_I):
AbstractSynapseType.__init__(self)
self._n_neurons = n_neurons
self._machine_time_step = machine_time_step
self._tau_syn_E = utility_calls.convert_param_to_numpy(
tau_syn_E, n_neurons)
self._tau_syn_E2 = utility_calls.convert_param_to_numpy(
tau_syn_E2, n_neurons)
self._tau_syn_I = utility_calls.convert_param_to_numpy(
tau_syn_I, n_neurons)
@property
def tau_syn_E(self):
return self._tau_syn_E
@tau_syn_E.setter
def tau_syn_E(self, tau_syn_E):
self._tau_syn_E = utility_calls.convert_param_to_numpy(
tau_syn_E, self._n_neurons)
@property
def tau_syn_E2(self):
return self._tau_syn_E2
@tau_syn_E2.setter
def tau_syn_E2(self, tau_syn_E2):
self._tau_syn_E2 = utility_calls.convert_param_to_numpy(
tau_syn_E2, self._n_neurons)
@property
def tau_syn_I(self):
return self._tau_syn_I
@tau_syn_I.setter
def tau_syn_I(self, tau_syn_I):
self._tau_syn_I = utility_calls.convert_param_to_numpy(
tau_syn_I, self._n_neurons)
def get_n_synapse_types(self):
return 3
def get_synapse_id_by_target(self, target):
if target == "excitatory":
return 0
elif target == "excitatory2":
return 1
elif target == "inhibitory":
return 2
return None
def get_synapse_targets(self):
return "excitatory", "excitatory2", "inhibitory"
def get_n_synapse_type_parameters(self):
return 6
def get_synapse_type_parameters(self):
e_decay, e_init = get_exponential_decay_and_init(
self._tau_syn_E, self._machine_time_step)
e_decay2, e_init2 = get_exponential_decay_and_init(
self._tau_syn_E2, self._machine_time_step)
i_decay, i_init = get_exponential_decay_and_init(
self._tau_syn_I, self._machine_time_step)
return [
NeuronParameter(e_decay, DataType.UINT32),
NeuronParameter(e_init, DataType.UINT32),
NeuronParameter(e_decay2, DataType.UINT32),
NeuronParameter(e_init2, DataType.UINT32),
NeuronParameter(i_decay, DataType.UINT32),
NeuronParameter(i_init, DataType.UINT32)
]
def get_n_cpu_cycles_per_neuron(self):
# A guess
return 100
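# Minimal usage sketch (illustrative values, not part of the original module); the machine
# time step is typically expressed in microseconds elsewhere in sPyNNaker, so 1000 ~ 1 ms.
if __name__ == '__main__':
synapse_type = SynapseTypeDualExponential(
n_neurons=10, machine_time_step=1000,
tau_syn_E=5.0, tau_syn_E2=2.5, tau_syn_I=5.0)
print(synapse_type.get_synapse_targets())
print(synapse_type.get_synapse_id_by_target("excitatory2"))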
|
import elasticsearch, elasticsearch.helpers
import iconclass
from django.conf import settings
import redis
import time
import json
def go():
es = elasticsearch.Elasticsearch()
esi = elasticsearch.client.IndicesClient(es)
esi.delete(index=settings.ES_INDEX_NAME + '_en', ignore=[400, 404])  # tolerate a missing index on the first run
init_index('en')
def init_index(language):
ES_MAPPINGS = {
"en" : { "notation" : {
"_source": {"enabled": False },
"properties" : {
"notation": {"type": "string", "store": True, "index": "not_analyzed" },
"txt": {"type": "string", "store": False, "analyzer": "english"},
"iskey": {"type": "boolean", "store": False, "index": "not_analyzed" }
}
}
}
}
if language not in ES_MAPPINGS:
raise Exception('Language %s not found in ES_MAPPINGS %s' % (language, ES_MAPPINGS.keys()))
esi = elasticsearch.client.IndicesClient(elasticsearch.Elasticsearch())
esi.create(index=settings.ES_INDEX_NAME + '_' + language, body={"mappings":ES_MAPPINGS.get(language)})
def ixable(obj, language):
path_texts = [p.get('txt', {}).get(language, u'') for p in iconclass.get_list(obj.get('p', [])) if p]
o = {}
o['_index'] = settings.ES_INDEX_NAME + '_' + language
o['_type'] = 'notation'
o['_id'] = hash(obj['n'])
o['_source'] = {
'txt': '\n'.join(path_texts),
'notation': obj['n']
}
if obj['n'].find('(+') > 0:
o['_source']['iskey'] = True
return o
def ixable_iterator(notation, language, skip_keys=False):
obj = iconclass.get(notation)
if not obj: return
yield ixable(obj, language)
for k in obj.get('c', []):
if skip_keys and k.find('(+') > 0: continue
for kk in ixable_iterator(k, language):
yield kk
def fill_redis_q(notation, language):
redis_c = redis.StrictRedis()
count = 0
q_size = 0  # lpush below returns the queue length; stay at 0 if nothing gets queued
for x in ixable_iterator(notation, language):
q_size = redis_c.lpush(settings.REDIS_PREFIX + '_ic_index_q', json.dumps(x))
count += 1
return q_size, count
def index_iterator():
redis_c = redis.StrictRedis()
while True:
tmp = redis_c.lpop(settings.REDIS_PREFIX + '_ic_index_q')
if not tmp: break
yield json.loads(tmp)
def index():
success_count, errors = elasticsearch.helpers.bulk(elasticsearch.Elasticsearch(),
index_iterator(),
chunk_size=9999)
return success_count, errors
def redis_q_velocity():
'''Check the Redis index q to see how the size changes over time. This velocity will indicate growth/shrinking
'''
redis_c = redis.StrictRedis()
last_size = 0
size_diffs = []
for i in range(10):
size = redis_c.llen(settings.REDIS_PREFIX + '_ic_index_q')
size_diffs.append(last_size-size)
last_size = size
time.sleep(0.2)
size_diffs = size_diffs[1:] # discard the first one as we didn't start with the current size
return sum(size_diffs)/float(len(size_diffs)), last_size, size_diffs
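# Hypothetical convenience wrapper (not part of the original module) tying the steps above
# together: queue one notation subtree into Redis, then bulk-index the queued documents.
def example_reindex(notation, language='en'):
q_size, queued = fill_redis_q(notation, language)
success_count, errors = index()
return queued, success_count, errors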
# TODO: AT this point all functionality related to redis_q is ripe to be refactored into a class.
|
from floodsystem.stationdata import build_station_list, update_water_levels
from floodsystem.station import MonitoringStation
from floodsystem.utils import sorted_by_key
stations = build_station_list()
update_water_levels(stations)
names = [
'Bourton Dickler', 'Surfleet Sluice', 'Gaw Bridge', 'Hemingford',
'Swindon'
]
def take_second(elem):
return elem[1]
Rank = []
for station in stations:
if station.relative_water_level() is not None:
Rank.append((station.name, station.relative_water_level()))
#print("Station name and relative water level: {}, {}".format(
#station.name, station.relative_water_level()))
Rank.sort(key = take_second, reverse=True)
N = 10
Ranking_N = Rank[:N]  # top-N stations by relative water level
print(Ranking_N)
|
'''
This code is part of QuTIpy.
(c) Copyright Sumeet Khatri, 2021
This code is licensed under the Apache License, Version 2.0. You may
obtain a copy of this license in the LICENSE.txt file in the root directory
of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
Any modifications or derivative works of this code must retain this
copyright notice, and modified files need to carry a notice indicating
that they have been altered from the originals.
'''
import numpy as np
from qutipy.Clifford import Clifford_group_generators
from qutipy.general_functions import dag,unitary_distance,eye
def generate_Clifford_group(n,display=False):
'''
Generates the n-qubit Clifford group. The display variable is for testing
purposes, and to see the progress through the code.
Note that even for n=2, this code will take a long time to run! There are
11520 elements of the two-qubit Clifford group!
'''
G=Clifford_group_generators(n)
def in_list(L,elem):
# Last modified: 27 June 2019
'''
Checks if the given unitary elem is in the list L.
'''
x=0
for l in L:
if np.around(unitary_distance(l,elem),10)==0: # Check if the distance is zero (up to 10 decimal places)
x=1
break
return x
C=[eye(2**n)]
generated=False
while not generated:
tmp=[]
num_added=0
for c in C:
for g in G:
t1=c@g
t2=c@dag(g)
# t1 and t2 might be the same, in which case we add only one of the two to the list (if needed).
# Also, t1 and t2 might already be in tmp (up to global phase), so we need to check for that as well.
if np.around(unitary_distance(t1,t2),10)==0:
if not in_list(C,t1) and not in_list(tmp,t1):
tmp.append(t1)
num_added+=1
else: # if t1 and t2 are different, add both to the list (if needed).
if not in_list(C,t1) and not in_list(tmp,t1):
tmp.append(t1)
num_added+=1
if not in_list(C,t2) and not in_list(tmp,t2):
tmp.append(t2)
num_added+=1
if num_added>0:
for t in tmp:
C.append(t)
else:
generated=True
if display:
print(len(C))
return C
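# Example run (kept small on purpose, illustrative): the single-qubit case should yield the
# 24 Clifford elements up to global phase; n=2 already takes a long time, as noted in the docstring.
if __name__ == '__main__':
C1 = generate_Clifford_group(1, display=True)
print('Generated %d single-qubit Clifford elements' % len(C1))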
|
from os import path
import re
from setuptools import setup
def get_version():
text = open(path.join(path.dirname(__file__), "sphinx_affiliates", "__init__.py")).read()
match = re.compile(r"^__version__\s*\=\s*[\"\']([^\s\'\"]+)", re.M).search(text)
return match.group(1)
with open("README.md") as readme:
long_description = readme.read()
setup(
name="sphinx-affiliates",
version=get_version(),
description="Tools for integrating affiliated Sphinx sites",
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
author="mattip",
author_email="mattigit@picus.org.il",
packages=["sphinx_affiliates"],
include_package_data=True,
url="https://github.com/mattip/sphinx-affiliates",
license="MIT",
python_requires=">=3.6,2.7",
install_requires=["sphinx>=2,<4"],
extras_require={
"testing": [
"coverage",
"pytest",
"pytest-cov",
"sphinx_testing",
],
"code_style": ["pre-commit==2.6"],
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Plugins",
"Environment :: Web Environment",
"Framework :: Sphinx :: Extension",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python",
"Topic :: Documentation :: Sphinx",
],
)
|
import json
import math
import os
from math import * # noqa
import bmesh
import bpy
import mathutils
from bpy.props import StringProperty
from bpy.types import Operator
from shutil import copyfile, SameFileError
# ExportHelper is a helper class, defines filename and
# invoke() function which calls the file selector.
from bpy_extras.io_utils import ExportHelper
from mathutils import Matrix, Vector
bl_info = {
"name": "Raylar Export",
"author": "sinan islekdemir",
"version": (0, 0, 0, 2),
"blender": (2, 80, 0),
}
global_matrix = mathutils.Matrix.Rotation(-math.pi / 2.0, 4, "X")
MAT_CONVERT_CAMERA = Matrix.Rotation(math.pi / 2.0, 4, "Y")
global_assets = []
def write_raylar_data(filepath):
global global_assets
print("running raylar export...")
scene = construct_scene()
context = json.dumps(scene)
f = open(filepath, "w", encoding="utf-8")
f.write(context)
f.close()
target_path = os.path.dirname(filepath)
for f in global_assets:
base_name = os.path.basename(f)
try:
copyfile(f, os.path.join(target_path, base_name))
except SameFileError:
pass
global_assets = []
return {"FINISHED"}
def export_object(obj):
if obj.type != "MESH":
return
material_cache = {}
for i, matslot in enumerate(obj.material_slots):
material = matslot.material
material_cache[material.name] = {"_index": i}
mkeys = material.node_tree.nodes.keys()
if "Principled BSDF" in mkeys:
inp = material.node_tree.nodes["Principled BSDF"].inputs
if "Base Color" in inp:
material_cache[material.name]["color"] = [
inp["Base Color"].default_value[0],
inp["Base Color"].default_value[1],
inp["Base Color"].default_value[2],
inp["Base Color"].default_value[3],
]
material_cache[material.name]["light"] = False
else:
material_cache[material.name]["color"] = [1, 1, 1, 1]
if "Alpha" in inp:
alpha = inp[
"Transmission"
].default_value
if alpha > 1.0:
alpha = alpha / 2.0
material_cache[material.name]["transmission"] = alpha
if "IOR" in inp:
material_cache[material.name]["index_of_refraction"] = inp[
"IOR"
].default_value
if "Metallic" in inp:
material_cache[material.name]["glossiness"] = inp[
"Metallic"
].default_value
if "Roughness" in inp:
material_cache[material.name]["roughness"] = inp[
"Roughness"
].default_value
if "Emission" in mkeys:
inp = material.node_tree.nodes["Emission"].inputs
if "Color" in inp:
material_cache[material.name]["color"] = [
inp["Color"].default_value[0],
inp["Color"].default_value[1],
inp["Color"].default_value[2],
inp["Color"].default_value[3],
]
material_cache[material.name]["light"] = True
material_cache[material.name]["light_strength"] = inp[
"Strength"
].default_value
if "Image Texture" in mkeys:
image = material.node_tree.nodes["Image Texture"].image
inp = image.filepath_from_user()
global_assets.append(inp)
base_name = os.path.basename(inp)
material_cache[material.name]["texture"] = base_name
odata = obj.data
original_data = odata.copy() # Backup data
bm = bmesh.new()
bm.from_mesh(odata)
bmesh.ops.triangulate(
bm, faces=bm.faces[:], quad_method="BEAUTY", ngon_method="BEAUTY"
)
bm.to_mesh(odata) # Triangulate the object
vertices = []
normals = []
texcoords = []
index = 0
uvLayer = bm.loops.layers.uv.active
for face in bm.faces:
for loop in face.loops:
# Get position (swizzled)
vertices.append([loop.vert.co[0],
loop.vert.co[1],
loop.vert.co[2]])
# Get normal (swizzled)
# TODO: Should this be face, loop, or vertex normal?
norm = loop.vert.normal
normals.append([norm[0], norm[1], norm[2]])
# Get first UV layer
if uvLayer is not None:
texcoords.append([loop[uvLayer].uv[0], loop[uvLayer].uv[1]])
for mat in material_cache:
if material_cache[mat]["_index"] == face.material_index:
if "indices" not in material_cache[mat]:
material_cache[mat]["indices"] = []
material_cache[mat]["indices"].append([index,
index + 1,
index + 2,
int(face.smooth)])
index += 3
obj_dict = {
"vertices": vertices,
"normals": normals,
"texcoords": texcoords,
"matrix": _conv_matrix(obj.matrix_local),
"materials": material_cache,
"children": {},
}
# Revert back the original object
obj.data = original_data
return obj_dict
def export_light(light):
directional = False
direction = [0, 0, 0, 0]
if bpy.data.lights[light.name].type == 'SUN':
directional = True
lmw = light.matrix_world
direction = lmw.to_quaternion() @ Vector((0.0, 0.0, -1.0))
return {
"position": list(light.location),
"color": list(bpy.data.lights[light.name].color),
"active": True,
"light_strength": bpy.data.lights[light.name].energy / 10,
"directional_light": directional,
"direction": list(direction)
}
def _conv_matrix(matrix):
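# Flattens a Blender (row-major, column-vector) Matrix into the transposed nested-list
# layout written into the JSON scene; assumption: this transposed order is what raylar expects.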
return [
[matrix[0][0], matrix[1][0], matrix[2][0], matrix[3][0]],
[matrix[0][1], matrix[1][1], matrix[2][1], matrix[3][1]],
[matrix[0][2], matrix[1][2], matrix[2][2], matrix[3][2]],
[matrix[0][3], matrix[1][3], matrix[2][3], matrix[3][3]],
]
def export_camera(camera):
position = camera.location
cmw = camera.matrix_world
up = cmw.to_quaternion() @ Vector((0.0, 1.0, 0.0))
cam_direction = cmw.to_quaternion() @ Vector((0.0, 0.0, -1.0))
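# The exporter has no explicit look-at point, so one is synthesized 10 units along the
# camera's viewing direction; any positive distance would do for a direction-only target.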
x = (cam_direction[0] * 10) + position[0]
y = (cam_direction[1] * 10) + position[1]
z = (cam_direction[2] * 10) + position[2]
target = [x, y, z, 1]
fov = bpy.data.cameras[camera.name].angle * 180 / math.pi
aspect = (
bpy.context.scene.render.resolution_x /
bpy.context.scene.render.resolution_y
)
return {
"position": list(position),
"target": list(target),
"up": list(up),
"fov": fov,
"aspect_ratio": aspect,
"near": 0.01,
"far": 10000,
"perspective": True,
}
def construct_scene():
scene = {"objects": {}, "lights": [], "observers": []}
bpy_scene = bpy.context.scene
for obj in bpy_scene.objects:
obj.select_set(True)
bpy.context.view_layer.objects.active = obj
bpy.ops.object.transform_apply(location=True,
scale=True,
rotation=True)
bpy.ops.object.select_all(action="DESELECT")
obj.select_set(False)
if obj.type == "MESH":
scene["objects"][obj.name] = export_object(obj)
if obj.type == "LIGHT":
scene["lights"].append(export_light(obj))
if obj.type == "CAMERA":
scene["observers"].append(export_camera(obj))
return scene
class ExportRaylarData(Operator, ExportHelper):
"""This appears in the tooltip of the operator and in the generated docs"""
bl_idname = "export_payton.scene_data"
bl_label = "Export Scene to Payton/Raylar JSON"
# ExportHelper mixin class uses this
filename_ext = ".json"
filter_glob: StringProperty(
default="*.json",
options={"HIDDEN"},
maxlen=255, # Max internal buffer length, longer would be clamped.
)
def execute(self, context):
return write_raylar_data(self.filepath)
# Only needed if you want to add into a dynamic menu
def menu_func_export(self, context):
self.layout.operator(ExportRaylarData.bl_idname,
text="Raylar Export (scene.json)")
def register():
bpy.utils.register_class(ExportRaylarData)
bpy.types.TOPBAR_MT_file_export.append(menu_func_export)
def unregister():
bpy.utils.unregister_class(ExportRaylarData)
bpy.types.TOPBAR_MT_file_export.remove(menu_func_export)
if __name__ == "__main__":
register()
|
# -*- coding: utf-8 -*-
"""
Created on Fri Sep 30 15:49:39 2016
@author: TUD205099
"""
#%% load modules
import triple_dot
import numpy as np
import qtt
from qtt.scans import makeDataset_sweep, makeDataset_sweep_2D
import qcodes
from qcodes import load_data
#from qcodes.plots.pyqtgraph import QtPlot
from qcodes.plots.qcmatplotlib import MatPlot
from qtt.live_plotting import livePlot, fpgaCallback_2d
#%% set directory for data saving
datadir = r'K:\ns\qt\spin-qubits\data\b057_data\2017 3dot Automation\data'
qcodes.DataSet.default_io = qcodes.DiskIO(datadir)
qcodes.DataSet.default_formatter = qcodes.data.gnuplot_format.GNUPlotFormat()
#%% initialize station
remote = False
if __name__=='__main__':
server_name = None
station = triple_dot.initialize(server_name=server_name)
awg = station.awg
fpga = station.fpga
gates = station.gates
RF = station.RF
keithley1 = station.keithley1
keithley2 = station.keithley2
keithley3 = station.keithley3
# siggen = station.siggen
# helium = station.helium
#%% initialize sensing dot
import qtt.structures
if __name__=='__main__' and 1:
ggv = ['SDL', 'SDP', 'SDR']
sdvalv = [gates.get(ggv[0]), gates.get(ggv[1]), gates.get(ggv[2])]
sd = qtt.structures.sensingdot_t(ggv, sdvalv, station, index=1, fpga_ch=1)
if 0:
sdval, dataset = sd.autoTune(scanrange=100)
gates.set(sd.gg[1],sdval)
name = '_'.join(sai.array_id for sai in dataset.keithley1_amplitude.set_arrays)
dataset.location = dataset.location_provider(dataset.default_io, record={'name': name})
dataset.write()
#%% defining virtual gates %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
from collections import OrderedDict
#L = {'P1': 1, 'P2': .454, 'P3': .19, 'D1': 1.535, 'D2': .460, 'LS': 1.020, 'RS': .167}
#L_sweep = {'P1': 1, 'P2': .454, 'P3': .19}
#M = {'P1': .537, 'P2': 1, 'P3': 0.386, 'D1': 1.206, 'D2': .855, 'LS': .318, 'RS': .251}
#M_sweep = {'P1': .537, 'P2': 1, 'P3': 0.386}
#R = {'P1': .21, 'P2': .654, 'P3': 1, 'D1': .526, 'D2': 1.493, 'LS': .164, 'RS': 1.136}
#R_sweep = {'P1': .21, 'P2': .654, 'P3': 1}
## result of capacitance measurement
# new tuning at 000-111
#L = OrderedDict([('P1', 1), ('P2', .481), ('P3', .194), ('D1', 1.224), ('D2', .380), ('LS', 0.843), ('RS', .110)])
#M = OrderedDict([('P1', .560), ('P2', 1), ('P3', .414), ('D1', 1.295), ('D2', .942), ('LS', .330), ('RS', .269)])
#R = OrderedDict([('P1', .208), ('P2', .503), ('P3', 1.25), ('D1', .394), ('D2', 1.189), ('LS', .073), ('RS', 0.914)])
L = OrderedDict([('P1', 1), ('P2', .481), ('P3', .194), ('D1', 1.224), ('D2', .380), ('LS', 0.843), ('RS', .110)])
M = OrderedDict([('P1', .540), ('P2', 1), ('P3', .414), ('D1', 1.295), ('D2', .942), ('LS', .330), ('RS', .269)])
R = OrderedDict([('P1', .208), ('P2', .503), ('P3', 1.25), ('D1', .394), ('D2', 1.189), ('LS', .073), ('RS', 0.914)])
#R = OrderedDict([('P1', 0), ('P2', 0), ('P3', 1), ('D1', 0), ('D2', 0), ('LS', 0), ('RS', 0)])
t1 = OrderedDict([('P1', 0), ('P2', 0), ('P3', 0), ('D1', 1), ('D2', 0), ('LS', 0), ('RS', 0)])
t2 = OrderedDict([('P1', 0), ('P2', 0), ('P3', 0), ('D1', 0), ('D2', 1), ('LS', 0), ('RS', 0)])
t_L = OrderedDict([('P1', 0), ('P2', 0), ('P3', 0), ('D1', 0), ('D2', 0), ('LS', 1), ('RS', 0)])
t_R = OrderedDict([('P1', 0), ('P2', 0), ('P3', 0), ('D1', 0), ('D2', 0), ('LS', 0), ('RS', 1)])
# extraction for sweepable gates
L_sweep = OrderedDict([(list(L.keys())[0], list(L.values())[0]), (list(L.keys())[1], list(L.values())[1]), (list(L.keys())[2], list(L.values())[2])])
M_sweep = OrderedDict([(list(M.keys())[0], list(M.values())[0]), (list(M.keys())[1], list(M.values())[1]), (list(M.keys())[2], list(M.values())[2])])
R_sweep = OrderedDict([(list(R.keys())[0], list(R.values())[0]), (list(R.keys())[1], list(R.values())[1]), (list(R.keys())[2], list(R.values())[2])])
# make the cross-capacitance matrices and invert
cc_sweep = np.array([list(L_sweep.values()), list(M_sweep.values()), list(R_sweep.values())])
cc_sweep_inv = np.linalg.inv(cc_sweep)
cc = np.array([list(L.values()), list(M.values()), list(R.values()), list(t1.values()), list(t2.values()), list(t_L.values()), list(t_R.values())])
cc_inv = np.linalg.inv(cc)
# make the inverted chemical potentials
mu_L_inv = dict()
mu_M_inv = dict()
mu_R_inv = dict()
for i in range(3):
mu_L_inv[list(L_sweep.keys())[i]] = np.dot(cc_sweep_inv, np.array([1,0,0]))[i]
mu_M_inv[list(M_sweep.keys())[i]] = np.dot(cc_sweep_inv, np.array([0,1,0]))[i]
mu_R_inv[list(R_sweep.keys())[i]] = np.dot(cc_sweep_inv, np.array([0,0,1]))[i]
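# Sanity check on the inversion above (optional, illustrative): each inverted vector should
# address exactly one dot, i.e. cc_sweep dotted with [mu_L_inv[g] for g in L_sweep] is
# approximately [1, 0, 0], and analogously [0, 1, 0] / [0, 0, 1] for mu_M_inv / mu_R_inv.
if 0:
print(np.dot(cc_sweep, np.array([mu_L_inv[k] for k in L_sweep.keys()])))
print(np.dot(cc_sweep, np.array([mu_M_inv[k] for k in M_sweep.keys()])))
print(np.dot(cc_sweep, np.array([mu_R_inv[k] for k in R_sweep.keys()])))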
# virtual P1+P2 gate
Dot_LR = {'P1': mu_L_inv['P1'] + mu_M_inv['P1'],
'P2': mu_L_inv['P2'] + mu_M_inv['P2'],
'P3': mu_L_inv['P3'] + mu_M_inv['P3']}
# dot gate dictionary for epsilon, delta, and mu
Dot_epsilon = dict()
Dot_delta = dict()
Dot_mu = dict()
for i in list(mu_L_inv):
Dot_epsilon[i] = ( mu_L_inv.get(i) - mu_R_inv.get(i) ) / mu_L_inv.get('P1')
Dot_delta[i] = (- mu_L_inv.get(i) + mu_M_inv.get(i) - mu_R_inv.get(i) ) / mu_M_inv.get('P2')
Dot_mu[i] = ( mu_L_inv.get(i) + mu_M_inv.get(i) + mu_R_inv.get(i) ) / mu_L_inv.get('P1')
# dot gate dictionary for the other parameters
Dot_t1 = {list(t1.keys())[3]: cc_inv[3,3]}
Dot_t2 = {list(t2.keys())[4]: cc_inv[4,4]}
Dot_t_L = {list(t_L.keys())[5]: cc_inv[5,5]}
Dot_t_R = {list(t_R.keys())[6]: cc_inv[6,6]}
for i in range(3):
Dot_t1[list(L_sweep.keys())[i]] = cc_inv[:,3][i]
Dot_t2[list(L_sweep.keys())[i]] = cc_inv[:,4][i]
Dot_t_L[list(L_sweep.keys())[i]] = cc_inv[:,5][i]
Dot_t_R[list(L_sweep.keys())[i]] = cc_inv[:,6][i]
#%% Do 1D scan for a polarization line
fig = 1001
gg = ['P1','P3']
gatevals = gates.allvalues()
activegates = ['P1','P2','P3','D1','D2','LS','RS','SDP','SDR','SDL','T']
gate = gg[0]
sweeprange = 5
period = 1e-3
gates.set(gg[0], 17)
gates.set(gg[1], -211)
Naverage = 1000
fpga_ch = 1  # FPGA readout channel, matching the sensing-dot definition above
diff_dir = 'xy'  # differentiation direction shown in the plot title below
waittime = Naverage * period
waveform, sweep_info = station.awg.sweep_gate(gate, sweeprange, period)
ReadDevice = ['FPGA_ch%d' % fpga_ch]
_,DataRead_ch1,DataRead_ch2 = station.fpga.readFPGA(Naverage=Naverage, ReadDevice=ReadDevice, waittime=waittime)
station.awg.stop()
dataread = [DataRead_ch1,DataRead_ch2][fpga_ch-1]
data = station.awg.sweep_process(dataread, waveform, Naverage)
dataset, plot = makeDataset_sweep(data, gate, sweeprange, fig=fig, gates=gates)
titletxt = plot.title.get_text()
plot.title.set_text(titletxt + ', diff_dir: %s' % diff_dir)
gates.resetgates(activegates,gatevals)
#%% Do 1D scan for a charge addition line
fig = 1002
gg = ['P2']
gate = gg[0]
sweeprange = 5
period = 1e-3
Naverage = 1000
waittime = Naverage * period
waveform, sweep_info = station.awg.sweep_gate(gate, sweeprange, period)
ReadDevice = ['FPGA_ch%d' % fpga_ch]
_,DataRead_ch1,DataRead_ch2 = station.fpga.readFPGA(Naverage=Naverage, ReadDevice=ReadDevice, waittime=waittime)
station.awg.stop()
dataread = [DataRead_ch1,DataRead_ch2][fpga_ch-1]
data = station.awg.sweep_process(dataread, waveform, Naverage)
dataset, plot = makeDataset_sweep(data, gate, sweeprange, fig=fig, gates=gates)
titletxt = plot.title.get_text()
plot.title.set_text(titletxt + ', diff_dir: %s' % diff_dir)
#%% Retune single gate
gates.set_P2(gates.get_P2()-0.1)
#%% Record time trace with FPGA
fig = 1005
import matplotlib.pyplot as plt
plt.close(fig)
gg = ['P2']
gate = gg[0]
sweeprange = 0
period = 8e-3
Naverage = 1
waittime = 0
waveform, sweep_info = station.awg.sweep_gate(gate, sweeprange, period)
ReadDevice = ['FPGA_ch%d' % fpga_ch]
_,DataRead_ch1,DataRead_ch2 = station.fpga.readFPGA(Naverage=Naverage, ReadDevice=ReadDevice, waittime=waittime)
station.awg.stop()
dataread = [DataRead_ch1,DataRead_ch2][fpga_ch-1]
data = station.awg.sweep_process(dataread, waveform, Naverage)
plot = MatPlot(data, interval=0)
#%%
from qtt.algorithms.tunneling import fit_pol_all, polmod_all_2slopes
par_fit = fit_pol_all(dataset.P1.ndarray, dataset.measured.ndarray)
#TODO: add fit to dataplot
MatPlot(dataset.P1.ndarray, polmod_all_2slopes(dataset.P1.ndarray, par_fit), interval=0)
# convert t1 from mV to GHz
t1_hz = par_fit[0]*80*(cc[0,0]-cc[1,0])/4.2
#%% focus at transition
gates.P1.set(0)
gates.P3.set(-190)
#
gates.P1.set(17)
gates.P3.set(-211)
#%% single fast 2D scan with virtual plungers
# TODO(TF): think how to generalize the step axis for other instruments, also for 1D
import qtt.scans
from imp import reload
reload(qtt.scans)
from qtt.scans import scan2Dfast
#scangates = ['P2','P3']
scangates = ['P1','P2','P3']
gatevals = gates.allvalues()
gg = [gates.get(scangates[0]), gates.get(scangates[1])]
activegates = ['P1','P2','P3','D1','D2','LS','RS','SDP','SDR','SDL','T']
#activegates = scangates
if __name__ == '__main__' and 1:
stepgate = getattr(gates,scangates[0])
stepgateval = stepgate.get()
plot = MatPlot(interval=0)
delay = 0.1
scanjob = dict({'sweepdata': dict({'gate': scangates[1], 'start': gg[1] - 40, 'end': gg[1] + 40, 'step': 2.}), 'delay': delay})
scanjob['stepdata'] = dict({'gate': scangates[0], 'start': gg[0] + 80, 'end': gg[0] - 80, 'step': -2})
scanjob['sd'] = sd
scanjob['sweepdata']['period'] = .5e-3
# scanjob['gates_horz'] = {'P2':1, 'P3': 0}
# scanjob['gates_vert'] = {'P2':0, 'P3': 1}
scanjob['gates_horz'] = mu_R_inv
scanjob['gates_horz'] = {'P1': 0.0066602222076373452*2, 'P2': -0.40117006534666505*2, 'P3': 0.96032257332014703*2} # mu_R_inv * const
# scanjob['gates_vert'] = mu_M_inv
# scanjob['gates_horz'] = Dot_epsilon
# scanjob['gates_vert'] = Dot_mu
# scanjob['gates_horz'] = {'P1':1, 'P2': -0.8, 'P3': -0.2}
# scanjob['gates_vert'] = {'P1':0, 'P2': 1}
# scanjob['gates_horz'] = {'P2':1, 'P3': 0}
# scanjob['gates_vert'] = {'P2':0, 'P1': 1}
# scanjob['gates_horz'] = {'P1':1, 'P3': -1.25}
# scanjob['gates_vert'] = {'P1':1, 'P2':0.6, 'P3':1,'SDP': -.5}
# scanjob['gates_vert'] = {'P1':1, 'P2':0.5, 'P3':1}
# scanjob['gates_horz'] = {'P1': 1, 'P3': -1}
# scanjob['gates_vert'] = {'P1': .5, 'P2': .5, 'P3': .5}
scanjob['gates_vert'] = Dot_t_R
scanjob['fpga_samp_freq'] = fpga.get_sampling_frequency()
diff_dir = 'xy'
RF.on()
alldata = scan2Dfast(station, scanjob, liveplotwindow=plot, diff_dir=diff_dir, wait_time=None, background=False)
plot.fig.axes[0].autoscale(tight=True)
plot.fig.axes[1].autoscale(tight=True)
gates.resetgates(activegates,gatevals)
RF.off()
if 0:
diff_dir = 'x'
imx = qtt.diffImageSmooth(alldata.measured.ndarray, dy=diff_dir)
data_arr = qcodes.DataArray(name='diff', label='diff', array_id='diff', set_arrays=alldata.measured.set_arrays, preset_data=imx)
alldata.add_array(data_arr)
plot_2 = plot # reserving this plot for later analysis
#%% ADDED(TF): multiple fast-2D scans with single virtual plunger sweep, mainly for capacitance measurements
import qtt.scans
from imp import reload
reload(qtt.scans)
from qtt.scans import scan2Dfast
from qtt.tools import mouseClick
## set gate voltages to the center of the relevant charging line before running
#dotgate = 'P3' # gate corresponding to the relevant dot (fast gate)
#stepgates = ['P1','P2','D1','D2','LS','RS','SDP','SDL','SDR','T'] # all the gates for measuring cross-capacitances
#cc_init = [0.198, 0.507, 0.395, 1.184, 0.084, 0.941, 0.074, 0.064, 0.066, 3.632] # initial guess of cross capacitances
#dotgate = 'P2' # gate corresponding to the relevant dot (fast gate)
#stepgates = ['P1','P3','D1','D2','LS','RS','SDP','SDL','SDR','T'] # all the gates for measuring cross-capacitances
#cc_init = [0.537, 0.386, 1.206, 0.855, 0.318, 0.251, 0.084, 0.088, 0.066, 4.022] # initial guess of cross capacitances
dotgate = 'P1' # gate corresponding to the relevant dot (fast gate)
stepgates = ['P2','P3','D1','D2','LS','RS','SDP','SDL','SDR','T'] # all the gates for measuring cross-capacitances
cc_init = [0.454, 0.190, 1.535, 0.460, 1.020, 0.167, 0.079, 0.065, 0.067, 3.186] # initial guess of cross capacitances
if len(stepgates) != len(cc_init):
raise ValueError("stepgates and cc_init must have the same length (one cross-capacitance guess per step gate)")
RF.on()
for step_num, stepgate_name in enumerate(stepgates):
scangates = [stepgate_name,dotgate]
gatevals = gates.allvalues()
gg = [gates.get(scangates[0]), gates.get(scangates[1])]
activegates = scangates
if __name__ == '__main__' and 1:
stepgate = getattr(gates,scangates[0])
stepgateval = stepgate.get()
plot = MatPlot(interval=0)
delay = 0.1
step_width = 12 / 2
scanjob = dict({'sweepdata': dict({'gate': scangates[1], 'start': gg[1] - step_width*cc_init[step_num], 'end': gg[1] + step_width*cc_init[step_num], 'step': 1.}), 'delay': delay})
scanjob['stepdata'] = dict({'gate': scangates[0], 'start': gg[0] + step_width, 'end': gg[0] - step_width, 'step': -0.1})
scanjob['sd'] = sd
scanjob['sweepdata']['period'] = .5e-3
if dotgate == 'P2':
scanjob['gates_horz'] = {dotgate:1, 'P1': 0} # second one is a dummy gate
else:
scanjob['gates_horz'] = {dotgate:1, 'P2': 0} # second one is a dummy gate
scanjob['gates_vert'] = {dotgate:0, stepgate_name: 1}
scanjob['fpga_samp_freq'] = fpga.get_sampling_frequency()
diff_dir = 'xy'
alldata = scan2Dfast(station, scanjob, liveplotwindow=plot, wait_time=None, background=False)
plot.fig.axes[0].autoscale(tight=True)
plot.fig.axes[1].autoscale(tight=True)
gates.resetgates(activegates,gatevals)
exec('plot_' + str(step_num) + '=plot') # reserving plot for later analysis
clicks = mouseClick(plot) # run the mouseClick() class from develp_automated_crosscapacitance.py
RF.off()
#%%
for g in scanjob['gates_vert']:
gates.get(g)
#%% do a single qcodes loop of the sensing dot plunger
if __name__=='__main__' and 1:
SDP_val = gates.SDP.get()
RF.on()
station.set_measurement(station.keithley1.amplitude)
loop_1d = qcodes.Loop(gates.SDP[-250:-300:1],delay=0.1)
dataset = loop_1d.run(background=False, data_manager=False)
qcodes.plots.qcmatplotlib.MatPlot(dataset.default_parameter_array(), interval=0)
gates.SDP.set(SDP_val)
#%% 2d scan
if __name__=='__main__' and 1:
# loop_2d = qcodes.Loop(gates.SDL[-300:0:5],delay=0.1).loop(gates.SDR[-10:0:5])
loop_2d = qcodes.Loop(gates.SDL[-300:0:5],delay=0.1).each(qcodes.Loop(gates.SDR[-300:0:5],delay=.1).each(station.keithley2.amplitude))
dataset_2d = loop_2d.run(background=False, data_manager=False)
qcodes.plots.qcmatplotlib.MatPlot(dataset_2d.default_parameter_array(), interval=0)
#%% single veryfast 2D scan
if __name__=='__main__':
fig = 111
sweepgates = ['P1','P3']
sweepranges = [80, 80]
resolution = [90,90]
Naverage = 1000
fpga_ch = 1
diff_dir = 'xy'
# diff_dir = None
waveform, sweep_info = station.awg.sweep_2D(station.fpga.get_sampling_frequency(), sweepgates, sweepranges, resolution)
waittime = resolution[0]*resolution[1]*Naverage/fpga.get_sampling_frequency()
ReadDevice = ['FPGA_ch%d' % fpga_ch]
_,DataRead_ch1,DataRead_ch2 = station.fpga.readFPGA(Naverage=Naverage, ReadDevice=ReadDevice, waittime=waittime)
station.awg.stop()
dataread = [DataRead_ch1,DataRead_ch2][fpga_ch-1]
data = station.awg.sweep_2D_process(dataread, waveform, diff_dir=diff_dir)
dataset, plot = makeDataset_sweep_2D(data, gates, sweepgates, sweepranges, fig=111)
titletxt = plot.title.get_text()
plot.title.set_text(titletxt + ', diff_dir: %s' % diff_dir)
name = '_'.join(sai.array_id for sai in dataset.measured.set_arrays)
dataset.location = dataset.location_provider(dataset.default_io, record={'name': name})
dataset.write()
#%%
station.awg.sweep_run(sweep_info)
station.awg.stop()
dataread = [DataRead_ch1,DataRead_ch2][fpga_ch-1]
data = station.awg.sweep_2D_process(dataread, waveform, diff_dir=diff_dir)
dataset, plot = makeDataset_sweep_2D(data, gates, sweepgates, sweepranges, fig=111)
titletxt = plot.title.get_text()
plot.title.set_text(titletxt + ', diff_dir: %s' % diff_dir)
name = '_'.join(sai.array_id for sai in dataset.measured.set_arrays)
dataset.location = dataset.location_provider(dataset.default_io, record={'name': name})
dataset.write()
#%% single very fast 2D scan of virtual gates
if __name__=='__main__':
fig = 203
# gates_horz = {'P1': 1, 'P3': -.0}
# gates_vert = {'P3': 1, 'P1': -.0}
# gates_horz = {'P1': 1, 'P3': -1}
# gates_vert = {'P1': -.5, 'P2': 1, 'P3': -.5}
# gates_horz = {'P1': 1, 'P3': -1}
# gates_vert = {'P1': .5, 'P2': .5, 'P3': .5}
# gates_horz = mu_M_inv
# gates_vert = mu_R_inv
gates_horz = Dot_epsilon
gates_vert = Dot_delta
gates_vert = Dot_mu
sweepranges = [90, 150]
sweepgates = ['P1', 'P2']
resolution = [90,90]
Naverage = 1000
fpga_ch = 1
diff_dir = 'xy'
# diff_dir = None
waveform, sweep_info = station.awg.sweep_2D_virt(station.fpga.get_sampling_frequency(), gates_horz, gates_vert, sweepranges, resolution)
waittime = resolution[0]*resolution[1]*Naverage/fpga.get_sampling_frequency()
ReadDevice = ['FPGA_ch%d' % fpga_ch]
_,DataRead_ch1,DataRead_ch2 = station.fpga.readFPGA(Naverage=Naverage, ReadDevice=ReadDevice, waittime=waittime)
station.awg.stop()
dataread = [DataRead_ch1,DataRead_ch2][fpga_ch-1]
data = station.awg.sweep_2D_process(dataread, waveform, diff_dir=None)
dataset, plot = makeDataset_sweep_2D(data, gates, sweepgates, sweepranges, fig=fig)
titletxt = plot.title.get_text()
plot.title.set_text(titletxt + ', diff_dir: %s' % diff_dir)
plot.fig.axes[0].autoscale(tight=True)
plot.fig.axes[1].autoscale(tight=True)
fig = 204
dataread = [DataRead_ch1,DataRead_ch2][fpga_ch-1]
data = station.awg.sweep_2D_process(dataread, waveform, diff_dir=diff_dir)
dataset, plot = makeDataset_sweep_2D(data, gates, sweepgates, sweepranges, fig=fig)
titletxt = plot.title.get_text()
plot.title.set_text(titletxt + ', diff_dir: %s' % diff_dir)
plot.fig.axes[0].autoscale(tight=True)
plot.fig.axes[1].autoscale(tight=True)
name = '_'.join(sai.array_id for sai in dataset.measured.set_arrays)
dataset.location = dataset.location_provider(dataset.default_io, record={'name': name})
dataset.write()
#%% videomode tuning
if __name__=='__main__':
sweepgates = ['P1','P3']
sweepranges = [100,100]
resolution = [90,90]
Naverage = 25
fpga_ch = 1
diff_dir = 'xy'
waveform, sweep_info = station.awg.sweep_2D(station.fpga.get_sampling_frequency(), sweepgates, sweepranges, resolution)
lp = livePlot(gates, sweepgates, sweepranges)
lp.datafunction = fpgaCallback_2d(station, waveform, Naverage, fpga_ch, resolution, diff_dir)
lp.startreadout(rate=10)
#%% videomode tuning with virtual gates
if __name__=='__main__':
sweepgates = ['P1','P3']
# gates_horz = {'P1':1, 'P3': -1.25}
# gates_vert = {'P1':1, 'P2':.5, 'P3':0.8}
## gates_horz = {'P1': 1, 'P3': -.2}
## gates_vert = {'P3': 1, 'P1': -.2}
# gates_horz = {'P1': 1, 'P3': -1.25}
# gates_vert = {'P1': -.5, 'P2': 1, 'P3': -.4}
# gates_horz = {'P1': 1, 'P2': 0}
# gates_vert = {'P1': 0, 'P3': 1}
## gates_horz = mu_R_inv
gates_horz = mu_R_inv
gates_vert = Dot_LR
# gates_horz = Dot_epsilon
# gates_vert = Dot_mu
sweepranges = [90, 140]
sweepranges = [140, 90]
resolution = [90,90]
Naverage = 25
fpga_ch = 1
diff_dir = 'xy'
RF.on()
waveform, sweep_info = station.awg.sweep_2D_virt(station.fpga.get_sampling_frequency(), gates_horz, gates_vert, sweepranges, resolution)
lp = livePlot(gates, sweepgates, sweepranges)
lp.datafunction = fpgaCallback_2d(station, waveform, Naverage, fpga_ch, resolution, diff_dir)
lp.startreadout(rate=10)
#%% stop video mode
lp.stopreadout()
awg.stop()
RF.off()
#%% single 2D scan (2D scan in scans does not work)
# crashes still! (does it matter which direction we step?)
scangates = ['SDL','SDR']
gg = [0, 0]
#gg = [136, 100]
#gg = [gates.get(sweepgates[0]), gates.get(sweepgates[1])]
if __name__ == '__main__' and 1:
gate_horz = getattr(gates, scangates[0])
gate_vert = getattr(gates, scangates[1])
delay = .01
scanjob = dict({'sweepdata': dict({'gate': scangates[1], 'start': gg[1] - 500, 'end': gg[1] , 'step': 5.}), 'delay': delay})
scanjob['stepdata'] = dict({'gate': scangates[0], 'start': gg[0] - 500, 'end': gg[0], 'step': 5.})
# move/combine this code to scan2D in qtt.scans
station.set_measurement(station.keithley2.amplitude)
loop2D = qcodes.Loop(gate_horz[scanjob['sweepdata']['start']:scanjob['sweepdata']['end']:scanjob['sweepdata']['step']], delay=delay)
loop2D_full = loop2D.loop(gate_vert[scanjob['stepdata']['start']:scanjob['stepdata']['end']:scanjob['stepdata']['step']], delay=delay)
alldata = loop2D_full.run(background=False, data_manager=False)
gates.set(scangates[0], gg[0])
gates.set(scangates[1], gg[1])
#%% scan all pairs of plungers for dots below
plungers = ['P1','P2','P3']
fpga.set_sampling_frequency(200000)
pairs = []
for i in plungers:
for j in plungers:
if i != j:
pairs += [[i,j]]
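# Illustrative check (sketch only): the nested loops above build the same ordered
# pairs as itertools.permutations, in the same order.
import itertools
assert pairs == [list(p) for p in itertools.permutations(plungers, 2)]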
if __name__=='__main__':
plungers = ['P1','P2','P3']
fignum = 500
for i in range(0,len(plungers)*2):
fig = fignum + i
sweepgates = pairs[i]
sweepranges = [80, 80]
resolution = [90,90]
Naverage = 1000
fpga_ch = 1
diff_dir = 'xy'
waveform, sweep_info = station.awg.sweep_2D(station.fpga.get_sampling_frequency(), sweepgates, sweepranges, resolution)
waittime = resolution[0]*resolution[1]*Naverage/fpga.get_sampling_frequency()
ReadDevice = ['FPGA_ch%d' % fpga_ch]
_,DataRead_ch1,DataRead_ch2 = station.fpga.readFPGA(Naverage=Naverage, ReadDevice=ReadDevice, waittime=waittime)
station.awg.stop()
dataread = [DataRead_ch1,DataRead_ch2][fpga_ch-1]
data = station.awg.sweep_2D_process(dataread, waveform, diff_dir=diff_dir)
dataset, plot = makeDataset_sweep_2D(data, gates, sweepgates, sweepranges, fig=fig)
titletxt = plot.title.get_text()
plot.title.set_text(titletxt + ', diff_dir: %s' % diff_dir)
name = '_'.join(sai.array_id for sai in dataset.measured.set_arrays)
dataset.location = dataset.location_provider(dataset.default_io, record={'name': name})
dataset.write()
qtt.pmatlab.tilefigs([500,501,502,503,504,505],[3,2])
#%% Fit lines and calculate slopes
from qtt.deprecated.linetools import costFunctionLine
from scipy.optimize import minimize
import matplotlib.pyplot as plt
pp = ['P3', 'P1']
verbose = 0
cb = None
param0 = [-5,5,.5*np.pi] # x,y,theta,
px = [dataset.measured.ndarray.shape[0]//2,dataset.measured.ndarray.shape[1]//2]
imx = dataset.measured.ndarray
istep = .5
cgate = pp[0]
igate = pp[1]
costfun = lambda x : costFunctionLine(x, -imx, verbose=0, istep=istep, px=px, dthr=1, dwidth=2)
res = minimize(costfun, param0, method='powell', options={'maxiter': 3000, 'maxfev': 101400, 'xtol': 1e-8, 'disp': verbose>=2}, callback=cb)
c = costFunctionLine(res.x, imx, istep, verbose=1, fig=fig, px=px); plt.figure(fig); plt.xlabel(cgate); plt.ylabel(igate); plt.close(fig+1)
#%% parameterviewer
from qtt.parameterviewer import createParameterWidgetRemote
from qtt.parameterviewer import createParameterWidget
if __name__=='__main__' and not remote:
p = createParameterWidget([gates,])
if __name__=='__main__' and remote:
p=createParameterWidgetRemote([gates,])
#%% load data and plot results
if __name__=='__main__' and 1:
olddatadir = r'K:\ns\qt\spin-qubits\data\b057_data\2016 3dot experiment\data\2016-11-11\18-20-44_P2_P3'
dataset_old = load_data(location=olddatadir)
# qcodes.plots.pyqtgraph.QtPlot(dataset_old.measured, interval=0)
plotje = qcodes.plots.qcmatplotlib.MatPlot(dataset_old.measured, interval=0)
#%% delta
x = 2
gates.P1.set(gates.P1.get()+Dot_delta['P1']*x)
gates.P2.set(gates.P2.get()+Dot_delta['P2']*x)
gates.P3.set(gates.P3.get()+Dot_delta['P3']*x)
#%% epsilon
x = 2
gates.P1.set(gates.P1.get()+Dot_epsilon['P1']*x)
gates.P2.set(gates.P2.get()+Dot_epsilon['P2']*x)
gates.P3.set(gates.P3.get()+Dot_epsilon['P3']*x)
#%% mu
x = 4
gates.P1.set(gates.P1.get()+Dot_mu['P1']*x)
gates.P2.set(gates.P2.get()+Dot_mu['P2']*x)
gates.P3.set(gates.P3.get()+Dot_mu['P3']*x)
gates.SDP.set(gates.SDP.get() - 0.15*x)
#%% tuning t1
x = 40
gates.D1.set(gates.D1.get()+Dot_t1['D1']*x)
gates.P1.set(gates.P1.get()+Dot_t1['P1']*x)
gates.P2.set(gates.P2.get()+Dot_t1['P2']*x)
gates.P3.set(gates.P3.get()+Dot_t1['P3']*x)
gates.allvalues()
#%% tuning t2
x = 10
gates.D2.set(gates.D2.get()+Dot_t2['D2']*x)
gates.P1.set(gates.P1.get()+Dot_t2['P1']*x)
gates.P2.set(gates.P2.get()+Dot_t2['P2']*x)
gates.P3.set(gates.P3.get()+Dot_t2['P3']*x)
gates.allvalues()
#%% tuning t_L
x = -20
gates.LS.set(gates.LS.get()+Dot_t_L['LS']*x)
gates.P1.set(gates.P1.get()+Dot_t_L['P1']*x)
gates.P2.set(gates.P2.get()+Dot_t_L['P2']*x)
gates.P3.set(gates.P3.get()+Dot_t_L['P3']*x)
gates.allvalues()
#%% tuning t_R
x = -5
gates.RS.set(gates.RS.get()+Dot_t_R['RS']*x)
gates.P1.set(gates.P1.get()+Dot_t_R['P1']*x)
gates.P2.set(gates.P2.get()+Dot_t_R['P2']*x)
gates.P3.set(gates.P3.get()+Dot_t_R['P3']*x)
gates.allvalues()
#%% L
x = -5
gates.P1.set(gates.P1.get()+mu_L_inv['P1']*x)
gates.P2.set(gates.P2.get()+mu_L_inv['P2']*x)
gates.P3.set(gates.P3.get()+mu_L_inv['P3']*x)
#%% M
x = -10
gates.P1.set(gates.P1.get()+mu_M_inv['P1']*x)
gates.P2.set(gates.P2.get()+mu_M_inv['P2']*x)
gates.P3.set(gates.P3.get()+mu_M_inv['P3']*x)
#%% R
x = -5
gates.P1.set(gates.P1.get()+mu_R_inv['P1']*x)
gates.P2.set(gates.P2.get()+mu_R_inv['P2']*x)
gates.P3.set(gates.P3.get()+mu_R_inv['P3']*x)
#%% sensing dot
x = 2
gates.SDP.set(gates.SDP.get() + x)
#%% LS,RS?
x = 5
gates.P1.set(gates.P1.get()-x)
gates.P2.set(gates.P2.get()+.6*x)
gates.P3.set(gates.P3.get()-x)
gates.LS.set(gates.LS.get()+x)
gates.RS.set(gates.RS.get()+x)
#%%
x = 5
gates.P1.set(gates.P1.get()+x*1)
gates.P2.set(gates.P2.get()+x*.5)
gates.P3.set(gates.P3.get()+x*.8)
#%%
x = 10
gates.T.set(gates.T.get()+x)
gates.P1.set(gates.P1.get()-x)
gates.P2.set(gates.P2.get()-x)
gates.P3.set(gates.P3.get()-x)
gates.D1.set(gates.D1.get()-x/2)
gates.D2.set(gates.D2.get()-x/2)
gates.LS.set(gates.LS.get()-x/2)
gates.RS.set(gates.RS.get()-x/2)
gates.SDR.set(gates.SDR.get()-x)
gates.SDP.set(gates.SDP.get()-x)
gates.SDL.set(gates.SDL.get()-x)
#%%
x = -10
gates.D2.set(gates.D2.get()+x)
gates.P2.set(gates.P2.get()-x/2)
gates.P3.set(gates.P3.get()-x/2)
#%%
# pid 5604: WARNING helpers.py:204 - negative delay -0.054258 sec
#%% FPGA marker error?
#Sending the waveform sweep_P1
#Sending the waveform sweep_P3
#scan2Dfast: 0/50: setting P3 to -189.059
#Traceback (most recent call last):
#
# File "<ipython-input-6-b0f69151b5a6>", line 40, in <module>
# alldata = scan2Dfast(station, scanjob, liveplotwindow=plot, diff_dir=diff_dir, wait_time=None, background=False)
#
# File "D:\Users\diepencjv\qtt\qtt\scans.py", line 492, in scan2Dfast
# alldata.measured.ndarray[ix] = readfunc(waveform, Naverage)
#
#ValueError: could not broadcast input array from shape (462) into shape (16)
#%% voltages to reset to
basevalues = {'D1': 0.030518043793335892,
'D2': 0.030518043793335892,
'LS': 0.030518043793335892,
'LS_fine': 0.030518043793335892,
'P1': 0.030518043793335892,
'P1_fine': 0.21362630655380599,
'P2': 0.030518043793335892,
'P2_fine': 1.0070954451819034,
'P3': 0.030518043793335892,
'P3_fine': 4.242008087281647,
'QPC': 0.030518043793335892,
'RS': 0.030518043793335892,
'RS_fine': 0.030518043793335892,
'SDL': -326.02426184481578,
'SDP': 0.030518043793335892,
'SDP_fine': 0.030518043793335892,
'SDR': -389.99008163576718,
'T': -95.796139467460307,
'bias_1': 0.030518043793335892,
'bias_2': -499.97711146715505,
'bias_3': 0.030518043793335892,
'bias_4': 0.030518043793335892}
#%% reset gates to basevalues
activegates = basevalues.keys()
gates.resetgates(activegates, basevalues)
|
import requests
import json
import numpy as np
import pandas as pd
urla = "https://data.smartdublin.ie/cgi-bin/rtpi/realtimebusinformation"
urlb = str(235)
urlc = "&format=json"
url = urla+urlb+urlc
response = requests.get(urla)
data = response.json()
print(data)
#create a json file dump of the data from the Dublin Bus API
filename = 'realtime.json'
f = open(filename, 'w')
json.dump(data, f, indent=4)
f.close()
#create a formatted pandas df and .csv file data from Dublin Bus API
busArrivals = []
for nextBus in data["results"]:
busArrivals.append(nextBus["route"])
busArrivals.append(nextBus["destination"])
busArrivals.append(nextBus["duetime"])
print('RealTime Arrival Info for Bus Stop:', data["stopid"])
print(busArrivals)
if len(busArrivals) == 0:
print('No Bus Routes Found... you will have to walk')
else:
a = np.asarray([ busArrivals ])
a = pd.DataFrame(a).T
b = pd.DataFrame(np.asarray(a.iloc[::3, :]))
c = np.asarray(a.iloc[1::3, :])
d = np.asarray(a.iloc[2::3, :])
b['2'] = c
b['3'] = d
b.columns=['Route','Destination','Duetime']
# b.to_csv("realtime.csv")
# b.to_csv("server/realtime.csv")
live = b.to_dict('index')
print(live)
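# A simpler equivalent construction (sketch): build the same table directly from the
# parsed results instead of reshaping a flat list; it reuses the keys already used above.
arrivals_df = pd.DataFrame([{'Route': nb['route'], 'Destination': nb['destination'], 'Duetime': nb['duetime']} for nb in data.get('results', [])])
print(arrivals_df.to_dict('index'))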
|
# Copyright 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import torch, glob, os
from .sparseConvNetTensor import SparseConvNetTensor
from .metadata import Metadata
def toLongTensor(dimension, x):
if hasattr(x, 'type') and x.type() == 'torch.LongTensor':
return x
elif isinstance(x, (list, tuple)):
assert len(x) == dimension
return torch.LongTensor(x)
else:
return torch.LongTensor(dimension).fill_(x)
def optionalTensor(a, b):
return getattr(a, b) if hasattr(a, b) else torch.Tensor()
def optionalTensorReturn(a):
return a if a.numel() else None
def threadDatasetIterator(d):
try:
import queue
except BaseException:
import Queue as queue
import threading
def iterator():
def worker(i):
for k in range(i, len(d), 8):
q.put(d[k])
q = queue.Queue(16)
for i in range(8):
t = threading.Thread(target=worker, args=(i,))
t.start()
for _ in range(len(d)):
item = q.get()
yield item
q.task_done()
q.join()
return iterator
def concatenate_feature_planes(input):
output = SparseConvNetTensor()
output.metadata = input[0].metadata
output.spatial_size = input[0].spatial_size
output.features = torch.cat([i.features for i in input], 1)
return output
def add_feature_planes(input):
output = SparseConvNetTensor()
output.metadata = input[0].metadata
output.spatial_size = input[0].spatial_size
output.features = sum([i.features for i in input])
return output
def append_tensors(tensors):
spatial_size=tensors[0].spatial_size
dimension=len(spatial_size)
x=SparseConvNetTensor(
features=torch.cat([t.features for t in tensors],0),
metadata=Metadata(dimension),
spatial_size=spatial_size)
for t in tensors:
x.metadata.appendMetadata(t.metadata,spatial_size)
return x
class AddCoords(torch.nn.Module):
def forward(self, input):
output = SparseConvNetTensor()
if input.features.numel():
with torch.no_grad():
coords = input.get_spatial_locations()
d = (input.spatial_size.type_as(input.features)-1)/2
coords=coords[:,:-1].type_as(input.features)/ d[None,:] - 1
output.features = torch.cat([input.features,coords],1)
else:
output.features = input.features
output.metadata = input.metadata
output.spatial_size = input.spatial_size
return output
def compare_sparse(x, y):
cL,cR,L,R = x.metadata.compareSparseHelper(y.metadata, x.spatial_size)
if x.features.is_cuda:
cL=cL.cuda()
cR=cR.cuda()
L=L.cuda()
R=R.cuda()
e = 0
if cL.numel():
e += (x.features[cL]-y.features[cR]).pow(2).sum()
if L.numel():
e += x.features[L].pow(2).sum()
if R.numel():
e += y.features[R].pow(2).sum()
return e / (cL.numel() + L.numel() + R.numel())
def spectral_norm_svd(module):
w=module.weight
if w.ndimension()==3:
w=w.view(-1,w.size(2))
_,s,_=torch.svd(w)
return s[0]
def pad_with_batch_idx(x,idx): #add a batch index to the list of coordinates
return torch.cat([x,torch.LongTensor(x.size(0),1).fill_(idx)],1)
def batch_location_tensors(location_tensors):
a=[]
for batch_idx, lt in enumerate(location_tensors):
if lt.numel():
a.append(pad_with_batch_idx(lt,batch_idx))
return torch.cat(a,0)
def checkpoint_restore(model,exp_name,name2,use_cuda=True,epoch=0):
if use_cuda:
model.cpu()
if epoch>0:
f=exp_name+'-%09d-'%epoch+name2+'.pth'
assert os.path.isfile(f)
print('Restore from ' + f)
model.load_state_dict(torch.load(f))
else:
f=sorted(glob.glob(exp_name+'-*-'+name2+'.pth'))
if len(f)>0:
f=f[-1]
print('Restore from ' + f)
model.load_state_dict(torch.load(f))
epoch=int(f[len(exp_name)+1:-len(name2)-5])
if use_cuda:
model.cuda()
return epoch+1
def is_power2(num):
return num != 0 and ((num & (num - 1)) == 0)
def checkpoint_save(model,exp_name,name2,epoch, use_cuda=True):
f=exp_name+'-%09d-'%epoch+name2+'.pth'
model.cpu()
torch.save(model.state_dict(),f)
if use_cuda:
model.cuda()
#remove previous checkpoints unless they are a power of 2 to save disk space
epoch=epoch-1
f=exp_name+'-%09d-'%epoch+name2+'.pth'
if os.path.isfile(f):
if not is_power2(epoch):
os.remove(f)
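# Doctest-style note (illustrative only) for the power-of-two helper used by checkpoint_save:
# >>> [n for n in range(1, 17) if is_power2(n)]
# [1, 2, 4, 8, 16]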
|
import numpy as np
import copy
import multiprocessing as mpc
import pandas as pd
import functools as fct
import os
from .inference_utils import init_search_directory, dset_list_logl
from .inference_utils import save_search_initial_setup
from .inference_utils import generate_variated_par
from .inference_utils import mc_accept, mc_switch
class parallel_tempering:
'''
Class that implements the likelihood maximization inference procedure.
It exposes three main methods: the class initializer and two search methods.
The initializer method creates an empty folder in the specified location,
in which it saves the search parameters (search_setup.txt), the
experimental datasets (dataset_list.pkl) and the initial value of the
model parameters (par_i.pkl).
The only difference between the two search methods is that the
search_parallel method is parallelized with the use of the multiprocessing
library. Both methods implement the parallel tempering technique to find
the maximum-likelihood value of the model parameters.
During the search results are progressively saved in a csv file, whose name
ends with 'search_history.csv'. This file stores all the updates of all
the parameters for every layer. Next to each parameter set also other
values are saved, namely:
- logl: the log-likelihood of the parameter set
- layer: the layer to whom the parameter set belongs
- temp: the temperature associated to the layer
- switch: wether the parameter set was switched with another layer in this
round
- round: the round at which the parameter set was saved.
- traj_id: the trajectory to whom the parameter set belongs. Trajectory ids
are switched together with the parameter sets.
'''
def __init__(self, dset_list, par_i, n_layers, T_max, pars_to_mutate,
save_folder, beta_list=None, mut_strength_list=None,
mut_single_list=None, save_every=100):
'''
Class initializer for parallel_tempering class. This function
initializes the search parameters, creates the folder in which to save
data, and saves the initial state of the parameters, the experimental
dataset and the initial value of the parameters.
Args:
- dset_list (list of dataset objects): list of dataset objects
containing all the experimental measurements and schemes of which
the likelihood must be maximized.
- par_i (model parameters dictionary): initial value of the model
parameters in the maximization algorithm.
- n_layers (int): number of parallel tempering layers.
- T_max (int): total number of iterations of the likelihood
maximization procedure.
- pars_to_mutate (list of str): list containing the parameters whose
maximum-likelihood value must be found.
- save_folder (str): name of the folder in which details and results of
the search will be saved. As a precaution the folder must be either
non-existent (in which case it will be created) or empty.
- beta_list (optional, list of float): list of inverse temperatures per
layer. If specified it must have the same dimension as the number
of layers. If not specified it is initialized as log-spaced between
10^3 and 10^-3. Inverse temperatures should be in decreasing order.
- mut_strength_list (optional, list of float): values of the parameters
mutation strength, quantifying the variation magnitude of the
parameters for each layer. It should be a small number (0.1~0.01).
If specified it must have the same dimension as the number of
layers, otherwise it is initialized as log-spaced between 0.1 and
0.01. Mutation strength should be in increasing order.
- mut_single_list (optional, list of bool): whether parameter variation
concerns all search parameters, or one at a time chosen randomly.
If specified it must have the same dimension as the number of
layers.
- save_every (optional, int): number of rounds between two successive
updates of the save-file. Default value is 100.
'''
# if save folder does not exists create it
print('Initializing directory')
init_search_directory(save_folder)
# initialize search parameters
self.dsets = dset_list # list of datasets
self.n_layers = n_layers # number of parallel tempering layers
self.T_max = T_max # maximum number of search steps
self.pars_to_mutate = pars_to_mutate # list of parameters to vary
self.save_folder = save_folder # save directory
self.save_every = save_every # number of iteration between two saves
# list of layer temperatures.
if beta_list is None:
self.betas = np.logspace(-3, 3, n_layers)[::-1]
else:
self.betas = np.array(beta_list)
# mutation strength for each layer
if mut_strength_list is None:
self.mut_str = np.logspace(-2, -1, n_layers)
else:
self.mut_str = np.array(mut_strength_list)
# in which layer parameter variation concerns a single parameter at a time
if mut_single_list is None:
self.mut_sing = np.zeros(n_layers, dtype=bool)
self.mut_sing[:(n_layers // 2) + 1] = True
else:
self.mut_sing = np.array(mut_single_list)
# initialize layers and save initial setup
print('Initializing layers')
self.init_layers_and_save(par_i)
def search_parallel(self):
'''
Search function. This function launches the likelihood-maximization
algorithm. The algorithm is parallel and runs on a number of cores
equal (or fewer if not enough cores are available) to the number of
parallel tempering layers. Every 'save_every' rounds all the
parameter variations for all the layers are saved in a csv file
whose name ends in 'search_history.csv'.
'''
# initialize empty search history archive
self.hist_df = pd.DataFrame()
self.temp_hist = []
# save initial state for all layers
print('Saving initial state of all layers')
self.history_append_state(
t=0,
is_accepted=np.ones(self.n_layers, dtype=bool),
is_switched=np.zeros(self.n_layers, dtype=bool)
)
# define function to evaluate posterior log-likelihood of parameters
logl_funct = fct.partial(dset_list_logl, dset_list=self.dsets)
# spawn pool of workers for parallel evaluation
n_procs = np.min([self.n_layers, mpc.cpu_count()])
print(f'Generating a pool of {n_procs} workers')
with mpc.Pool(processes=n_procs) as pool:
# start maximization cycle:
for t in range(1, self.T_max + 1):
print(f'round {t} / {self.T_max}')
# produce random parameters
new_pars = self.vary_pars()
# in parallel evaluate logl of parameter sets for all layers
new_logls = pool.map(logl_funct, new_pars)
new_logls = np.array(new_logls)
# monte-carlo step to accept variated parameters
is_accepted = mc_accept(self.logls, new_logls, self.betas)
self.pars[is_accepted] = new_pars[is_accepted]
self.logls[is_accepted] = new_logls[is_accepted]
# parallel tempering step to switch layers
is_switched, order = mc_switch(self.logls, betas=self.betas)
# update the new order
self.logls = self.logls[order]
self.pars = self.pars[order]
self.traj_id = self.traj_id[order]
# save all parameter changes
self.history_append_state(t, is_accepted, is_switched)
# every 'save_every' iterations save the search history
if t % self.save_every == 0:
print(f'Save search history at search round t = {t}')
self.save_search_history(t=t)
pool.close()
# save final version of the search history
self.save_search_history(t='final')
def search(self):
'''
Search function. This function launches the likelihood-maximization
algorithm. Every 'save_every' rounds all the
parameter variations for all the layers are saved in a csv file
whose name ends in 'search_history.csv'.
'''
# initialize empty search history archive
self.hist_df = pd.DataFrame()
self.temp_hist = []
# save initial state for all layers
print('Saving initial state of all layers')
self.history_append_state(
t=0,
is_accepted=np.ones(self.n_layers, dtype=bool),
is_switched=np.zeros(self.n_layers, dtype=bool)
)
# define function to evaluate posterior log-likelihood of parameters
logl_funct = fct.partial(dset_list_logl, dset_list=self.dsets)
# start maximization cycle:
for t in range(1, self.T_max + 1):
print(f'round {t} / {self.T_max}')
# produce random parameters
new_pars = self.vary_pars()
# evaluate logl of parameter sets for all layers
new_logls = [logl_funct(par_set) for par_set in new_pars]
new_logls = np.array(new_logls)
# monte-carlo step to accept variated parameters
is_accepted = mc_accept(self.logls, new_logls, self.betas)
self.pars[is_accepted] = new_pars[is_accepted]
self.logls[is_accepted] = new_logls[is_accepted]
# parallel tempering step to switch layers
is_switched, order = mc_switch(self.logls, betas=self.betas)
# update the new order
self.logls = self.logls[order]
self.pars = self.pars[order]
self.traj_id = self.traj_id[order]
# save all parameter changes
self.history_append_state(t, is_accepted, is_switched)
# every 'save_every' iterations save the search history
if t % self.save_every == 0:
print(f'Save search history at search round t = {t}')
self.save_search_history(t=t)
# save final version of the search history
self.save_search_history(t='final')
def init_layers_and_save(self, par_i):
'''
Utility function used to initialize the layers and save the initial
state of the search. It takes as argument the initial value of the
model parameters.
'''
# create a list of parameter sets, all equal to the initial one
self.pars = [copy.deepcopy(par_i) for _ in range(self.n_layers)]
self.pars = np.array(self.pars)
# evaluate log-likelihood of the initial parameters set
logl_0 = dset_list_logl(par_i, self.dsets)
# initialize array of log-likelihoods
self.logls = np.ones(self.n_layers) * logl_0
# initialize id of parameter sets
self.traj_id = np.arange(self.n_layers)
# save search parameters, dataset and initial parameter choice
save_search_initial_setup(self)
def history_append_state(self, t, is_accepted, is_switched):
'''
Utility function to save the current round of the search. It takes as
argument the round number and two boolean arrays. These arrays specify
whether the parameter set corresponding to each layer has been changed
or switched during the round. In either of these cases the parameter
set is saved, together with some extra information related to the
search, in the temporary history list. This list will be regularly
emptied when parameters are saved into the '.csv' file
'''
# which parameters have changed since last round
is_changed = np.logical_or(is_accepted, is_switched)
# indices of parameters that have changed
idx_ch = np.argwhere(is_changed).flatten()
for idx in idx_ch:
# create a copy of the parameter and add additional entries
# related to the search state
par = copy.deepcopy(self.pars[idx])
par['logl'] = self.logls[idx]
par['layer'] = idx
par['temp'] = 1. / self.betas[idx]
par['switch'] = is_switched[idx]
par['round'] = t
par['traj_id'] = self.traj_id[idx]
# add the changes to the temporary history
self.temp_hist.append(par)
def save_search_history(self, t):
'''
Utility function to empty the temporary history list and save the
search results into the '.csv' file. It takes as argument the iteration
round t, which is used to add a signature to the save-file. If a prior
version of the save-file is present then it is removed.
'''
# append temporary history to full history database
self.hist_df = self.hist_df.append(self.temp_hist, ignore_index=True)
# empty temporary history
self.temp_hist = []
# find the old save file if present in the folder
files = os.listdir(self.save_folder)
files = [f for f in files if f.endswith('search_history.csv')]
old_filename = None
if len(files) > 0:
if len(files) == 1:
old_filename = files[0]
else:
print('WARNING: multiple search_history files in folder.\n' +
'as a precaution not erasing previous history when' +
' saving the new.')
# save current history
current_filename = f't_{t}_search_history.csv'
self.hist_df.to_csv(os.path.join(self.save_folder, current_filename))
# remove previous save file if present
if old_filename is not None:
print(f'Removing old save file: {old_filename}')
os.remove(os.path.join(self.save_folder, old_filename))
def vary_pars(self):
'''
Utility function that from the values of the parameter set in every
layer generates and returns a mutated version of the parameters.
Mutation is introduced in the form of a small parameter variation,
whose intensity depends on the mutation strength of the layers, and on
whether a single or multiple mutation is allowed.
'''
# returns a list of parameters generated from the previous ones with
# a small variation
new_pars = []
for n, par in enumerate(self.pars):
new_par = generate_variated_par(par,
keys_to_mut=self.pars_to_mutate,
mut_str=self.mut_str[n],
mut_sing=self.mut_sing[n])
new_pars.append(new_par)
return np.array(new_pars)
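# Minimal usage sketch (the dataset list, initial parameters and parameter names below
# are placeholders, not defined in this module):
# pt = parallel_tempering(dset_list=my_dsets, par_i=par_initial, n_layers=8, T_max=1000,
#                         pars_to_mutate=['par_a', 'par_b'], save_folder='results/run_01')
# pt.search_parallel()   # or pt.search() for the serial variant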
|
# -*- coding: utf-8 -*-
# @Time : 2022
# @Author : Yong Zheng
r"""
NeuCMFw0
################################################
References
-----
Yong Zheng, Gonzalo Florez Arias. "A Family of Neural Contextual Matrix Factorization Models for Context-Aware Recommendations", ACM UMAP, 2022
Notes
-----
1). NeuCMFw0 has 4 towers: MLP tower without contexts, MF tower with UI, MF with UC, MF with IC
2). w => the context situation is treated as a single/whole dimension with its own embedding when it is fused into the MF towers
"""
import torch
import torch.nn as nn
from torch.nn.init import normal_
from deepcarskit.model.context_recommender import ContextRecommender
from recbole.model.layers import MLPLayers
from recbole.utils import InputType, EvaluatorType
class NeuCMFw0(ContextRecommender):
input_type = InputType.POINTWISE
def __init__(self, config, dataset):
super(NeuCMFw0, self).__init__(config, dataset)
# load parameters info
self.mf_embedding_size = config['mf_embedding_size']
self.mlp_embedding_size = config['mlp_embedding_size']
self.mlp_hidden_size = config['mlp_hidden_size']
self.dropout_prob = config['dropout_prob']
self.mf_train = config['mf_train']
self.mlp_train = config['mlp_train']
self.use_pretrain = config['use_pretrain']
self.mf_pretrain_path = config['mf_pretrain_path']
self.mlp_pretrain_path = config['mlp_pretrain_path']
# define layers and loss
self.user_mf_embedding = nn.Embedding(self.n_users, self.mf_embedding_size)
self.item_mf_embedding = nn.Embedding(self.n_items, self.mf_embedding_size)
self.context_situation_mf_embedding = nn.Embedding(self.n_context_situation, self.mf_embedding_size)
self.user_mlp_embedding = nn.Embedding(self.n_users, self.mlp_embedding_size)
self.item_mlp_embedding = nn.Embedding(self.n_items, self.mlp_embedding_size)
self.context_situation_mlp_embedding = nn.Embedding(self.n_context_situation, self.mlp_embedding_size)
# mlp layers = user, item
self.mlp_layers = MLPLayers([2 * self.mlp_embedding_size] + self.mlp_hidden_size, self.dropout_prob)
self.mlp_layers.logger = None # remove logger to use torch.save()
if self.mf_train and self.mlp_train:
self.predict_layer = nn.Linear(3 * self.mf_embedding_size + self.mlp_hidden_size[-1], 1)
elif self.mf_train:
self.predict_layer = nn.Linear(3 * self.mf_embedding_size, 1)
elif self.mlp_train:
self.predict_layer = nn.Linear(self.mlp_hidden_size[-1], 1)
# parameters initialization
if self.use_pretrain:
self.load_pretrain()
else:
self.apply(self._init_weights)
def _init_weights(self, module):
if isinstance(module, nn.Embedding):
normal_(module.weight.data, mean=0.0, std=0.01)
def forward(self, user, item, context_situation):
user_mf_e = self.user_mf_embedding(user)
item_mf_e = self.item_mf_embedding(item)
context_situation_mf_e = self.context_situation_mf_embedding(context_situation)
user_mlp_e = self.user_mlp_embedding(user)
item_mlp_e = self.item_mlp_embedding(item)
if self.mf_train:
mf_ui_output = torch.mul(user_mf_e, item_mf_e) # [batch_size, embedding_size]
mf_uc_output = torch.mul(user_mf_e, context_situation_mf_e) # [batch_size, embedding_size]
mf_ic_output = torch.mul(item_mf_e, context_situation_mf_e) # [batch_size, embedding_size]
if self.mlp_train:
mlp_output = self.mlp_layers(torch.cat((user_mlp_e, item_mlp_e), -1)) # [batch_size, layers[-1]]
if self.mf_train and self.mlp_train:
output = self.actfun(self.predict_layer(torch.cat((mf_ui_output, mf_uc_output, mf_ic_output, mlp_output), -1)))
elif self.mf_train:
output = self.actfun(self.predict_layer(torch.cat((mf_ui_output, mf_uc_output, mf_ic_output), -1)))
elif self.mlp_train:
output = self.actfun(self.predict_layer(mlp_output))
else:
raise RuntimeError('mf_train and mlp_train can not be False at the same time')
return output.squeeze(-1)
def calculate_loss(self, interaction):
user = interaction[self.USER_ID]
item = interaction[self.ITEM_ID]
context_situation = interaction[self.CONTEXT_SITUATION_ID]
label = interaction[self.LABEL]
output = self.forward(user, item, context_situation)
return self.loss(output, label)
def predict(self, interaction):
user = interaction[self.USER_ID]
item = interaction[self.ITEM_ID]
context_situation = interaction[self.CONTEXT_SITUATION_ID]
return self.forward(user, item, context_situation)
def dump_parameters(self):
r"""A simple implementation of dumping model parameters for pretrain.
"""
if self.mf_train and not self.mlp_train:
save_path = self.mf_pretrain_path
torch.save(self, save_path)
elif self.mlp_train and not self.mf_train:
save_path = self.mlp_pretrain_path
torch.save(self, save_path)
|
import json
from datetime import datetime
from datetime import timedelta
#import fileinput
import os
import re
import io
class DataManipulation:
def manipulate_timestamp(self, file_path, sourcetype, source):
#print('Updating timestamps in attack_data before replaying')
if sourcetype == 'aws:cloudtrail':
self.manipulate_timestamp_cloudtrail(file_path)
if source == 'WinEventLog:System' or source == 'WinEventLog:Security':
self.manipulate_timestamp_windows_event_log_raw(file_path)
if source == 'exchange':
self.manipulate_timestamp_exchange_logs(file_path)
def manipulate_timestamp_exchange_logs(self, file_path):
path = os.path.join(os.path.dirname(__file__), '../' + file_path)
path = path.replace('modules/../','')
f = io.open(path, "r", encoding="utf-8")
first_line = f.readline()
d = json.loads(first_line)
latest_event = datetime.strptime(d["CreationTime"],"%Y-%m-%dT%H:%M:%S")
now = datetime.now()
now = now.strftime("%Y-%m-%dT%H:%M:%S")
now = datetime.strptime(now,"%Y-%m-%dT%H:%M:%S")
difference = now - latest_event
f.close()
#Mimic the behavior of fileinput but in a threadsafe way
#Rename the file, which fileinput does for inplace.
#Note that path will now be the new file
original_backup_file = f"{path}.bak"
os.rename(path, original_backup_file)
with open(original_backup_file, "r") as original_file:
with open(path, "w") as new_file:
for line in original_file:
d = json.loads(line)
original_time = datetime.strptime(d["CreationTime"],"%Y-%m-%dT%H:%M:%S")
new_time = (difference + original_time)
original_time = original_time.strftime("%Y-%m-%dT%H:%M:%S")
new_time = new_time.strftime("%Y-%m-%dT%H:%M:%S")
#There is no end character appended, no need for end=''
new_file.write(line.replace(original_time, new_time))
os.remove(original_backup_file)
def manipulate_timestamp_windows_event_log_raw(self, file_path):
path = os.path.join(os.path.dirname(__file__), '../' + file_path)
path = path.replace('modules/../','')
f = io.open(path, "r", encoding="utf-8")
self.now = datetime.now()
self.now = self.now.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
self.now = datetime.strptime(self.now,"%Y-%m-%dT%H:%M:%S.%fZ")
# read raw logs
regex = r'\d{2}/\d{2}/\d{4} \d{2}:\d{2}:\d{2} [AP]M'
data = f.read()
lst_matches = re.findall(regex, data)
if len(lst_matches) > 0:
latest_event = datetime.strptime(lst_matches[-1],"%m/%d/%Y %I:%M:%S %p")
self.difference = self.now - latest_event
f.close()
result = re.sub(regex, self.replacement_function, data)
with io.open(path, "w+", encoding='utf8') as f:
f.write(result)
else:
f.close()
return
def replacement_function(self, match):
try:
event_time = datetime.strptime(match.group(),"%m/%d/%Y %I:%M:%S %p")
new_time = self.difference + event_time
return new_time.strftime("%m/%d/%Y %I:%M:%S %p")
except Exception as e:
self.logger.error("Error in timestamp replacement occured: " + str(e))
return match.group()
def manipulate_timestamp_cloudtrail(self, file_path):
path = os.path.join(os.path.dirname(__file__), '../' + file_path)
path = path.replace('modules/../','')
f = io.open(path, "r", encoding="utf-8")
try:
first_line = f.readline()
d = json.loads(first_line)
latest_event = datetime.strptime(d["eventTime"],"%Y-%m-%dT%H:%M:%S.%fZ")
now = datetime.now()
now = now.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
now = datetime.strptime(now,"%Y-%m-%dT%H:%M:%S.%fZ")
except ValueError:
first_line = f.readline()
d = json.loads(first_line)
latest_event = datetime.strptime(d["eventTime"],"%Y-%m-%dT%H:%M:%SZ")
now = datetime.now()
now = now.strftime("%Y-%m-%dT%H:%M:%SZ")
now = datetime.strptime(now,"%Y-%m-%dT%H:%M:%SZ")
difference = now - latest_event
f.close()
#Mimic the behavior of fileinput but in a threadsafe way
#Rename the file, which fileinput does for inplace.
#Note that path will now be the new file
original_backup_file = f"{path}.bak"
os.rename(path, original_backup_file)
with open(original_backup_file, "r") as original_file:
with open(path, "w") as new_file:
for line in original_file:
try:
d = json.loads(line)
original_time = datetime.strptime(d["eventTime"],"%Y-%m-%dT%H:%M:%S.%fZ")
new_time = (difference + original_time)
original_time = original_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
new_time = new_time.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
new_file.write(line.replace(original_time, new_time))
except ValueError:
d = json.loads(line)
original_time = datetime.strptime(d["eventTime"],"%Y-%m-%dT%H:%M:%SZ")
new_time = (difference + original_time)
original_time = original_time.strftime("%Y-%m-%dT%H:%M:%SZ")
new_time = new_time.strftime("%Y-%m-%dT%H:%M:%SZ")
new_file.write(line.replace(original_time, new_time))
os.remove(original_backup_file)
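# Minimal usage sketch (the data file path is a placeholder):
# dm = DataManipulation()
# dm.manipulate_timestamp('attack_data/cloudtrail.json', sourcetype='aws:cloudtrail', source='aws_cloudtrail')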
|
import collections
class StreamHelper:
""" Staticly available class with a bunch of useful variables.
streamer: The name of the streamer in full lowercase
streamer_id: The Twitch user ID of the streamer (a string)
stream_id: The ID of the current stream. False if the stream is not live
"""
streamer = "Unknown"
streamer_id = "Unknown"
stream_manager = None
social_keys_unsorted = {
"twitter": {"format": "https://twitter.com/{}", "title": "Twitter"},
"github": {"format": "https://github.com/{}", "title": "Github"},
"youtube": {"format": "{}", "title": "YouTube"},
"instagram": {"format": "https://www.instagram.com/{}/", "title": "Instagram"},
"reddit": {"format": "https://www.reddit.com/r/{}/", "title": "Reddit"},
"steam": {"format": "{}", "title": "Steam"},
"facebook": {"format": "{}", "title": "Facebook"},
"discord": {"format": "https://discord.gg/{}", "title": "Discord"},
"star": {"format": "{}", "title": "Website"},
"patreon": {"format": "https://www.patreon.com/{}", "title": "Patreon"},
"snapchat": {"format": "https://snapchat.com/add/{}", "title": "Snapchat"},
}
social_keys = collections.OrderedDict(sorted(social_keys_unsorted.items(), key=lambda t: t[0]))
valid_social_keys = set(social_keys.keys())
@staticmethod
def init_stream_manager(stream_manager):
StreamHelper.stream_manager = stream_manager
@staticmethod
def init_streamer(streamer, streamer_id):
StreamHelper.streamer = streamer
StreamHelper.streamer_id = streamer_id
@staticmethod
def get_streamer():
return StreamHelper.streamer
@staticmethod
def get_streamer_id():
return StreamHelper.streamer_id
@staticmethod
def get_current_stream_id():
""" Gets the stream ID of the current stream.
Returns None if the stream manager has not been initialized.
Returns False if there is no stream online.
Returns the current stream's ID (integer) otherwise.
"""
if StreamHelper.stream_manager is None:
# Stream manager not initialized, web interface?
return None
if StreamHelper.stream_manager.current_stream is None:
# Stream is offline
return False
return StreamHelper.stream_manager.current_stream.id
@staticmethod
def get_last_stream_id():
""" Gets the stream ID of the last stream.
Returns None if the stream manager has not been initialized.
Returns False if there has been no previous stream.
Returns the last stream's ID (integer) otherwise.
"""
if StreamHelper.stream_manager is None:
# Stream manager not initialized, web interface?
return None
if StreamHelper.stream_manager.last_stream is None:
# Stream is offline
return False
return StreamHelper.stream_manager.last_stream.id
@staticmethod
def get_viewers():
""" Returns how many viewers are currently watching the stream.
Returns 0 if something fails
"""
if StreamHelper.stream_manager is None:
# Stream manager not initialized, web interface?
return 0
if StreamHelper.stream_manager.current_stream is None:
# Stream is offline
return 0
return StreamHelper.stream_manager.num_viewers
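# Minimal usage sketch (streamer name, ID and manager object are placeholders):
# StreamHelper.init_streamer("examplestreamer", "12345678")
# StreamHelper.init_stream_manager(stream_manager)
# print(StreamHelper.get_streamer(), StreamHelper.get_current_stream_id(), StreamHelper.get_viewers())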
|
import datetime as DT
import numpy as NP
import matplotlib.pyplot as PLT
import matplotlib.colors as PLTC
import scipy.constants as FCNST
from astropy.io import fits
from astropy.io import ascii
from astropy.table import Table
import progressbar as PGB
import antenna_array as AA
import data_interface as DI
import geometry as GEOM
import sim_observe as SIM
import my_DSP_modules as DSP
from pycallgraph import PyCallGraph, Config, GlobbingFilter
from pycallgraph.output import GraphvizOutput
import ipdb as PDB
infile = '/data3/t_nithyanandan/project_MOFF/data/samples/lwa_data.CDF.fits'
du = DI.DataHandler(indata=infile)
max_n_timestamps = 4
config = Config(max_depth=5, groups=True)
graphviz = GraphvizOutput(output_file='/data3/t_nithyanandan/project_MOFF/data/samples/figures/profile_graph_{0:0d}_iterations.png'.format(max_n_timestamps))
config.trace_filter = GlobbingFilter(include=['antenna_array.*'])
# exclude=['progressbar.*', 'numpy.*', 'warnings.*', 'matplotlib.*', 'scipy.*', 'weakref.*', 'threading.*', 'six.*', 'Queue.*', 'wx.*', 'abc.*', 'posixpath.*', '_weakref*', 'astropy.*', 'linecache.*', 'multiprocessing.*', 'my_*', 'geometry.*'],
lat = du.latitude
f0 = du.center_freq
nts = du.nchan
nchan = nts * 2
fs = du.sample_rate
dt = 1/fs
freqs = du.freq
channel_width = du.freq_resolution
f_center = f0
bchan = 100
echan = 925
max_antenna_radius = 75.0 # in meters
# max_antenna_radius = 75.0 # in meters
antid = du.antid
antpos = du.antpos
n_antennas = du.n_antennas
timestamps = du.timestamps
n_timestamps = du.n_timestamps
npol = du.npol
ant_data = du.data
core_ind = NP.logical_and((NP.abs(antpos[:,0]) < max_antenna_radius), (NP.abs(antpos[:,1]) < max_antenna_radius))
# core_ind = NP.logical_and((NP.abs(antpos[:,0]) <= NP.max(NP.abs(antpos[:,0]))), (NP.abs(antpos[:,1]) < NP.max(NP.abs(antpos[:,1]))))
antid = antid[core_ind]
antpos = antpos[core_ind,:]
ant_info = NP.hstack((antid.reshape(-1,1), antpos))
n_antennas = ant_info.shape[0]
ant_data = ant_data[:,core_ind,:,:]
with PyCallGraph(output=graphviz, config=config):
ants = []
aar = AA.AntennaArray()
for i in xrange(n_antennas):
ant = AA.Antenna('{0:0d}'.format(int(ant_info[i,0])), lat, ant_info[i,1:], f0, nsamples=nts)
ant.f = ant.f0 + DSP.spectax(2*nts, dt, shift=True)
ants += [ant]
aar = aar + ant
aar.grid()
antpos_info = aar.antenna_positions(sort=True)
if max_n_timestamps is None:
max_n_timestamps = len(timestamps)
else:
max_n_timestamps = min(max_n_timestamps, len(timestamps))
timestamps = timestamps[:max_n_timestamps]
stand_cable_delays = NP.loadtxt('/data3/t_nithyanandan/project_MOFF/data/samples/cable_delays.txt', skiprows=1)
antennas = stand_cable_delays[:,0].astype(NP.int).astype(str)
cable_delays = stand_cable_delays[:,1]
for it in xrange(max_n_timestamps):
timestamp = timestamps[it]
update_info = {}
update_info['antennas'] = []
update_info['antenna_array'] = {}
update_info['antenna_array']['timestamp'] = timestamp
print 'Consolidating Antenna updates...'
progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Antennas '.format(n_antennas), PGB.ETA()], maxval=n_antennas).start()
antnum = 0
for ia, label in enumerate(antid):
adict = {}
adict['label'] = label
adict['action'] = 'modify'
adict['timestamp'] = timestamp
adict['t'] = NP.arange(nts) * dt
adict['gridfunc_freq'] = 'scale'
adict['gridmethod'] = 'NN'
adict['distNN'] = 0.5 * FCNST.c / f0
adict['tol'] = 1.0e-6
adict['maxmatch'] = 1
adict['Et'] = {}
adict['flags'] = {}
adict['stack'] = True
adict['wtsinfo'] = {}
adict['delaydict'] = {}
for ip in range(npol):
adict['delaydict']['P{0}'.format(ip+1)] = {}
adict['delaydict']['P{0}'.format(ip+1)]['frequencies'] = freqs
adict['delaydict']['P{0}'.format(ip+1)]['delays'] = cable_delays[antennas == label]
adict['delaydict']['P{0}'.format(ip+1)]['fftshifted'] = True
adict['wtsinfo']['P{0}'.format(ip+1)] = [{'orientation':0.0, 'lookup':'/data3/t_nithyanandan/project_MOFF/simulated/LWA/data/lookup/E_illumination_isotropic_radiators_lookup_zenith.txt'}]
adict['Et']['P{0}'.format(ip+1)] = ant_data[it,ia,:,ip]
if NP.any(NP.isnan(adict['Et']['P{0}'.format(ip+1)])):
adict['flags']['P{0}'.format(ip+1)] = True
else:
adict['flags']['P{0}'.format(ip+1)] = False
update_info['antennas'] += [adict]
progress.update(antnum+1)
antnum += 1
progress.finish()
aar.update(update_info, parallel=True, verbose=True)
aar.grid_convolve(pol='P1', method='NN', distNN=0.5*FCNST.c/f0, tol=1.0e-6, maxmatch=1, identical_antennas=True, cal_loop=False, gridfunc_freq='scale', mapping='weighted', wts_change=False, parallel=True, pp_method='pool')
# fp1 = [ad['flags']['P1'] for ad in update_info['antennas']]
# p1f = [a.antpol.flag['P1'] for a in aar.antennas.itervalues()]
imgobj = AA.NewImage(antenna_array=aar, pol='P1')
imgobj.imagr(weighting='natural', pol='P1')
img = imgobj.img['P1']
# for chan in xrange(imgobj.holograph_P1.shape[2]):
# imval = NP.abs(imgobj.holograph_P1[imgobj.mf_P1.shape[0]/2,:,chan])**2 # a horizontal slice
# imval = imval[NP.logical_not(NP.isnan(imval))]
# immax2[it,chan,:] = NP.sort(imval)[-2:]
if it == 0:
avg_img = NP.copy(img)
else:
avg_img += NP.copy(img)
if NP.any(NP.isnan(avg_img)):
PDB.set_trace()
avg_img /= max_n_timestamps
beam = imgobj.beam['P1']
fig = PLT.figure()
ax = fig.add_subplot(111)
imgplot = ax.imshow(NP.mean(avg_img[:,:,bchan:echan+1], axis=2), aspect='equal', origin='lower', extent=(imgobj.gridl.min(), imgobj.gridl.max(), imgobj.gridm.min(), imgobj.gridm.max()))
ax.set_xlim(imgobj.gridl.min(), imgobj.gridl.max())
ax.set_ylim(imgobj.gridm.min(), imgobj.gridm.max())
PLT.savefig('/data3/t_nithyanandan/project_MOFF/data/samples/figures/MOFF_image_{0:0d}_iterations.png'.format(max_n_timestamps), bbox_inches=0)
fig = PLT.figure()
ax = fig.add_subplot(111)
imgplot = ax.imshow(NP.mean(beam[:,:,bchan:echan+1], axis=2), aspect='equal', origin='lower', extent=(imgobj.gridl.min(), imgobj.gridl.max(), imgobj.gridm.min(), imgobj.gridm.max()))
ax.set_xlim(imgobj.gridl.min(), imgobj.gridl.max())
ax.set_ylim(imgobj.gridm.min(), imgobj.gridm.max())
PLT.savefig('/data3/t_nithyanandan/project_MOFF/data/samples/figures/MOFF_psf_square_illumination.png'.format(max_n_timestamps), bbox_inches=0)
|
"""
Get the polyinterface objects we need. Currently Polyglot Cloud uses
a different Python module which doesn't have the new LOG_HANDLER functionality
"""
from udi_interface import Custom,Node,LOG_HANDLER,LOGGER
import logging
# My Template Node
from nodes import TemplateNode
# IF you want a different log format than the current default
LOG_HANDLER.set_log_format('%(asctime)s %(threadName)-10s %(name)-18s %(levelname)-8s %(module)s:%(funcName)s: %(message)s')
class TemplateController(Node):
"""
The Node class represents a node on the ISY. The first node started, which is
used for interaction with the node server, is typically called a 'Controller'
node. If this node has the address 'controller', Polyglot will automatically populate
the 'ST' driver of this node with the node server's on-line/off-line status.
This node will also typically handle discovery & creation of other nodes and deal
with the user configurable options of the node server.
Class Variables:
self.name: String name of the node
self.address: String Address of Node, must be less than 14 characters (ISY limitation)
self.primary: String Address of Node's parent, must be less than 14 characters (ISY limitation)
self.poly: Interface class object. Provides access to the interface API.
Class Methods
query(): Queries and reports ALL drivers for ALL nodes to the ISY.
getDriver('ST'): gets the current value from Polyglot for driver 'ST' returns a STRING, cast as needed
setDriver('ST', value, report, force, uom): Updates the driver with the value (and possibly a new UOM)
reportDriver('ST', force): Send the driver value to the ISY, normally it will only send if the value has changed, force will always send
reportDrivers(): Send all driver values to the ISY
status()
delNode(): Delete the node from the ISY and Polyglot database
"""
def __init__(self, polyglot, primary, address, name):
"""
Optional.
Super runs all the parent class necessities. You do NOT have
to override the __init__ method, but if you do, you MUST call super.
"""
super(TemplateController, self).__init__(polyglot, primary, address, name)
self.poly = polyglot
self.name = 'Template Controller'
self.hb = 0
# Create data storage classes to hold specific data that we need
# to interact with.
self.Parameters = Custom(polyglot, 'customparams')
self.Notices = Custom(polyglot, 'notices')
self.TypedParameters = Custom(polyglot, 'customtypedparams')
self.poly.onConfig(self.configHandler) # register to get config data sent by Polyglot
self.poly.onCustomParams(self.parameterHandler) # register to get parameter info sent by Polyglot
self.poly.onCustomTypedParams(self.typedParameterHandler) # register to get typed parameter info sent by Polyglot
self.poly.onStart(address, self.start) # register a function to run when the node is added
self.poly.onPoll(self.poll) # register to get short and long poll events
# Tell the interface we exist.
self.poly.addNode(self)
def start(self):
"""
Optional.
Polyglot v3 Interface startup done. Here is where you start your integration.
This is called via the onStart callback configured above, once the node has
been added to the interface.
In this example we call various methods that deal with initializing the
node server. This is where you should start. No need to Super this method,
the parent version does nothing.
"""
self.check_params()
# Send the profile files to the ISY if necessary. The profile version
# number will be checked and compared. If it has changed since the last
# start, the new files will be sent.
self.poly.updateProfile()
# Send the default custom parameters documentation file to Polyglot
# for display in the dashboard.
self.poly.setCustomParamsDoc()
self.heartbeat(0)
self.discover()
# Here you may want to send updated values to the ISY rather
# than wait for a poll interval. The user will get more
# immediate feedback that the node server is running
"""
Called via the onConfig event. When the interface receives a
configuration structure from Polyglot, it will send that config
to your node server via this callback.
The config structure does contain the list of nodes & last
driver values stored in the database. These can be accessed
here to update your node server with the previous state.
"""
def configHandler(self, config):
pass
"""
Called via the onCustomParams event. When the user enters or
updates Custom Parameters via the dashboard. The full list of
parameters will be sent to your node server via this callback.
Here we're loading them into our local storage so that we may
use them as needed.
New or changed parameters are marked so that you may trigger
other actions when the user changes or adds a parameter.
"""
def parameterHandler(self, params):
self.Parameters.load(params)
"""
Called via the onCustomParams event. When the user enters or
updates Custom Parameters via the dashboard. The full list of
parameters will be sent to your node server via this callback.
Here we're loading them into our local storage so that we may
use them as needed.
"""
def typedParameterHandler(self, params):
self.TypedParameters.load(params)
"""
Called via the onPoll event. The onPoll event is triggered at
the intervals specified in the node server configuration. There
are two separate poll events, a long poll and a short poll. Which
one is indicated by the flag. flag==True indicates a long poll
event.
Use this if you want your node server to do something at fixed
intervals.
"""
def poll(self, flag):
if flag:
LOGGER.debug('longPoll (controller)')
self.heartbeat()
else:
LOGGER.debug('shortPoll (controller)')
def query(self,command=None):
"""
Optional.
By default a query to the control node reports the FULL driver set for ALL
nodes back to ISY. If you override this method you will need to Super or
issue a reportDrivers() to each node manually.
"""
self.check_params()
nodes = self.poly.getNodes()
for node in nodes:
nodes[node].reportDrivers()
def discover(self, *args, **kwargs):
"""
Example
Do discovery here. Does not have to be called discovery. Called from example
controller start method and from the DISCOVER command received from the ISY as an example.
"""
self.poly.addNode(TemplateNode(self.poly, self.address, 'templateaddr', 'Template Node Name'))
def delete(self):
"""
Example
This is sent by Polyglot upon deletion of the NodeServer. If the process is
co-resident and controlled by Polyglot, it will be terminated within 5 seconds
of receiving this message.
"""
LOGGER.info('Oh God I\'m being deleted. Nooooooooooooooooooooooooooooooooooooooooo.')
def stop(self):
LOGGER.debug('NodeServer stopped.')
def heartbeat(self,init=False):
LOGGER.debug('heartbeat: init={}'.format(init))
if init is not False:
self.hb = init
LOGGER.debug('heartbeat: hb={}'.format(self.hb))
if self.hb == 0:
self.reportCmd("DON",2)
self.hb = 1
else:
self.reportCmd("DOF",2)
self.hb = 0
def set_module_logs(self,level):
logging.getLogger('urllib3').setLevel(level)
def check_params(self):
"""
This is an example if using custom Params for user and password and an example with a Dictionary
"""
self.Notices.clear()
self.Notices['hello'] = 'Hey there, my IP is {}'.format(self.poly.network_interface['addr'])
self.Notices['hello2'] = 'Hello Friends!'
default_user = "YourUserName"
default_password = "YourPassword"
#self.user = self.getCustomParam('user')
self.user = self.Parameters.user
if self.user is None:
self.user = default_user
LOGGER.error('check_params: user not defined in customParams, please add it. Using {}'.format(self.user))
#self.addCustomParam({'user': self.user})
self.Parameters.user = self.user
#self.password = self.getCustomParam('password')
self.password = self.Parameters.password
if self.password is None:
self.password = default_password
LOGGER.error('check_params: password not defined in customParams, please add it. Using {}'.format(self.password))
#self.addCustomParam({'password': self.password})
self.Parameters.password = self.password
# Always overwrite this, it's just an example...
self.Parameters.type = "TheType"
self.Parameters.host = "host_or_IP"
self.Parameters.port = "port_number"
# Add a notice if they need to change the user/password from the default.
if self.user == default_user or self.password == default_password:
self.Notices['auth'] = 'Please set proper user and password in configuration page'
self.Notices['test'] = 'This is only a test'
# Typed Parameters allow for more complex parameter entries.
#self.poly.save_typed_params(
self.TypedParameters.load( [
{
'name': 'item',
'title': 'Item',
'desc': 'Description of Item',
'isList': False,
'params': [
{
'name': 'id',
'title': 'The Item ID',
'isRequired': True,
},
{
'name': 'title',
'title': 'The Item Title',
'defaultValue': 'The Default Title',
'isRequired': True,
},
{
'name': 'extra',
'title': 'The Item Extra Info',
'isRequired': False,
}
]
},
{
'name': 'itemlist',
'title': 'Item List',
'desc': 'Description of Item List',
'isList': True,
'params': [
{
'name': 'id',
'title': 'The Item ID',
'isRequired': True,
},
{
'name': 'title',
'title': 'The Item Title',
'defaultValue': 'The Default Title',
'isRequired': True,
},
{
'name': 'names',
'title': 'The Item Names',
'isRequired': False,
'isList': True,
'defaultValue': ['somename']
},
{
'name': 'extra',
'title': 'The Item Extra Info',
'isRequired': False,
'isList': True,
}
]
},
], True)
def remove_notice_test(self,command):
LOGGER.info('remove_notice_test: notices={}'.format(self.Notices))
# Remove all existing notices
self.Notices.delete('test')
#self.removeNotice('test')
def remove_notices_all(self,command):
LOGGER.info('remove_notices_all: notices={}'.format(self.Notices))
# Remove all existing notices
self.Notices.clear()
"""
Optional.
    Since the controller is a node in ISY, it will actually show up as a node.
So it needs to know the drivers and what id it will use. The controller should
report the node server status and have any commands that are needed to control
operation of the node server.
Typically, node servers will use the 'ST' driver to report the node server status
    and it is a best practice to do this unless you have a very good reason not to.
The id must match the nodeDef id="controller" in the nodedefs.xml
"""
id = 'controller'
commands = {
'QUERY': query,
'DISCOVER': discover,
'REMOVE_NOTICES_ALL': remove_notices_all,
'REMOVE_NOTICE_TEST': remove_notice_test,
}
drivers = [
{'driver': 'ST', 'value': 1, 'uom': 2},
]
|
# Modified by Yulun Nie 11/25/2021 for version 0.1
#makes straight line
import numpy as np
import cv2
def get_points(img):
# Set up points to return
data = {}
data['img'] = img.copy()
data['lines'] = []
# Set the callback function for any mouse event
# print(img)
cv2.imshow("Image", img)
cv2.setMouseCallback("Image", mouse_handler, data)
cv2.waitKey(0)
cv2.destroyAllWindows()
# aggregate lines data
ys = data['lines']
assert len(ys) == 2
_, width, _ = img.shape
line1 = ((0, ys[0]), (width, ys[0]))
line2 = ((0, ys[1]), (width, ys[1]))
# line = (start_point, end_point), point = (x, y)
return (line1, line2)
def mouse_handler(event, x, y, flags, data):
# if mouse left is pressed
if event == cv2.EVENT_LBUTTONDOWN:
img = data['img'].copy()
# modify lines list
data['lines'].append(y)
if len(data['lines']) > 2:
data['lines'].pop(0)
# display current lines
_, width, _ = img.shape
for i in data['lines']:
cv2.line(img, (0, i), (width, i), (0,0,255), 2)
cv2.imshow("Image", img)
# # Running the code
# img = cv2.imread('test.png', 1)
# # get lines y's
# line1, line2 = get_points(img)
# print(line1, line2)
# # draw lines
# final_img = cv2.line(img, line1[0], line1[1], (0,0,0), 1)
# final_img = cv2.line(final_img, line2[0], line2[1], (0,0,0), 1)
# cv2.imshow('Image', final_img)
# cv2.waitKey(0)
|
from os import environ, path
environ['PYART_QUIET'] = ''
conf_path = path.join(path.dirname(path.realpath(__file__)), 'pyart_config.py')
environ['PYART_CONFIG'] = conf_path
#conf = config.load_config(conf_path)
|
import argparse as argp
SAVE_LIST_FILENAME = 'Lottery649_History.bin'
SAVE_LIST_FILENAME_2 = 'SuperLotto638_History.bin'
LOTTERY_NUM = 7
LOTTERY_HEIGHT = 64
EMBEDDED_CH = 64
LOTTERY_1_MAX_NUM = 49
LOTTERY_2_MAX_NUM = 38
# 0 ~ 6
TRAIN_NUM_INDEX = 0
CHECKPOINT_FILENAME = 'checkpoint{0}.ckpt'.format(TRAIN_NUM_INDEX)
CHECKPOINT_FILENAME2 = 'checkpoint_2_{0}.ckpt'.format(TRAIN_NUM_INDEX)
def ARGS():
_parser_ = argp.ArgumentParser()
    # argparse's type=bool treats any non-empty string (even "False") as True, so parse the value explicitly
    _parser_.add_argument('--ReFetchLog', dest="ReFetchLog", default=False, type=lambda s: str(s).lower() in ('true', '1', 'yes'), help='Refetch Log from website')
return _parser_.parse_args()
|
def resolve():
'''
code here
'''
N, K = [int(item) for item in input().split()]
xs = [int(item) for item in input().split()]
min_lr = 10**9
min_rl = 10**9
if N != K:
for i in range(N-K+1):
min_lr = min(min_lr, abs(xs[i]) + abs(xs[i+K-1] - xs[i]))
min_rl = min(min_rl, abs(xs[i+K-1]) + abs(xs[i+K-1] - xs[i]))
print(min(min_lr, min_rl))
    elif N == 1:
print(abs(xs[0]))
else:
print(min(abs(xs[0]) + abs(xs[N-1] - xs[0]), abs(xs[N-1]) + abs(xs[N-1] - xs[0])))
if __name__ == "__main__":
resolve()
|
from app.models import yandex_translate, converter, opensubtitles, registration, learning_mode
def register(email, username, hashed_password):
return registration.register_user(email, username, hashed_password)
class Controller(object):
def __init__(self, yandex_translate_api_key):
self._opensubtitles_mdl = opensubtitles.OpenSubtitlesModel()
self._yandextranslate_mdl = yandex_translate.YandexTranslateModel(yandex_translate_api_key)
def search_by_name_get_first_n(self, name, n=10):
return self._opensubtitles_mdl.search_by_name_get_first_n(name, n)
def get_learning_mode_data(self, idsubtitlefile, username):
path_to_subtitle_file = self._opensubtitles_mdl.download_by_file_id(idsubtitlefile)
converter_mdl = converter.Converter(path_to_subtitle_file)
words = converter_mdl.to_list()
translated_words = self._yandextranslate_mdl.translate_words_en_ru(words)
return learning_mode.build_learning_mode_data(words, translated_words, username)
|
import json
import os
import boto3
from slack import WebClient
from slack.errors import SlackApiError
from data_access.dataRepos import MafiaSerializer, GameStateRepo
from stateManagers.gameStateManager import Actions
from models.player import Roles
from models.gameState import States as GameStates
from util.env import getEnvVar
from util.constants import Header
from util.game_message_builder import (
get_state_change_message, get_blocks_for_message)
from util.messagetext import MessageText as txt
def getToken(id):
dynamodb = boto3.resource('dynamodb')
tokenStore = dynamodb.Table(getEnvVar('TOKEN_SOURCE'))
result = tokenStore.get_item(Key={'_id': id})
print(f'Getting token for {id}')
if 'Item' in result:
return result['Item']['token']
def processRecords(record_list):
serializer = MafiaSerializer()
for r in record_list:
try:
body = json.loads(r['body'])
state = serializer.DeserializeGame(body['state'])
token = getToken(state.id)
client = WebClient(token=token)
action = body['action']
sourcePlayer = body['source']
targetPlayer = body['target']
mainChannel = state.meta['channel_id']
message, header = get_state_change_message(
state, True, action, sourcePlayer, targetPlayer)
blocks = get_blocks_for_message(message, header)
client.chat_postMessage(channel=mainChannel, blocks=blocks)
if action == Actions.START_GAME:
# create a private channel for the mafia
mafiaMembers = ','.join(
[p.id for p in state.players if p.role == Roles.MAFIA])
mafiaChannelName = 'mafia-secrets'
response = client.conversations_list(types='private_channel')
mafiaChannels = [c for c in response['channels']
if c['name'] == mafiaChannelName]
if len(mafiaChannels) > 0:
print('Unarchiving mafia channel')
channelId = mafiaChannels[0]['id']
response = client.conversations_unarchive(
channel=channelId)
else:
print('Creating mafia channel')
response = client.conversations_create(
name=mafiaChannelName, is_private=True)
print(response)
channelId = response['channel']['id']
print(f'Inviting {mafiaMembers} to mafia channel')
client.conversations_invite(
channel=channelId, users=mafiaMembers)
message = txt.MAFIA_TEAM_INTRO
header = Header.MAFIA_ONLY
blocks = get_blocks_for_message(message, header)
client.chat_postMessage(channel=channelId, blocks=blocks)
# store the mafia channel
state.meta['mafia_channel'] = channelId
repo = GameStateRepo()
repo.UpdateGame(state)
elif state.state == GameStates.GAME_OVER:
# clean up the mafia channel and archive it
mafia_channel = state.meta['mafia_channel']
for player_id in [
p.id for p in state.players if p.role == Roles.MAFIA]:
print(f'kicking {player_id} from mafia channel')
client.conversations_kick(
channel=mafia_channel, user=player_id)
print(f'archiving channel {mafia_channel}')
client.conversations_archive(channel=mafia_channel)
except SlackApiError as e:
print(e)
def lambda_handler(event, context):
print(f"Received event:\n{json.dumps(event)}\nWith context:\n{context}")
processRecords(event['Records'])
response = {
'statusCode': 200,
'headers': {},
'body': {}
}
return response
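# Illustrative only: a minimal sketch of the SQS-style event shape that
# lambda_handler/processRecords expect, inferred from the field accesses above.
# The serialized 'state' payload is produced by MafiaSerializer elsewhere and is
# shown here only as a placeholder assumption.
_example_event = {
    'Records': [
        {
            'body': json.dumps({
                'state': {},            # assumed: output of MafiaSerializer.SerializeGame
                'action': 'vote',       # assumed action value; real values come from Actions
                'source': 'U012ABCDEF',
                'target': 'U034GHIJKL',
            })
        }
    ]
}
# lambda_handler(_example_event, context=None)  # would attempt real Slack/DynamoDB calls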
|
class Flow:
def __init__(self, flow):
self.flow = flow
def isNextInFlow(self, next, currentName):
current = self._getCurrent(currentName)
if (current is None):
return False
return next in current['next']
def getRestOfFlow(self, currentName):
current = self._getCurrent(currentName)
if (current is None):
return []
flowcopy = self.flow[:]
flowcopy.remove(current)
return flowcopy
def _getCurrent(self, currentName):
for node in self.flow:
if node['source'] == currentName:
current = node
return current
return None
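# Minimal usage sketch (illustrative, not part of the original module): the flow is
# assumed to be a list of dicts, each with a 'source' name and a 'next' list, based
# on how _getCurrent and isNextInFlow read it above.
if __name__ == '__main__':
    _example_flow = Flow([
        {'source': 'start', 'next': ['validate']},
        {'source': 'validate', 'next': ['finish']},
    ])
    print(_example_flow.isNextInFlow('validate', 'start'))  # True
    print(_example_flow.getRestOfFlow('start'))             # nodes remaining after 'start'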
|
#-*- encoding:utf-8 -*-
import sys
import hashlib
from hashlib import sha1
import hmac
import base64
from socket import *
import json, time, threading
from websocket import create_connection
import websocket
from urllib import quote
import logging
reload(sys)
sys.setdefaultencoding("utf8")
logging.basicConfig()
base_url = "ws://rtasr.xfyun.cn/v1/ws"
app_id = "5be0fbcb"
api_key = "9502d486f048d76978099f0646fe4f52"
file_path = "./test_1.pcm"
file_path = '/Users/higgs/tmpxxx/718895a9-31b3-4e0d-85fb-cad14bbad0d6.wav'
file_path = 'all.wav'
# file_path = '/Users/higgs/beast/data/audio/兔司机与候选人首次沟通录音/0f73049d-c131-4b83-b7f0-c73a34eea77c.wav'
file_path = '1.wav'
# file_path = 'aa826fc3-36c0-4c11-bd62-395f767ace61.wav'
end_tag = "{\"end\": true}"
def parse_rt_json(s_json):
    '''
    :param s_json: result message from the websocket, already parsed into a dict
    :return: None (prints each recognized sentence)
    '''
# res_dict = json.loads(s_json)
res_dict = s_json
action = res_dict["action"]
if action == 'result':
data = res_dict['data']
data_dict = json.loads(data)
rt_list = data_dict['cn']['st']['rt']
if int(data_dict['cn']['st']['type']) == 0:
for rt in rt_list:
ws_list = rt['ws']
sent = ''
for ws in ws_list:
sent += ws['cw'][0]['w']
print(sent)
class Client():
def __init__(self):
        # Generate authentication parameters
ts = str(int (time.time()))
tmp = app_id + ts
hl = hashlib.md5()
hl.update(tmp.encode(encoding='utf-8'))
my_sign = hmac.new(api_key, hl.hexdigest(), sha1).digest()
signa = base64.b64encode(my_sign)
self.ws = create_connection(base_url + "?appid=" + app_id + "&ts=" + ts + "&signa=" + quote(signa))
self.trecv = threading.Thread(target=self.recv)
self.trecv.start()
def send(self, file_path):
file_object = open(file_path, 'rb')
try:
index = 1
while True:
chunk = file_object.read(1280)
if not chunk:
break
self.ws.send(chunk)
index += 1
time.sleep(0.04)
finally:
# print str(index) + ", read len:" + str(len(chunk)) + ", file tell:" + str(file_object.tell())
file_object.close()
self.ws.send(bytes(end_tag))
print "send end tag success"
def recv(self):
try:
while self.ws.connected:
result = str(self.ws.recv())
if len(result) == 0:
print "receive result end"
break
result_dict = json.loads(result)
                # Parse the recognition result
if result_dict["action"] == "started":
print "handshake success, result: " + result
if result_dict["action"] == "result":
parse_rt_json(result_dict)
print "rtasr result: " + result
if result_dict["action"] == "error":
print "rtasr error: " + result
self.ws.close()
return
except websocket.WebSocketConnectionClosedException:
print "receive result end"
def close(self):
self.ws.close()
print "connection closed"
if __name__ == '__main__':
client = Client()
client.send(file_path)
|
import unittest
import sys
from PyQt5.QtWidgets import QApplication
from PyQt5.QtTest import QTest
from PyQt5 import Qt
from PyQt5.QtCore import QSize
from unittest import TestCase
import main
app = QApplication(sys.argv)
class MemoryTest(unittest.TestCase):
"""Tests the memory scanner"""
def setUp(self):
"""Create The Gui"""
self.form = main.MainWindow()
self.form.log.setLevel("NOTSET")
self.results = self.form.specs.memory.test()
def test_total(self):
"""Tests that total memory is not None"""
self.assertNotEqual(self.results.total, None, "total should not be None")
def test_available(self):
"""Tests that available memory is not None"""
self.assertNotEqual(self.results.available, None, "available should not be None")
def test_used(self):
"""Tests that used memory is not None"""
self.assertNotEqual(self.results.used, None, "used should not be None")
def test_percentage(self):
"""Tests that percentage memory is not None"""
self.assertNotEqual(self.results.percentage, None, "percentage should not be None")
if __name__ == "__main__":
unittest.main()
|
from project.user import User
class Library:
def __init__(self):
self.user_records: 'user objects' = []
self.books_available: '{authors: [books]}' = {}
self.rented_books: '{usernames: {book names: days left}}' = {}
self.days_to_return_book: '{book_name: [days]}' = {}
self.user_ids: '{user_id: user}' = {}
def add_user(self, user):
if user in self.user_records:
return f'User with id = {user.user_id} already registered in the library!'
self.user_records.append(user)
self.user_ids[user.user_id] = user
def remove_user(self, user: 'User'):
if user not in self.user_records:
return 'We could not find such user to remove!'
self.user_ids.pop(user.user_id)
self.user_records.remove(user)
def change_username(self, user_id: int, new_username: str):
try:
user = self.user_ids.get(user_id)
if user.username == new_username:
return 'Please check again the provided username - it should be different than the username used so far!'
user.username = new_username
return f'Username successfully changed to: {user.username} for userid: {user_id}'
except AttributeError:
return f'There is no user with id = {user_id}!'
user = User(12, 'Peter')
library = Library()
library.add_user(user)
print(library.add_user(user))
library.remove_user(user)
print(library.remove_user(user))
library.add_user(user)
print(library.change_username(2, 'Igor'))
print(library.change_username(12, 'Peter'))
print(library.change_username(12, 'George'))
[print(f'{user_record.user_id}, {user_record.username}, {user_record.books}') for user_record in library.user_records]
library.books_available.update({'J.K.Rowling': ['The Chamber of Secrets',
'The Prisoner of Azkaban',
'The Goblet of Fire',
'The Order of the Phoenix',
'The Half-Blood Prince',
'The Deathly Hallows']})
user.get_book('J.K.Rowling', 'The Deathly Hallows', 17, library)
print(library.books_available)
print(library.rented_books)
print(user.books)
print(user.get_book('J.K.Rowling', 'The Deathly Hallows', 10, library))
print(user.return_book('J.K.Rowling', 'The Cursed Child', library))
user.return_book('J.K.Rowling', 'The Deathly Hallows', library)
print(library.books_available)
print(library.rented_books)
print(user.books)
|
from ..api import narrow_buttons, wide_buttons
from django import template
register = template.Library()
@register.simple_tag
def wide_social_buttons(request, title, url):
return wide_buttons(request, title, url)
@register.simple_tag
def narrow_social_buttons(request, title, url):
return narrow_buttons(request, title, url)
|
cat_to_num_cluster = {"ladies palazzo": {"1 per set": 4,
"1 / unit": 15,
"1 per packet": 2},
"hmi touch panel": {"1 unit": 11},
"steam iron": {"1 / unit": 4,
"1 pr set": 2},
"copper cables": {"1 unit": 9,
"1 / meter": 6,
"1 per bundle": 3,
"1 / unit": 10},
"wooden crates": {"1 / meter ** 3": 5,
"1 / kilogram": 2,
"1 / meter": 2,
"1 / meter ** 2": 5,
"1 / unit": 5
},
"filling machine": {
"1 / unit": 5,
},
"button hole machine": {
"1 / unit": 7,
"1 per set": 5,
},
"bottle filling machines": {
"1 per set": 6,
"1 / unit": 8,
},
"busbar systems": {
"1 / unit": 2,
"1 / meter": 1,
},
"coffee makers": {
"1 / unit": 6,
},
"toaster": {"1 / unit": 6},
"belt conveyors": {"1 / meter ** 2": 2,
"1 per set": 6,
"1 per running feet": 2,
"1 / meter": 6,
"1 / second / unit": 4,
"1 / unit": 10},
"vertical form fill seal machines": {
"1 / unit": 6
},
"food processor": {
"1 / unit": 3
},
"biometric devices": {
"1 / unit": 11,
"1 per one": 1
},
"pneumatic impact wrenches": {
"1 / unit": 8
},
"polyester thread": {
"1 / meter": 2,
"1 / kilogram": 6,
"1 per roll": 3,
"1 / unit": 8,
"1 per packet": 4
},
"backhoe loader": {
"1 per one": 2,
"1 / unit": 8
},
"twisted yarn": {
"1 / kilogram": 3
},
"powder filling machine": {
"1 per set": 4,
"1 / unit": 7
},
"colored contact lenses": {
"1 / unit": 2
},
"fleece fabrics": {
"1 / meter": 6,
"1 / kilogram": 12,
},
"pvc cables": {
"1 / meter": 6,
"1 per roll": 1,
"1 / unit": 6,
"1 / meter / second": 1
},
"solar fans": {
"1 / unit": 9
},
"knitted yarn": {
"1 / kilogram": 7
},
"spun yarn": {
"1 / kilogram": 4
},
"camera lenses": {
"1 / unit": 4
},
"air spray gun": {
"1 / unit": 6
},
"hydraulic bending machine": {
"1 / unit": 8
},
"granules packing machine": {
"1 / unit": 10,
"1 per set": 3
},
"solar pump controller": {
"1 / unit": 10
},
"wheel loader": {
"1 / unit": 10
}
}
|
# Generated by Django 2.1.3 on 2018-11-21 08:16
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Partner',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('partner_code', models.CharField(help_text='Partner Short Code eg. UNDP', max_length=5, verbose_name='Partner Code')),
('partner_name', models.CharField(max_length=150, verbose_name='Partner Name')),
],
),
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(default='default.png', upload_to='profile_pics')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
# Generated by Django 3.0.6 on 2020-05-20 16:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('entities', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='questionset',
name='was_send',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='channel',
name='receiver',
field=models.ManyToManyField(blank=True, to='entities.Receiver'),
),
]
|
import os
import sys
sys.path.append( './' )
os.environ["TOKENIZERS_PARALLELISM"] = "false"
import numpy as np
import argparse
import torch
import torch.nn as nn
from models.Transformers import PairSupConBert
from training import PairSupConTrainer
from dataloader.dataloader import pair_loader
from utils.utils import set_global_random_seed, setup_path
from utils.optimizer import get_optimizer, MODEL_CLASS, get_bert_config_tokenizer
import subprocess
def run(args):
args.resPath, args.tensorboard = setup_path(args)
set_global_random_seed(args.seed)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
device_id = torch.cuda.device_count()
print("\t {} GPUs available to use!".format(device_id))
# dataloader
train_loader = pair_loader(args)
config, tokenizer = get_bert_config_tokenizer(args.bert)
model = PairSupConBert.from_pretrained(MODEL_CLASS[args.bert])
optimizer = get_optimizer(model, args)
model = nn.DataParallel(model)
model.to(device)
# set up the trainer
trainer = PairSupConTrainer(model, tokenizer, optimizer, train_loader, args)
trainer.train()
return None
def get_args(argv):
parser = argparse.ArgumentParser()
parser.add_argument('--training_instance', type=str, default='local')
parser.add_argument('--gpuid', nargs="+", type=int, default=[0], help="The list of gpuid, ex:--gpuid 3 1. Negative value means cpu-only")
parser.add_argument('--seed', type=int, default=0, help="")
parser.add_argument('--resdir', type=str, default='./results')
parser.add_argument('--logging_step', type=int, default=250, help="")
parser.add_argument('--dev_set', default="None", help="use sts-b as dev set or not", choices=["None", "sts"])
parser.add_argument('--path_sts_data', type=str, default='', help="use sts-b as dev set")
parser.add_argument('--s3_ckptdir', type=str, default='', help="s3path for ckpts")
# Dataset
parser.add_argument('--datapath', type=str, default='../datasets/NLI/')
parser.add_argument('--dataname', type=str, default='nli_pairsupcon.csv', help="")
parser.add_argument('--num_classes', type=int, default=2)
parser.add_argument('--text', type=str, default='text')
parser.add_argument('--pairsimi', type=str, default='pairsimi')
# Training parameters
parser.add_argument('--max_length', type=int, default=32)
parser.add_argument('--batch_size', type=int, default=1024)
parser.add_argument('--lr', type=float, default=5e-06, help="")
parser.add_argument('--lr_scale', type=int, default=100, help="")
parser.add_argument('--epochs', type=int, default=3)
parser.add_argument('--max_iter', type=int, default=100000000)
# Contrastive learning
parser.add_argument('--mode', type=str, default='pairsupcon', help="")
parser.add_argument('--bert', type=str, default='bertbase', choices=["bertbase", "bertlarge"], help="")
parser.add_argument('--contrast_type', type=str, default="HardNeg")
parser.add_argument('--feat_dim', type=int, default=128, help="dimension of the projected features for instance discrimination loss")
parser.add_argument('--temperature', type=float, default=0.05, help="temperature required by contrastive loss")
parser.add_argument('--beta', type=float, default=1, help=" ")
args = parser.parse_args(argv)
args.use_gpu = args.gpuid[0] >= 0
args.resPath = None
args.tensorboard = None
return args
if __name__ == '__main__':
args = get_args(sys.argv[1:])
if args.training_instance == "sagemaker":
# set the input data path if use sts-b as dev set, as ec2 instance cannot read most the data formats included in SentEval
# by default, we do not use the sts-b as the dev set for PairSupCon
args.path_sts_data = os.environ["SM_CHANNEL_DATA"]
print(f"\n path to sts data {args.path_sts_data} \n")
run(args)
# upload the saved checkpoints to s3 folder
subprocess.run(["aws", "s3", "cp", "--recursive", args.resdir, args.s3_ckptdir])
else:
run(args)
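# Example invocation (illustrative; the script name below is an assumption):
#   python train.py --training_instance local --bert bertbase --datapath ../datasets/NLI/ \
#       --dataname nli_pairsupcon.csv --batch_size 1024 --epochs 3 --lr 5e-06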
|
"""Unit test for parsing."""
import unittest
from fractions import Fraction
from decimal import Decimal
from integral.expr import Var, Const, Op, Fun, trig_identity
from integral.parser import parse_expr
class ParserTest(unittest.TestCase):
def testParseTerm(self):
test_data = [
"x", "1", "11/10", "-1", "-11/10",
"x + y", "x - y", "-x", "x * y", "x / y", "x ^ y",
"x + y * z", "(x + y) * z",
"x * y + z", "x * (y + z)",
"x * y ^ 2", "(x * y) ^ 2",
"sin(x)", "cos(x)", "log(x)", "exp(x)",
"D x. 3 * x",
"INT x:[1,2]. 3 * x",
"[3 * x]_x=1,2",
"INT x:[0,pi / 4]. sin(x)",
"x ^ (1/2)"
]
for s in test_data:
e = parse_expr(s)
self.assertEqual(str(e), s)
def testParseTerm2(self):
test_data = [
("-x", -Var("x")),
("-2", Const(-2)),
("1/2", Const(Fraction(1) / 2)),
("-1/2", Const(Fraction(-1) / 2)),
("0.5", Const(Decimal("0.5"))),
("pi", Fun("pi")),
("-x^2", Op("-", Op("^", Var("x"), Const(2))))
]
for s, e, in test_data:
self.assertEqual(parse_expr(s), e)
def testParseTerm3(self):
test_data = [
("$sin(x)^2$*sin(x)", Op("*", Op("^",Fun("sin",Var("x")),Const(2)), Fun("sin",Var("x")))),
("x + $x + y$", Op("+", Var("x"), Op("+", Var("x"), Var("y")))),
]
for s, e in test_data:
self.assertEqual(parse_expr(s), e)
self.assertTrue(Op("^",Fun("sin",Var("x")),Const(2)) in trig_identity)
if __name__ == "__main__":
unittest.main()
|
"""
"""
import hashlib
import hmac
from html.parser import HTMLParser
import logging
import os
from typing import Any, cast, Dict
import urllib
import urllib.parse
from fastapi import Request
from sqlalchemy.orm import Session
from . import admin
from .data import BroodUser
from .models import (
SlackOAuthEvent,
SlackIndexConfiguration,
SlackBugoutUser,
)
from ..broodusers import Method, bugout_api, process_group_in_journal_holders
logger = logging.getLogger(__name__)
BUGOUT_SCOPES = [
"app_mentions:read",
"channels:read",
"chat:write",
"emoji:read",
"groups:read",
"groups:write",
"im:history",
"im:read",
"im:write",
"links:read",
"mpim:read",
"mpim:write",
"reactions:read",
"users.profile:read",
]
class InstallationNotFound(Exception):
"""
Raised when a handler requires @bugout to be installed in a Slack workspace but the installation
is not found.
"""
class SlackParseError(Exception):
"""
Raised when there is an error parsing a Slack message.
"""
class SlackPostMessageError(Exception):
"""
Raised when there is an error posting a message to Slack.
"""
class HTMLToText(HTMLParser):
"""
Converts (even parentless) HTML into raw text. This is used when displaying results containing
HTML enrichments to users in Slack.
TODO(neeraj): Should this be handled by the index server?
"""
def __init__(self):
self.tokens = []
super().__init__()
def handle_starttag(self, tag, attrs):
if tag == "br":
self.tokens.append("\n")
def handle_endtag(self, tag):
if tag == "p":
self.tokens.append("\n")
def handle_data(self, data):
self.tokens.append(data)
def generate(self):
return " ".join(self.tokens)
def reset(self):
self.tokens = []
super().reset()
async def verify_slack_request_p(request: Request) -> bool:
"""
Verifies the request as per Slack's instructions:
https://api.slack.com/authentication/verifying-requests-from-slack
There is a reference implementation available as part of the slack-events-api package:
https://github.com/slackapi/python-slack-events-api/blob/9e36236a7488f54cad0d76ec8d2366a43283e2cc/slackeventsapi/server.py#L50
The reason we didn't directly use that package is that we have to handle FastAPI (actually
Starlette) requests, not Flask requests.
"""
BUGOUT_SLACK_SIGNING_SECRET = os.environ.get("BUGOUT_SLACK_SIGNING_SECRET")
if BUGOUT_SLACK_SIGNING_SECRET is None:
raise ValueError(
"Could not verify request: BUGOUT_SLACK_SIGNING_SECRET environment variable not set"
)
signing_secret = str.encode(BUGOUT_SLACK_SIGNING_SECRET)
slack_signature = request.headers["X-Slack-Signature"]
version = "v0"
timestamp = request.headers["X-Slack-Request-Timestamp"]
body_bytes = await request.body()
req = str.encode(f"{version}:{timestamp}:") + body_bytes
req_digest = hmac.new(signing_secret, req, hashlib.sha256).hexdigest()
request_hash = f"v0={req_digest}"
return hmac.compare_digest(request_hash, slack_signature)
def authorize_url(
redirect_uri: str = "http://spire.bugout.dev:7475/slack/oauth",
) -> str:
"""
Creates authorization URL as per Slack OAuth instructions:
https://api.slack.com/authentication/oauth-v2
"""
client_id = os.environ.get("BUGOUT_SLACK_CLIENT_ID")
if client_id is None:
raise ValueError(
"Could not create Authorization URL: BUGOUT_SLACK_CLIENT_ID not set"
)
quoted_client_id = urllib.parse.quote_plus(client_id)
client_id_section = f"client_id={quoted_client_id}"
quoted_redirect_uri = urllib.parse.quote_plus(redirect_uri)
redirect_uri_section = f"redirect_uri={quoted_redirect_uri}"
quoted_scopes = urllib.parse.quote_plus(",".join(BUGOUT_SCOPES))
scope_section = f"scope={quoted_scopes}"
url = f"https://slack.com/oauth/v2/authorize?{client_id_section}&{scope_section}&{redirect_uri_section}"
return url
async def handle_app_uninstall(
db_session: Session, team_id: str, spire_api_url: str
) -> None:
"""
    Handles uninstall of the app from a workspace by marking the SlackOAuthEvent as deleted and removing
    the group and its id from the journal permissions pertaining to that workspace in the internal database.
    The BroodUser, SlackBugoutUser, and the index configuration with the journal are preserved.
"""
query = db_session.query(SlackOAuthEvent).filter(SlackOAuthEvent.team_id == team_id)
oauth_event = query.one()
oauth_event.deleted = True
db_session.add(oauth_event)
db_session.commit()
# Receive user, his token and groups he belongs to
installation_user_query = db_session.query(SlackBugoutUser).filter(
SlackBugoutUser.slack_oauth_event_id == oauth_event.id
)
installation_user = installation_user_query.first()
# Extract journal_id from SlackIndexConfiguration and delete holders from this group
installation_journal = (
db_session.query(SlackIndexConfiguration)
.filter(SlackIndexConfiguration.slack_oauth_event_id == oauth_event.id)
.filter(SlackIndexConfiguration.index_name == "journal")
.first()
)
journal_id = installation_journal.index_url.rstrip("/").split("/")[-2]
journal_api_url = spire_api_url + "/journals/"
process_group_in_journal_holders(
method=Method.delete,
journal_id=journal_id,
journal_api_url=journal_api_url,
access_token=installation_user.bugout_access_token,
group_id=installation_user.bugout_group_id,
bot_installation=oauth_event,
)
bugout_api.delete_group(
installation_user.bugout_access_token, installation_user.bugout_group_id
)
installation_user_query.update({SlackBugoutUser.bugout_group_id: None})
db_session.commit()
logger.info(
f"Uninstallation process for bot_installation: {oauth_event.id} complete"
)
def handle_url_verification(item: Dict[str, Any]) -> str:
"""
Handles a Slack url_verification request:
https://api.slack.com/events/url_verification
"""
challenge = item.get("challenge")
if challenge is None:
raise ValueError("No challenge in Slack URL verification request")
cast(str, challenge)
return challenge
|
#coding=utf-8
"""
Description: Multi-format document conversion tool
Author:伏草惟存
Prompt: code in Python3 env
"""
import os,fnmatch
from win32com import client as wc
from win32com.client import Dispatch,gencache
'''
Function: extract the text content of a file
Parameters: 1 filePath: path of the source file  2 savePath: directory to save the converted file
'''
def Files2Txt(filePath,savePath=''):
try:
        # 1 Split the parent directory and the file name
        dirs,filename = os.path.split(filePath)
        # print('Directory:',dirs,'\nFile name:',filename)
        # 2 Build the name of the converted file
        typename = os.path.splitext(filename)[-1].lower() # get the file extension
        new_name = TranType(filename,typename)
        # print('New file name:',new_name)
        # 3 Save path for the converted file
        if savePath=="": savePath = dirs
        else: savePath = savePath
        new_save_path = os.path.join(savePath,new_name)
        print('Save path:',new_save_path)
        # 4 Launch the Word application to do the conversion
        wordapp = wc.Dispatch('Word.Application')
        mytxt = wordapp.Documents.Open(filePath)
        mytxt.SaveAs(new_save_path,4)
mytxt.Close()
except Exception as e:
pass
'''
Function: rename the file according to its extension
Parameters: 1 filename: name of the file  2 typename: file extension
Returns: new_name, the renamed file name
'''
def TranType(filename,typename):
    # New file name
new_name = ""
if typename == '.pdf' : # pdf->txt
if fnmatch.fnmatch(filename,'*.pdf') :
            new_name = filename[:-4]+'.txt' # keep the file name before ".pdf"
else: return
elif typename == '.doc' or typename == '.docx' : # word->txt
if fnmatch.fnmatch(filename, '*.doc') :
new_name = filename[:-4]+'.txt'
elif fnmatch.fnmatch(filename, '*.docx'):
new_name = filename[:-5]+'.txt'
else: return
else:
        print('Warning:\nThe input [',typename,'] is not supported. This tool only supports pdf/doc/docx formats; please provide a valid format.')
return
return new_name
if __name__ == '__main__':
filePath1 = os.path.abspath(r'../dataSet/Corpus/wordtotxt/一种改进的朴素贝叶斯文本分类方法研究.doc')
filePath2 = os.path.abspath(r'../dataSet/Corpus/pdftotxt/改进朴素贝叶斯文本分类方法研究.pdf')
filePath3 = os.path.abspath(r'../dataSet/Corpus/wordtotxt/科技项目数据挖掘决策架构.docx')
Files2Txt(filePath3)
|
# Copyright 2021 Rafał Safin (rafsaf). All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import json
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import Count
from django.forms.models import model_to_dict
from base import models
from utils import basic
class TargetWeightQueries:
def __init__(
self,
outline,
ruin=False,
fake=False,
every=False,
only_with_weights=False,
filtr=None,
):
self.outline = outline
targets = (
models.TargetVertex.objects.select_related("outline_time")
.prefetch_related(
"weightmodel_set",
)
.filter(outline=outline)
.order_by("id")
)
if not every:
self.targets = targets.filter(fake=fake, ruin=ruin)
else:
if only_with_weights:
self.targets = targets.annotate(
num_of_weights=Count("weightmodel")
).filter(num_of_weights__gt=0)
else:
self.targets = targets
if filtr is not None:
player = filtr[0]
coord = filtr[1]
self.targets = self.targets.filter(
target__icontains=coord, player__icontains=player
)
def targets_json_format(self):
context = {}
target: models.TargetVertex
for target in self.targets:
context[target.pk] = {
"target": target.target,
"player": target.player,
"fake": target.fake,
"ruin": target.ruin,
}
return json.dumps(context)
def __create_target_dict(self, for_json=False):
if for_json:
result = {}
for target in self.targets:
result[target.target] = list()
return result
result = {}
for target in self.targets:
result[target] = list()
return result
def __weights(self):
return (
models.WeightModel.objects.select_related("target")
.filter(target__in=self.targets)
.order_by("order")
)
def target_dict_with_weights_read(self):
"""Create dict key-target, value-lst with weights, add dist"""
context = self.__create_target_dict()
for weight in self.__weights():
weight.distance = round(basic.dist(weight.start, weight.target.target), 1)
weight.off = f"{round(weight.off / 1000,1)}k" # type: ignore
context[weight.target].append(weight)
return context
def target_dict_with_weights_extended(self):
context = self.__create_target_dict()
for weight in self.__weights().iterator(chunk_size=3000):
context[weight.target].append(weight)
return context
def target_dict_with_weights_json_format(self):
context = self.__create_target_dict(for_json=True)
for weight in self.__weights().iterator(chunk_size=3000):
context[weight.target.target].append(
model_to_dict(
weight,
fields=[
"start",
"player",
"off",
"nobleman",
"distance",
"t1",
"t2",
],
)
)
return json.dumps(context, cls=DjangoJSONEncoder)
def target_period_dictionary(self):
result_dict = {}
outline_time_dict = {}
for target in self.targets:
outline_time_dict[target.outline_time] = list()
for period in self.__time_periods():
outline_time_dict[period.outline_time].append(period)
for target in self.targets:
result_dict[target] = outline_time_dict[target.outline_time]
return result_dict
def time_period_dictionary(self):
id_time = {}
time_periods = {}
for time in self.__all_outline_times():
time_periods[time] = list()
for period in self.__all_time_periods():
id_time[period.outline_time.order] = period.outline_time
time_periods[period.outline_time].append(period)
return (id_time, time_periods)
def __all_time_periods(self):
times = self.__all_outline_times()
periods = (
models.PeriodModel.objects.select_related("outline_time").filter(
outline_time__in=times
)
).order_by("from_time", "-unit")
return periods
def __all_outline_times(self):
try:
outline_times = self.outline_times
except AttributeError:
outline_times = list(
(models.OutlineTime.objects.filter(outline=self.outline)).order_by(
"order"
)
)
self.outline_times = outline_times
return outline_times
def __time_periods(self):
periods = (
models.PeriodModel.objects.select_related("outline_time")
.filter(outline_time__in=[target.outline_time for target in self.targets])
.order_by("from_time", "-unit")
)
return periods
def __dict_with_village_ids(self, iterable_with_ids):
result_id_dict = {}
for village in (
models.VillageModel.objects.select_related()
.filter(coord__in=iterable_with_ids, world=self.outline.world)
.values("coord", "village_id", "player__player_id")
):
result_id_dict[village["coord"]] = village["village_id"]
return result_id_dict
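# Minimal usage sketch (illustrative, not part of the original module):
#   queries = TargetWeightQueries(outline, fake=False, ruin=False)
#   targets_json = queries.targets_json_format()
#   weights_json = queries.target_dict_with_weights_json_format()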
|
import numpy as np
import sys as sys
try:
from pycuda import cumath, driver, gpuarray, tools
from pycuda.elementwise import ElementwiseKernel
from scikits.cuda import cublas
import pycuda.autoinit
except Exception as e:
sys.stderr.write("WARNING: Pycuda or cublas was not found on python path\n")
|
from rental.models import Car, Language, User
from rest_framework import serializers
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'lang', 'password', )
write_only_fields = ('password', )
def create(self, validated_data):
return User.objects.create_user(**validated_data)
class CarSerializer(serializers.ModelSerializer):
name = serializers.SerializerMethodField()
class Meta:
model = Car
fields = ('id', 'name', 'creation_year', 'created_at', )
def get_name(self, obj):
request = self.context.get('request')
if request.user.lang == Language.RU:
return obj.name_ru
return obj.name_en
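# Note (illustrative): get_name above reads the request from the serializer context,
# so when the serializer is instantiated manually the request must be passed in, e.g.
#   CarSerializer(car, context={'request': request}).data
# DRF generic views and viewsets supply this context automatically.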
|
from django import template
register = template.Library()
@register.filter
def get_answer(offer, user):
"""Returns an user answer for the given offer"""
if not user.is_authenticated:
return None
return offer.answers.filter(user=user).first()
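# Template usage sketch (illustrative; the {% load %} name depends on this file's
# name, which is an assumption here):
#   {% load offer_tags %}
#   {{ offer|get_answer:request.user }}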
|
from datetime import datetime
from .Base import Base
from . import db
class Rental(Base):
_ignored_fields = Base._ignored_fields + ['user_id', 'movie_id']
user_id = db.Column(db.ForeignKey('users.id'))
movie_id = db.Column(db.ForeignKey('movies.id'))
    # pass the callable so the timestamp is evaluated per row, not once at import time
    date_rented = db.Column(db.DateTime, default=datetime.utcnow)
date_returned = db.Column(db.DateTime, default=None)
user = db.relationship('User', back_populates='rentals', lazy=True)
movie = db.relationship('Movie', back_populates='rentals', lazy=True)
def was_returned(self):
return self.date_returned is not None
|
"""Support function for parsing JPL ephemeris text files.
This is for parsing a NASA ephemeris text header file, like:
ftp://ssd.jpl.nasa.gov/pub/eph/planets/ascii/de421/header.421
You can use this routine like this::
from jplephem.ascii import parse_header
d = parse_header(open('header.421'))
from pprint import pprint
pprint(d)
pprint(dict(zip(d['names'], d['values'])))
"""
import numpy as np
def parse_header(lines):
lines = iter(lines)
while next(lines).strip() != 'GROUP 1030':
continue
assert next(lines).strip() == ''
jalpha, jomega, jdelta = (float(s) for s in e(next(lines)).split())
while next(lines).strip() != 'GROUP 1040':
continue
assert next(lines).strip() == ''
nconstants = int(next(lines))
names = []
while len(names) < nconstants:
names.extend(next(lines).split())
while next(lines).strip() != 'GROUP 1041':
continue
assert next(lines).strip() == ''
assert int(next(lines)) == nconstants
values = []
while len(values) < nconstants:
values.extend(float(s) for s in e(next(lines)).split())
while next(lines).strip() != 'GROUP 1050':
continue
assert next(lines).strip() == ''
planet_offsets = np.array(next(lines).split(), np.int_)
num_coefficients = np.array(next(lines).split(), np.int_)
coefficient_sets = np.array(next(lines).split(), np.int_)
del lines
    return locals()
def e(s):
"""Convert a string in 0.1D+01 FORTRAN notation into 0.1e+10."""
return s.replace('D', 'e')
|
import torch
import torch.nn as nn
from bootstrap.lib.logger import Logger
class ReduceLROnPlateau():
def __init__(self,
optimizer,
engine=None,
mode='min',
factor=0.1,
patience=10,
verbose=False,
threshold=0.0001,
threshold_mode='rel',
cooldown=0,
min_lr=0,
eps=1e-08):
self.optimizer = optimizer
self.lr_scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer,
mode=mode,
factor=factor,
patience=patience,
verbose=verbose,
threshold=threshold,
threshold_mode=threshold_mode,
cooldown=cooldown,
min_lr=min_lr,
eps=eps)
if engine is not None:
engine.register_hook('train_on_begin_epoch', self.step_lr_scheduler)
#engine.register_hook('eval_on_end_epoch', self.step_lr_scheduler)
def step_lr_scheduler(self):
prev_lr = self.lr_scheduler.optimizer.param_groups[0]['lr']
Logger().log_value('train_epoch.lr', prev_lr)
# first epoch
if len(Logger().values['eval_epoch.loss']) == 0:
return
val_loss = Logger().values['eval_epoch.loss'][-1]
self.lr_scheduler.step(val_loss)
new_lr = self.lr_scheduler.optimizer.param_groups[0]['lr']
if new_lr != prev_lr:
Logger()('ReduceLROnPlateau: lr has changed from {} to {}'.format(prev_lr, new_lr))
def __getattr__(self, key):
try:
return super(ReduceLROnPlateau, self).__getattr__(key)
except AttributeError:
return self.optimizer.__getattribute__(key)
# Inspired from https://github.com/jnhwkim/ban-vqa/blob/master/train.py
class BanOptimizer():
def __init__(self, engine,
name='Adamax',
lr=0.0007,
gradual_warmup_steps=[0.5, 2.0, 4],
lr_decay_epochs=[10, 20, 2],
lr_decay_rate=.25):
self.engine = engine
self.optimizer = torch.optim.__dict__[name](
filter(lambda p: p.requires_grad, engine.model.network.parameters()),
lr=lr
)
self.lr_decay_rate = lr_decay_rate
self.lr_decay_epochs = eval("range({},{},{})".format(*lr_decay_epochs))
self.gradual_warmup_steps = [
weight * lr for weight in eval("torch.linspace({},{},{})".format(
gradual_warmup_steps[0],
gradual_warmup_steps[1],
int(gradual_warmup_steps[2])
))
]
self.grad_clip = .25
self.total_norm = 0
self.count_norm = 0
if engine:
engine.register_hook('train_on_start_epoch', self.set_lr)
engine.register_hook('train_on_print', self.display_norm)
def set_lr(self):
epoch_id = self.engine.epoch
optim = self.optimizer
old_lr = optim.param_groups[0]['lr']
if epoch_id < len(self.gradual_warmup_steps):
new_lr = self.gradual_warmup_steps[epoch_id]
optim.param_groups[0]['lr'] = new_lr
Logger()('Gradual Warmup lr: {:.8f} -> {:.8f}'.format(old_lr, new_lr))
elif epoch_id in self.lr_decay_epochs:
new_lr = optim.param_groups[0]['lr'] * self.lr_decay_rate
optim.param_groups[0]['lr'] = new_lr
Logger()('Decrease lr: {:.8f} -> {:.8f}'.format(old_lr, new_lr))
else:
Logger()('No change to lr: {:.8f}'.format(old_lr))
Logger().log_value('train_epoch.lr', optim.param_groups[0]['lr'].item())
def display_norm(self):
Logger()(' norm: {:.5f}'.format(self.total_norm / self.count_norm))
def step(self):
self.total_norm += nn.utils.clip_grad_norm_(
self.engine.model.network.parameters(),
self.grad_clip
)
self.count_norm += 1
self.optimizer.step()
Logger().log_value('train_batch.norm', self.total_norm / self.count_norm)
def zero_grad(self):
self.optimizer.zero_grad()
def state_dict(self):
state = {}
state['optimizer'] = self.optimizer.state_dict()
return state
def load_state_dict(self, state):
self.optimizer.load_state_dict(state['optimizer'])
def __getattr__(self, key):
try:
            return super(BanOptimizer, self).__getattr__(key)
except AttributeError:
return self.optimizer.__getattribute__(key)
|
from . import config as c
from .basetypes import Vector2
import math
def clamp(x, a, b):
"""Clamps value x between a and b"""
return max(a, min(b, x))
def accelerate(obj, accel_x, accel_y, limit_x = None):
"""Accelerate until limit is reached"""
obj.vel += Vector2(accel_x, accel_y) * c.delta_time
if limit_x != None:
if obj.vel.x > 0:
obj.vel.x = clamp(obj.vel.x, 0, limit_x)
elif obj.vel.x < 0:
obj.vel.x = clamp(obj.vel.x, -limit_x, 0)
def get_flipped_sprite(sprite):
"""Returns coordinates of a flipped sprite"""
#429 is the width of the atlas
return (429 - sprite[0] - sprite[2], sprite[1], sprite[2], sprite[3])
|
import socket
def get_host_ip():
"""Return the local IP address"""
try:
ss = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ss.connect(('8.8.8.8', 8070))
ip = ss.getsockname()[0]
finally:
ss.close()
return ip
|
#!/usr/bin/env python
from __future__ import print_function
import logging
import os, re
import SimpleITK as sitk
import radiomics
from radiomics import featureextractor
import json
import csv
import random
from . import utils
def tqdmProgressbar():
"""
This function will setup the progress bar exposed by the 'tqdm' package.
Progress reporting is only used in PyRadiomics for the calculation of GLCM and GLSZM in full python mode, therefore
enable GLCM and full-python mode to show the progress bar functionality
N.B. This function will only work if the 'click' package is installed (not included in the PyRadiomics requirements)
"""
global extractor
extractor.kwargs['enableCExtensions'] = False
# Enable the GLCM class to show the progress bar
extractor.enableFeatureClassByName('glcm')
radiomics.setVerbosity(logging.INFO) # Verbosity must be at least INFO to enable progress bar
import tqdm
radiomics.progressReporter = tqdm.tqdm
def clickProgressbar():
"""
This function will setup the progress bar exposed by the 'click' package.
Progress reporting is only used in PyRadiomics for the calculation of GLCM and GLSZM in full python mode, therefore
enable GLCM and full-python mode to show the progress bar functionality.
Because the signature used to instantiate a click progress bar is different from what PyRadiomics expects, we need to
write a simple wrapper class to enable use of a click progress bar. In this case we only need to change the 'desc'
keyword argument to a 'label' keyword argument.
N.B. This function will only work if the 'click' package is installed (not included in the PyRadiomics requirements)
"""
global extractor
extractor.kwargs['enableCExtensions'] = False
# Enable the GLCM class to show the progress bar
extractor.enableFeatureClassByName('glcm')
radiomics.setVerbosity(logging.INFO) # Verbosity must be at least INFO to enable progress bar
import click
class progressWrapper():
def __init__(self, iterable, desc=''):
# For a click progressbar, the description must be provided in the 'label' keyword argument.
self.bar = click.progressbar(iterable, label=desc)
def __iter__(self):
return self.bar.__iter__() # Redirect to the __iter__ function of the click progressbar
def __enter__(self):
return self.bar.__enter__() # Redirect to the __enter__ function of the click progressbar
def __exit__(self, exc_type, exc_value, tb):
return self.bar.__exit__(exc_type, exc_value,
tb) # Redirect to the __exit__ function of the click progressbar
radiomics.progressReporter = progressWrapper
# Get some test data
def myRadiomicsData(nrrdDir):
# repositoryRoot points to the root of the repository. The following line gets that location if this script is run
    # from its default location in \pyradiomics\bin. Otherwise, it will point to some (invalid) folder, causing the
    # getTestCase function to fail to find the test case in the repository. In that case, a test case will be downloaded to
    # temporary files and its location is returned.
# repositoryRoot = os.path.abspath(os.path.join(os.getcwd(), ".."))
# data = ['brain1', 'brain2', 'breast1', 'lung1', 'lung2']
# print(random.choice(data))
# imageName, maskName = radiomics.getTestCase(random.choice(data), repositoryRoot)
ctPattern = re.compile('^ct.nrrd$', re.IGNORECASE)
nrrdFilePattern = re.compile('.*nrrd$', re.IGNORECASE)
# imageName = os.path.join(r'/media/sf_LAMBDA/2223563/NRRD/ct.nrrd')
# maskName = os.path.join(r'/media/sf_LAMBDA/2223563/NRRD/RS1/AP_PTV1.nrrd')
for f in os.listdir(nrrdDir):
abpff = os.path.join(nrrdDir, f)
# If the sub-path is an ignorable file, ignore it.
if ctPattern.match(f) and os.path.isfile(abpff):
imageName = str(abpff)
# If the sub-path is a directory
if utils.RTSTRUCT_FOLDER_PATTERN.match(f) and os.path.isdir(abpff):
rsFolder = str(abpff)
if imageName is None or rsFolder is None: # Something went wrong, in this case PyRadiomics will also log an error
print('Error getting testcase!')
exit()
tableHead = []
tableData = []
rowData = []
s = ''
for rs in os.listdir(rsFolder):
if nrrdFilePattern.match(rs):
maskName = os.path.join(rsFolder, rs)
if imageName is None or maskName is None: # Something went wrong, in this case PyRadiomics will also log an error
print('Error getting testcase!')
exit()
# Regulate verbosity with radiomics.verbosity (default verbosity level = WARNING)
# radiomics.setVerbosity(logging.INFO)
# Get the PyRadiomics logger (default log-level = INFO)
logger = radiomics.logger
logger.setLevel(logging.DEBUG) # set level to DEBUG to include debug log messages in log file
# Set up the handler to write out all log entries to a file
handler = logging.FileHandler(filename='testLog.txt', mode='w')
formatter = logging.Formatter("%(levelname)s:%(name)s: %(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)
# Define settings for signature calculation
# These are currently set equal to the respective default values
settings = {}
# settings['binWidth'] = 25
settings[
'resampledPixelSpacing'] = None # [3,3,3] is an example for defining resampling (voxels with size 3x3x3mm)
settings['interpolator'] = sitk.sitkBSpline
settings['correctMask'] = True
# Initialize feature extractor
extractor = featureextractor.RadiomicsFeaturesExtractor(**settings)
# By default, only original is enabled. Optionally enable some image types:
# extractor.enableImageTypes(Original={}, LoG={}, Wavelet={})
# Disable all classes except firstorder
extractor.enableAllFeatures()
# Enable all features in firstorder
# extractor.enableFeatureClassByName('firstorder')
# Only enable mean and skewness in firstorder
extractor.enableFeaturesByName(firstorder=['Mean', 'Skewness'])
# Uncomment one of these functions to show how PyRadiomics can use the 'tqdm' or 'click' package to report progress when
# running in full python mode. Assumes the respective package is installed (not included in the requirements)
# tqdmProgressbar()
# clickProgressbar()
# print(str(rs).replace('.nrrd',''))
rsName = str(rs).replace('.nrrd', '')
try:
featureVector = extractor.execute(imageName, maskName)
except:
print("Structure name: %s " % (rsName))
continue
handler.close()
if len(tableHead) == 0:
tableHead.append('Structure_Name')
rowData.append(rsName)
for featureName in featureVector.keys():
tableHead.append(featureName)
rowData.append(featureVector[featureName])
tableData.append(rowData)
rowData = []
else:
rowData.append(rsName)
for featureName in featureVector.keys():
rowData.append(featureVector[featureName])
tableData.append(rowData)
rowData = []
return tableHead, tableData
def calculateRadiomics(imageNrrd, structureNrrdFolder):
# imageName, maskName = radiomics.getTestCase(random.choice(data), repositoryRoot)
CT_nrrd_pattern = re.compile(r'.*-CT-.*.nrrd', re.IGNORECASE)
RTSTRUCT_folder_pattern = re.compile(r'.*-RTSTRUCT-.*', re.IGNORECASE)
nrrdFilePattern = re.compile('.*nrrd$', re.IGNORECASE)
if CT_nrrd_pattern.match(imageNrrd) and RTSTRUCT_folder_pattern.match(structureNrrdFolder):
if os.path.exists(imageNrrd) and os.path.exists(structureNrrdFolder):
tableHead = []
tableData = []
rowData = []
s = ''
for rs in os.listdir(structureNrrdFolder):
if nrrdFilePattern.match(rs):
maskName = str(os.path.join(structureNrrdFolder, rs))
if imageNrrd is None or maskName is None: # Something went wrong, in this case PyRadiomics will also log an error
print('Error getting testcase!')
exit()
# check roi size, if the roi size is too big, ignore this roi
rsName = str(rs).replace('.nrrd', '')
print(rsName)
#check whether this file is a large file
isLargeFile = utils.checkLargeRoiNrrd(maskName)
isTarget = utils.checkRoiNameIsTarget(rsName)
isOrgan = utils.checkRoiNameIsOrgans(rsName)
if isLargeFile: # Yes, it is large
                        if isTarget or isOrgan: #check whether this is a target or an organ
                            pass # Yes, calculate
                        else:
                            print(rsName + ' is a large file and it is neither a target nor an organ.')
continue # No, jump out
else: # No, it is not a large file
pass
# Regulate verbosity with radiomics.verbosity (default verbosity level = WARNING)
# radiomics.setVerbosity(logging.INFO)
# Get the PyRadiomics logger (default log-level = INFO)
#logger = radiomics.logger
#logger.setLevel(logging.DEBUG) # set level to DEBUG to include debug log messages in log file
# Set up the handler to write out all log entries to a file
#handler = logging.FileHandler(filename='testLog.txt', mode='w')
#formatter = logging.Formatter("%(levelname)s:%(name)s: %(message)s")
#handler.setFormatter(formatter)
#logger.addHandler(handler)
# Define settings for signature calculation
# These are currently set equal to the respective default values
settings = {}
# settings['binWidth'] = 25
settings[
'resampledPixelSpacing'] = None # [3,3,3] is an example for defining resampling (voxels with size 3x3x3mm)
settings['interpolator'] = sitk.sitkBSpline
settings['correctMask'] = True
# Initialize feature extractor
extractor = featureextractor.RadiomicsFeaturesExtractor(**settings)
# By default, only original is enabled. Optionally enable some image types:
# extractor.enableImageTypes(Original={}, LoG={}, Wavelet={})
# Disable all classes except firstorder
extractor.enableAllFeatures()
# Enable all features in firstorder
# extractor.enableFeatureClassByName('firstorder')
# Only enable mean and skewness in firstorder
extractor.enableFeaturesByName(firstorder=['Mean', 'Skewness'])
# Uncomment one of these functions to show how PyRadiomics can use the 'tqdm' or 'click' package to report progress when
# running in full python mode. Assumes the respective package is installed (not included in the requirements)
# tqdmProgressbar()
# clickProgressbar()
try:
featureVector = extractor.execute(imageNrrd, maskName)
except:
print("Structure name: %s " % (rsName))
continue
#handler.close()
if len(tableHead) == 0:
tableHead.append('Structure_Name')
rowData.append(rsName)
for featureName in featureVector.keys():
tableHead.append(featureName)
rowData.append(featureVector[featureName])
tableData.append(rowData)
rowData = []
else:
rowData.append(rsName)
for featureName in featureVector.keys():
rowData.append(featureVector[featureName])
tableData.append(rowData)
rowData = []
return tableHead, tableData
# repositoryRoot = os.path.abspath("/media/sf_LAMBDA/2223563/NRRD")
# myRadiomicsData(repositoryRoot)
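# Illustrative sketch (not part of the original script): the csv module imported above
# is otherwise unused; the (tableHead, tableData) pair returned by myRadiomicsData or
# calculateRadiomics could be written out like this. The output path is an assumption.
def _write_radiomics_csv(tableHead, tableData, out_path='radiomics_features.csv'):
    with open(out_path, 'w') as f:
        writer = csv.writer(f)
        writer.writerow(tableHead)   # header: 'Structure_Name' plus feature names
        writer.writerows(tableData)  # one row per structure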
|
#!/usr/bin/python3
import os
import sys
from pymongo import MongoClient, errors
class Mongo:
def __init__(self, connectionString, databaseName, collectionName):
self.connectionString = connectionString
self.databaseName = databaseName
self.collectionName = collectionName
#Function ensures that user defined database and collection names are valid
def checkValidDbAndConnectionName(mongoDbName, mongoCollectionName):
#Illegal characters in a Mongo database name
invalidCharDbList = ['/',"\\",'.',' ','"','$']
#Database name has to be wrapped in two levels of quotes, single quotes, and inside them, double quotes
#This is done in an attempt to sanitize the data, though this can still be overcome by using 4 levels of quotes
if not ((mongoDbName[0] == '"') and (mongoDbName[-1] == '"')):
sys.exit('Database name must be wrapped in 2 levels of quotes, \'"<DB NAME>"\', \
\nthe quotes will be removed automatically, program terminating')
else:
#Stripping away double quotes, single quotes are eaten by bash automatically
mongoDbName = mongoDbName[1:-1]
#Python 'exec' function will be used later, pass cannot be present in this function
if (mongoDbName == 'pass') or (mongoCollectionName == 'pass'):
sys.exit("Database name or collection name cannot be 'pass', program terminating")
#Collection name has to be wrapped in two levels of quotes, single quotes, and inside them, double quotes
#This is done in an attempt to sanitize the data, though this can still be overcome by using 4 levels of quotes
if not ((mongoCollectionName[0] == '"') and (mongoCollectionName[-1] == '"')):
sys.exit('Collection name must be wrapped in 2 levels of quotes, \'"<COLLECTION NAME>"\', \
\nthe quotes will be removed automatically, program terminating')
else:
#Stripping away double quotes, single quotes are eaten by bash automatically
mongoCollectionName = mongoCollectionName[1:-1]
#Mongo Database will not allow database names to be over 63 characters
if len(mongoDbName) >= 64:
sys.exit('Length of database name is over 63 characters, program terminating')
#Making sure any character from list above ^, is not present
for invalidChar in invalidCharDbList:
if invalidChar in mongoDbName:
sys.exit("Invalid char: '" + invalidChar + "' in database name, program terminating")
#Only illegal character for collections is '$'
if '$' in mongoCollectionName:
sys.exit("Invalid char: '$' in collection name, program terminating")
#Both collection and database names cannot be empty string
if mongoCollectionName == '':
sys.exit('Collection name cannot be empty string, program terminating')
if mongoDbName == '':
sys.exit('Database name cannot be empty string, program terminating')
#Collection name cannot start with 'system.'
if '.' in mongoCollectionName:
if mongoCollectionName.split('.')[0] == 'system':
sys.exit("Collection name cannot start with 'system.', program terminating")
#Total length of collection namespace has to be below 120 bytes, so 119 characters (there is a mandatory dot)
if len(mongoDbName) + len(mongoCollectionName) > 119:
sys.exit("Max size of collection namespace '<db name>.<collection name>' is 120 bytes, program terminating")
return mongoDbName, mongoCollectionName
#Function checks to see if the user wants to use default connection string or supply their own in a file
def askUserForConnectionString():
userAnswer = ''
connectionString = ''
print('Program by default connects to Mongo Database @ localhost, port 27017')
#Will only take 'Y' or 'N' as a response
while True:
print('Would you like to change this? (Y/N)')
userAnswer = input()
if userAnswer == 'Y' or userAnswer == 'N':
break
if userAnswer == 'N':
return None
#If user does want to modify connection string:
#User enters file name that contains connection string into stdin
print('Please insert the connection string into a text file and provide the file name below: ')
connectionStringFileName = input()
#Seeing if file actually exists
if not os.path.isfile(connectionStringFileName):
sys.exit('Could not open file containing connection string, program terminating')
#If it does, opening it, entire contents become connection string
#If file is correct, it should only be one line long
with open(connectionStringFileName) as openFile:
connectionString = openFile.read()
return connectionString
#Function checks to see if a connection can be established to a Mongo Database instance
def checkForValidConnection(userDefinedConnectionString):
timeOutVal = 1
connectionString = ''
    #If the user opted to use the default string, which connects to localhost at port 27017
    if userDefinedConnectionString is None:
        connectionString = 'mongodb://localhost:27017'
    #If they provided their own string
    else:
        connectionString = userDefinedConnectionString
    #Modifying timeout, default is 30 seconds, now it is 1 millisecond
client = MongoClient(connectionString, serverSelectionTimeoutMS = timeOutVal)
    db = client.admin
#Seeing if server responds, or if timeout error occurs
try:
client.server_info()
print("Established connection to Mongo Database")
except errors.ServerSelectionTimeoutError:
sys.exit('Could not establish connection with Mongo Database, program terminating')
#Function writes compressed string, and lookup information into Mongo Database
def writeToDatabase(self, compressedStream, tag, mongoFieldNames):
#mongoFieldNames are the field (column) names defined by the user to break up the CSV on
#tag is the set of actual values for each field that make up this segment
#Zipping them together to create dictionary (JSON), allows for lookup in Mongo Db
dataTagDict = dict(zip(mongoFieldNames, tag))
#Adding a new field to dictionary for our compressed object
dataTagDict['compressedObject'] = compressedStream
#Reconnecting to Mongo Database
client = MongoClient(self.connectionString)
#Has to be done in exec, because code doesn't allow for dynamic database names
dbString = 'db = client.' + self.databaseName
exec(dbString)
#Has to be done in exec, because code doesn't allow for dynamic collection names
collectionString = 'result = db.' + self.collectionName + '.insert_one(dataTagDict)'
exec(collectionString)
print('Segment successfully written to Mongo Database:', list(tag))
#Function retrieves compressed string from Mongo Databased based on lookup information
def retrieveSegmentsFromDatabase(self, columnsToReconstructOn):
queryDict = {}
queryResult = None
#Reconnecting into Mongo Database
client = MongoClient(self.connectionString)
#Since argument is in format FIELD=VALUE, splitting on '='
for fieldValuePair in columnsToReconstructOn:
        brokenUp = fieldValuePair.split('=')
        #User forgot to include '='
        if len(brokenUp) == 1:
            sys.exit('Field name for decompression must be in format FIELD=VALUE, no equal sign detected, program terminating')
        #'FIELD=VALUE' becomes {FIELD: VALUE} entries in a dictionary
        else:
            queryDict[brokenUp[0]] = brokenUp[1]
#Has to be done in exec, because code doesn't allow for dynamic database names
dbString = 'db = client.' + self.databaseName
exec(dbString)
#Has to be done in exec, because code doesn't allow for dynamic collection names
_locals = locals()
queryString = 'queryResult = db.' + self.collectionName + '.find(' + str(queryDict) + ')'
exec(queryString, globals(), _locals)
queryResult = list(_locals['queryResult'])
#Query returned nothing
if len(queryResult) == 0:
sys.exit('Successfully queried input, 0 results, program terminating')
#Query returned something
else:
print('Successfully retrieved query results on:', self.collectionName + ', from Mongo Database:', self.databaseName)
return queryResult
|
import os
import vlc
surahFiles = os.listdir("./")
surahFiles.sort()
j=0
while j<len(surahFiles):
print "len: " + str(len(surahFiles)) + " j " + str(j)
surahFile = surahFiles[j]
print surahFile
if not surahFile.endswith('.mp3'):
j = j + 1
continue;
#surahFile = "037-as-saffat.mp3"
surahPath = surahFile
print "vlc load"
player = vlc.MediaPlayer(surahPath)
player.play()
timingFile = "./" + surahFile[:3] + ".txt"
lines = [line.rstrip('\n') for line in open(timingFile)]
i=0
#for line in lines:
while i<len(lines):
ayahTime = int(lines[i])
player.set_time(ayahTime)
print str(lines[i])
input = raw_input('Press enter for next ayah. Press any other key for next surah. ' + str(i+1))
if input == "n":
player.stop()
print "exiting"
break;
elif input == "ps":
player.stop()
j = j - 3
break;
elif input == "p":
i = i - 1
continue;
i = i + 1
continue;
j = j + 1
print "incrementing"
player.stop()
|
# -*- coding: utf-8 -*-
import pytest
from todolist import create_app
from todolist.services import todo as todo_service
@pytest.fixture
def app():
app = create_app()
return app
@pytest.fixture(scope='function', autouse=True)
def reset_todo_list():
"""Reset `todo_service._TODOS` state."""
todo_service._TODOS = {}
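# A minimal example test (illustrative; it only relies on the fixtures above and
# on todo_service storing its todos in the module-level _TODOS dict):
def test_todo_list_starts_empty(app):
    assert todo_service._TODOS == {}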
|
import platform
import click
from awsflock.parsing import Duration
DEFAULT_DYNAMO_TABLE = "awsflock"
def lease_duration_opt(func):
return click.option(
"--lease-duration",
type=Duration(),
default="2 hours",
show_default=True,
help=(
"The duration of the lease, after which it expires if not released. "
"Given as an integer and a duration unit. Valid durations are "
"seconds, minutes, hours, and days. "
"Defaults to seconds if no unit is given"
),
)(func)
def help_opt(func):
return click.help_option("-h", "--help")(func)
def table_opt(func):
return click.option(
"--tablename",
envvar="AWSFLOCK_TABLE",
default=DEFAULT_DYNAMO_TABLE,
show_default=True,
help=(
"A custom name for the lock table to use. "
"Can be set with the AWSFLOCK_TABLE env var"
),
)(func)
def owner_opt(func):
def callback(ctx, param, value):
# if no owner is given, default to hostname of the current machine
# failover to NULL in the worst case
if not value:
return platform.node() or "NULL"
return value
return click.option(
"--owner",
help=(
"The name of the lock owner. Defaults to using the hostname from the "
"calling environment. Informational only, no impact on lock logic"
),
callback=callback,
)(func)
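# A minimal sketch of composing these option decorators on a command (the
# "acquire" command and its body are hypothetical, not part of awsflock itself):
@click.command("acquire")
@help_opt
@table_opt
@owner_opt
@lease_duration_opt
def _example_acquire(tablename, owner, lease_duration):
    # Echo the resolved options; a real command would talk to DynamoDB here.
    click.echo(f"{owner} acquiring a lock in table {tablename} for {lease_duration}")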
|
import os
import numpy as np
import matplotlib
# Force matplotlib to not use any Xwindows backend.
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from networkx.drawing.nx_pylab import draw_spring
from meta_graph import convert_to_original_graph
from events import detect_events_given_path
from util import load_json_by_line
CURDIR = os.path.dirname(os.path.abspath(__file__))
interactions = load_json_by_line(CURDIR + '/data/enron.json')
people_info = load_json_by_line(CURDIR + '/data/people.json')
peopleid2info = {r['id']: (r['name'], r['email'])
for r in people_info}
summary_kws = {
'temporal_traffic': False,
'topics': False,
'email_content': False,
'participants': {
'people_info': people_info,
'interactions': interactions
}
}
def draw_kws_graphs(g):
degrees_dict = g.degree(g.nodes())
degrees = [degrees_dict[n] for n in g.nodes()]
return {
'node_size': np.log(np.asarray(degrees) + 1) * 100
}
def draw_kws_events(g):
data = draw_kws_graphs(g)
nodes = g.nodes()
degrees = g.degree(nodes)
important_nodes = sorted(nodes, key=lambda k: degrees[k], reverse=True)[:3]
data['labels'] = {n: peopleid2info[n]
for n in important_nodes}
data.update({'font_size': 16,
'font_weight': 'bold',
'font_color': '#2980B9'})
return data
def plot_events(events, figure_dir):
"""for original graph
"""
gs = map(convert_to_original_graph, events)
plot_graphs(gs, figure_dir, gen_kws_func=draw_kws_events)
def plot_graphs(gs, figure_dir, gen_kws_func=draw_kws_graphs):
"""more meta graph
"""
if not os.path.exists(figure_dir):
os.makedirs(figure_dir)
for i, g in enumerate(gs):
kws = gen_kws_func(g)
draw_spring(g, **kws)
plt.hold(False)
plt.savefig(os.path.join(figure_dir, "{}.png".format(i+1)))
def main():
import sys
result_path = sys.argv[1]
dirname = os.path.basename(result_path).replace('.pkl', '')
events = detect_events_given_path(result_path, 5)
# plot_graphs(events, 'figures/{}'.format(dirname))
plot_events(events, 'figures/original-graph-of-event/{}'.format(dirname))
if __name__ == '__main__':
main()
|
import math
def circleArea(r):
return math.pi*r*r
print(circleArea(55))
print(math.pi)
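# Expected output (approximately):
# 9503.3178   (pi * 55**2, trailing digits may vary)
# 3.141592653589793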
|
import torch
from torch import Tensor
import math
from .benchmarks import BenchmarkBase
from botorch.utils.sampling import draw_sobol_samples
class SineBenchmark(BenchmarkBase):
"""
    One-dimensional sine function with two global optimizers that have different noise levels.
    Noise in the measurements is zero-mean Gaussian with heteroscedastic (i.e., input-dependent) variance induced by
    a sigmoid function. This results in small noise on [0, 1] and higher noise on (1, 2].
"""
def __init__(self, config):
super(SineBenchmark, self).__init__()
self.sigma = config['sigma']
self.repeat_eval = config['repeat_eval']
self._optimizers = [0.25, 1.25]
self._max_value = 1
self.seed_test = 42
def evaluate(self, x: Tensor, seed_eval=None) -> Tensor:
y_true = self.f(x)
sigmas = self.get_noise_var(x)
if seed_eval is not None:
shape = torch.cat([y_true] * self.repeat_eval, dim=1).shape
y = y_true + sigmas * torch.randn(shape, generator=torch.Generator().manual_seed(seed_eval))
else:
y_true = torch.cat([y_true] * self.repeat_eval, dim=1)
y = y_true + sigmas.reshape((-1, 1)) * torch.randn_like(y_true)
return y
def evaluate_on_test(self, x: Tensor) -> Tensor:
y_true = self.f(x)
sigmas = self.get_noise_var(x)
shape = y_true.shape
noise = sigmas * torch.randn(shape, generator=torch.Generator().manual_seed(self.seed_test))
y = y_true + noise
return y
def get_domain(self):
bounds = torch.zeros(2, 1, dtype=torch.float)
bounds[1] = 2
return bounds
def get_random_initial_points(self, num_points, seed) -> Tensor:
x = draw_sobol_samples(self.get_domain(), num_points, q=1, seed=seed).reshape((-1, 1))
return x
@staticmethod
def f(x):
y_true = torch.sin(x * (2 * math.pi))
return y_true.reshape((-1, 1))
def get_noise_var(self, x):
sigmas = torch.sigmoid((x - 1) * 30) * self.sigma + 0.1
return sigmas
def get_info_to_dump(self, x):
dict_to_dump = {
'f': self.f(x).squeeze(),
'rho': self.get_noise_var(x).squeeze()
}
return dict_to_dump
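# A minimal usage sketch (config values are illustrative; the module relies on
# relative imports, so use it from within its package):
#
#   bench = SineBenchmark({'sigma': 0.5, 'repeat_eval': 3})
#   x = bench.get_random_initial_points(num_points=5, seed=0)  # shape (5, 1)
#   y = bench.evaluate(x)                                       # shape (5, 3), noisy evaluations
#   y_test = bench.evaluate_on_test(x)                          # shape (5, 1), fixed test seed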
|
# Time: O(|V| + |E|)
# Space: O(|E|)
# There are a total of n courses you have to take, labeled from 0 to n - 1.
#
# Some courses may have prerequisites, for example to take course 0 you have to first take course 1,
# which is expressed as a pair: [0,1]
#
# Given the total number of courses and a list of prerequisite pairs, return the ordering of courses
# you should take to finish all courses.
#
# There may be multiple correct orders, you just need to return one of them. If it is impossible
# to finish all courses, return an empty array.
#
# For example:
#
# 2, [[1,0]]
# There are a total of 2 courses to take. To take course 1 you should have finished course 0.
# So the correct course order is [0,1]
#
# 4, [[1,0],[2,0],[3,1],[3,2]]
# There are a total of 4 courses to take. To take course 3 you should have finished both courses 1 and 2.
# Both courses 1 and 2 should be taken after you finished course 0. So one correct course order is [0,1,2,3].
# Another correct ordering is[0,2,1,3].
#
# Note:
# The input prerequisites is a graph represented by a list of edges, not adjacency matrices.
# Read more about how a graph is represented.
#
# Hints:
# This problem is equivalent to finding the topological order in a directed graph.
# If a cycle exists, no topological ordering exists and therefore it will be impossible to take all courses.
# Topological Sort via DFS - A great video tutorial (21 minutes) on Coursera explaining
# the basic concepts of Topological Sort.
# Topological sort could also be done via BFS.
#
from collections import defaultdict, deque
import collections
# bfs solution
class Solution(object):
def findOrder(self, numCourses, prerequisites):
"""
:type numCourses: int
:type prerequisites: List[List[int]]
:rtype: List[int]
"""
in_degree, graph = defaultdict(int), defaultdict(set)
for c, pre in prerequisites:
in_degree[c] += 1
graph[pre].add(c)
        # Only courses with in-degree 0 can be taken at each step, since they do not depend on any other course
ans = []
zero_in_degree_queue = deque([i for i in range(numCourses) if i not in in_degree])
while zero_in_degree_queue:
prerequisite = zero_in_degree_queue.popleft()
ans.append(prerequisite)
for course in graph[prerequisite]:
                in_degree[course] -= 1  # decrease the in-degree of the dependent courses
if not in_degree[course]:
zero_in_degree_queue.append(course)
return ans if len(ans) == numCourses else []
def findOrder_bfs2(self, numCourses, prerequisites):
in_degree = collections.defaultdict(set)
out_degree = collections.defaultdict(set)
for i, j in prerequisites:
in_degree[i].add(j)
out_degree[j].add(i)
q = collections.deque([i for i in xrange(numCourses) if i not in in_degree])
result = []
while q:
node = q.popleft()
result.append(node)
for i in out_degree[node]:
in_degree[i].remove(node)
if not in_degree[i]:
q.append(i)
del in_degree[i]
del out_degree[node]
return result if not in_degree and not out_degree else []
# Time: O(|V| + |E|)
# Space: O(|E|)
# dfs solution
class Solution2(object):
def findOrder(self, numCourses, prerequisites):
"""
:type numCourses: int
:type prerequisites: List[List[int]]
:rtype: List[int]
"""
in_degree = collections.defaultdict(set)
out_degree = collections.defaultdict(set)
for i, j in prerequisites:
in_degree[i].add(j)
out_degree[j].add(i)
stk = [i for i in xrange(numCourses) if i not in in_degree]
result = []
while stk:
node = stk.pop()
result.append(node)
for i in out_degree[node]:
in_degree[i].remove(node)
if not in_degree[i]:
stk.append(i)
del in_degree[i]
del out_degree[node]
return result if not in_degree and not out_degree else []
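# A quick sanity check on the example from the problem statement (Solution2 and
# findOrder_bfs2 use xrange and therefore need Python 2; the first BFS solution
# also runs on Python 3):
if __name__ == "__main__":
    print(Solution().findOrder(4, [[1, 0], [2, 0], [3, 1], [3, 2]]))  # e.g. [0, 1, 2, 3]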
|
import os
import re
import sys
import subprocess
from termcolor import colored
def __find_file_in_dir(directory, filename):
if re.search(r'^~', directory):
directory = os.path.expanduser(directory)
matches = []
if os.path.isdir(directory):
for root, dirs, files in os.walk(directory):
for file in files:
path = os.path.join(root, file)
if path.endswith('/' + filename):
matches.append(path)
else:
print colored(directory + " is not a directory", 'red')
sys.exit(1)
return matches
def run(dirs, filename):
paths = []
for d in dirs:
matches = __find_file_in_dir(d, filename)
if len(matches) == 1:
paths.append(matches[0])
elif len(matches) > 1:
print colored("More than one file found for " + filename + " in " + d, 'red')
print colored("Matches Found:", 'red')
pre = "\n "
print colored(pre + pre.join(matches), 'yellow')
sys.exit(1)
else:
print colored("No file found for " + filename + " in " + d, 'red')
try:
cmd = "diffmerge.sh " + " ".join(paths)
print colored(cmd, 'yellow')
subprocess.call(cmd.split())
except KeyboardInterrupt:
print
print colored("Exit diffmerge", 'red')
sys.exit(1)
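# Example (hypothetical directories): run(["~/projectA", "~/projectB"], "settings.py")
# finds settings.py in each directory and launches diffmerge.sh on the matches.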
|
# Copyright 2019 Graphcore Ltd.
import numpy as np
import popart
import time
from functools import partial
class PerfIntervalTimer:
# Define a simple timer object:
def __init__(self):
self.time = None
def not_set(self):
return self.time is None
def last(self):
return self.time
def reset(self):
self.time = time.perf_counter()
def interval(self):
now = time.perf_counter()
interval = now - self.time
return interval
# Define a function to build and run the graph with
# the specified data size:
def build_and_run_graph(data_size):
# Create a builder object:
builder = popart.Builder()
    # Specify two input vectors:
data_spec = popart.TensorInfo("FLOAT", [data_size])
id_a = builder.addInputTensor(data_spec)
id_b = builder.addInputTensor(data_spec)
# Describe the computation:
o1 = builder.aiOnnx.add([id_a, id_b])
o2 = builder.aiOnnx.mul([id_a, id_b])
# Designate the two output vectors and how
# often the result will be required:
builder.addOutputTensor(o1)
builder.addOutputTensor(o2)
dataFlow = popart.DataFlow(
1,
{o1: popart.AnchorReturnType("ALL"),
o2: popart.AnchorReturnType("ALL")})
# Setup an inference graph:
proto = builder.getModelProto()
session = popart.InferenceSession(
fnModel=proto,
dataFeed=dataFlow,
deviceInfo=popart.DeviceManager().createIpuModelDevice({}))
# Compile graph:
session.prepareDevice()
# Create input data buffers:
data_a = np.random.rand(data_size).astype(np.float32)
data_b = np.random.rand(data_size).astype(np.float32)
inputs = {id_a: data_a, id_b: data_b}
# Create output data buffers:
anchors = session.initAnchorArrays()
# Create timer objects and dictionaries:
timer = PerfIntervalTimer()
rtts = {}
# Input callback is called when the data is needed:
def input_callback(id, is_prefetch: bool):
if is_prefetch:
return
if timer.not_set():
timer.reset()
return inputs[id]
# Called after the input buffer has been consumed:
def input_complete_callback(id):
return
# Output callback is called when a buffer is needed for the result:
def output_callback(id):
return anchors[id]
# Complete callback is called when the output buffer has
# been filled (result is ready to be consumed by the host):
def output_complete_callback(id):
rtt = timer.interval()
rtts[id] = rtt
# Create the callback IO system:
stepio = popart.PyStepIOCallback(input_callback,
input_complete_callback,
output_callback,
output_complete_callback)
# Run the graph and return timings:
session.run(stepio)
return rtts
if __name__ == '__main__':
sizes = [1, 10, 100, 1000, 10000, 100000, 1000000, 10000000]
results = []
for i in sizes:
results.append(build_and_run_graph(i))
keys = results[0].keys()
for k in keys:
print(f"\nLatencies for {k}")
for s, d in enumerate(results):
print(f"{sizes[s]}, {d[k]}")
|
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.11.3
# kernelspec:
# display_name: Python 3
# name: python3
# ---
# + [markdown] id="view-in-github" colab_type="text"
# <a href="https://colab.research.google.com/github/always-newbie161/pyprobml/blob/hermissue_gcs/notebooks/GCS_demo_v2.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>
# + [markdown] id="NR_ZjHc79eil"
# ### Authenticate in order to access the GCS
# + [markdown] id="shFPvZGDC4wX"
# We don't need authentication to access public data in GCloud but
# in order to access protected data or write to a protected bucket, you need to set up credentials (authenticate)
# + id="xiNlHqTy87vv"
from google.colab import auth
auth.authenticate_user()
# + [markdown] id="lT8OpNHGCvdW"
# ### Setting a project
# + [markdown] id="aKDAI0sbBjMj"
# We need to choose a project in order to work with buckets. If you don't have one, create a project in the GCloud [**Console**](https://console.cloud.google.com/storage/browser?_ga=2.260409357.104440840.1625245885-649740148.1623669299)
# + [markdown] id="EWCPn3ZrAqKx"
# First we need to set a default project using the project_id so that you can use commands that would otherwise require specifying the project.
# + [markdown] id="SOuPjpRSBOmZ"
# `gcloud` can be used to set the default project.
# + id="zHCWgTL8Cnlc"
project_id = "your_project_id"
# + colab={"base_uri": "https://localhost:8080/"} id="z_qBhNW0BNHI" outputId="ae51ee86-e960-4f97-d619-08999dc300a2"
# !gcloud config set project {project_id}
# + [markdown] id="lW1KylyKnyxd"
# ## Using **gsutil**- CLI for GCloud
#
# [Docs](https://cloud.google.com/storage/docs/gsutil/commands/help)
#
#
# + [markdown] id="0NXOriaMyShh"
# ### Create
# + [markdown] id="SS9LgEeUyPof"
# command to create a bucket
#
# ```
# gsutil mb [-b (on|off)] [-c <class>] [-l <location>] [-p <proj_id>]
# [--retention <time>] [--pap <setting>] gs://<bucket_name>...
# ```
#
# + id="ED3IhiSsn24X"
import uuid
bucket_name = 'colab-sample-bucket-' + str(uuid.uuid1())
# + [markdown] id="SsejI5v3oowM"
# This will create a bucket with `bucket_name` with default configurations.
# + colab={"base_uri": "https://localhost:8080/"} id="KX7T4pWYn-Ui" outputId="7763a69f-dc64-4c07-e13a-f41e8c29d1da"
# !gsutil mb gs://{bucket_name}
# + [markdown] id="RBiYaUkLpXVL"
# Creating a local folder with a test file to upload to the bucket
# + id="RJp9oEoql6GA"
# !mkdir /tmp/test_folder
# + id="X8gVcAhvmA7j"
with open('/tmp/test_folder/test_file.txt', 'w') as f:
f.write('this file get saved in the test_folder')
# + [markdown] id="lHN5lYRIqFNY"
# Uploading the folder to the bucket
#
# The object `gs://{bucket_name}/test_folder/test_file.txt` is created.
# + colab={"base_uri": "https://localhost:8080/"} id="cKpAM65zqCRW" outputId="79dfd6ef-307a-4795-90fa-cdec14826fe3"
# !gsutil cp -r /tmp/test_folder gs://{bucket_name}
# + colab={"base_uri": "https://localhost:8080/"} id="qoectKiGOABf" cellView="form" outputId="1724e90a-5650-4cf7-89b1-45eb2ffe7c6c"
#@markdown Once the upload has finished, the data will appear in the Cloud Console storage browser for your project:
print('https://console.cloud.google.com/storage/browser?project=' + project_id)
# + [markdown] id="W8euIG2-SJwL"
# 
# + [markdown] id="Qh-jT25JyYTj"
# ### Read
# + [markdown] id="d8DiZOQXqZVS"
# The contents of the uploaded file in the bucket can be read in this way
# + colab={"base_uri": "https://localhost:8080/"} id="OJYVdw8xqSLm" outputId="69cfc55e-216e-4c32-bc3c-0c6d8919d429"
# !gsutil cat gs://{bucket_name}/test_folder/test_file.txt
# + [markdown] id="w2gH3Dk6vWHJ"
# The whole folder/file from the bucket can be downloaded in this way.
# + colab={"base_uri": "https://localhost:8080/"} id="r3t976BBu6HG" outputId="d60bea6e-c52d-48c9-fb25-914d40e30970"
# !gsutil cp -r gs://{bucket_name}/test_folder /content/
# !gsutil cp gs://{bucket_name}/test_folder/test_file.txt /content/
# + [markdown] id="1bMA_qqlyahq"
# ### Update
# + [markdown] id="MTezzZ7fudgn"
# **Updating a file**
#
# Edit the local copy and overwrite the file in the bucket.
# + colab={"base_uri": "https://localhost:8080/"} id="6luwErwjtcB_" outputId="123f7161-abad-4070-95fd-bf4e37030d41"
with open('/tmp/test_folder/test_file.txt', 'a') as f:
f.write(' this new string is added later')
# !gsutil cp /tmp/test_folder/test_file.txt gs://{bucket_name}/test_folder
# + colab={"base_uri": "https://localhost:8080/"} id="I_sg3FIiuRk3" outputId="0199d1c9-9084-4575-9b7c-ff48020eeec5"
# !gsutil cat gs://{bucket_name}/test_folder/test_file.txt
# + [markdown] id="Zy1HfnVawOwn"
# If you want to **update a folder** in the bucket to be in sync with a local copy of the folder, use `rsync`
# + [markdown] id="9tM3NTLITBig"
# Making some changes to the `test_folder`
# + id="fS1alXdiwM9Q"
# !rm /tmp/test_folder/test_file.txt
# + [markdown] id="QD3Zg4jeLi5b"
# adding a new file `test_file2.txt`
# + id="yv-cvrnLSnKn"
with open('/tmp/test_folder/test_file2.txt', 'w') as f:
f.write('this is a new file named test_file2')
# + [markdown] id="58AkHxXDTVM1"
# You can check the contents of your bucket at any level using the `ls` cmd.
# + [markdown] id="DdbkzqXFLpQf"
# bucket contents before updating.
# + colab={"base_uri": "https://localhost:8080/"} id="FBYGiFBTwxNq" outputId="a129024b-06ab-40b2-ceb7-9f6d75caa1e0"
# !gsutil ls gs://{bucket_name}/test_folder
# + colab={"base_uri": "https://localhost:8080/"} id="5WPy8ewNwka7" outputId="12d4e006-2da9-4a0f-ddb3-4f80c524887a"
# !gsutil rsync -d /tmp/test_folder gs://{bucket_name}/test_folder
# + [markdown] id="Pes9aB56Ls6c"
# bucket contents after updating.
# + colab={"base_uri": "https://localhost:8080/"} id="fbv0b1KOx7qF" outputId="7d5eaa82-4caf-4c67-888b-041933385b7f"
# !gsutil ls gs://{bucket_name}/test_folder
# + [markdown] id="5venzhkhyjUI"
# ### Delete
# + [markdown] id="LldL5vrqyoVY"
# **Contents of a bucket** can be deleted using `rm` command
# + colab={"base_uri": "https://localhost:8080/"} id="_rPX75LUynVb" outputId="85465c29-afe6-4745-dde3-6d3c5871e8af"
# !gsutil rm -r gs://{bucket_name}/test_folder
# + id="seeBynfHzPiC"
# !gsutil ls gs://{bucket_name}
# + [markdown] id="LouQLE76zrO3"
# **Deleting a bucket**
#
# The rb command deletes a bucket. Buckets must be empty before you can delete them.
# + colab={"base_uri": "https://localhost:8080/"} id="S4KiQaoSztt9" outputId="31f8f3ff-20da-4b4b-926d-36dd51f729a2"
# !gsutil rb gs://{bucket_name}
# + [markdown] id="xWFE65hNT83Y"
# ## Using GC **python API**
# + [markdown] id="aIM1BbWGcw48"
# ### Create
# + [markdown] id="uC0jTo9FfCrl"
# Creating a bucket
# + id="5fxyVu7Zc1TQ"
bucket_name = 'colab-sample-bucket-' + str(uuid.uuid1())
# + colab={"base_uri": "https://localhost:8080/"} id="er_q2sA_cWBH" outputId="a4f47186-d544-4a6a-f303-6e72a8977c38"
# Imports the Google Cloud client library
from google.cloud import storage
# Instantiates a client
storage_client = storage.Client(project=project_id)
# Creates the new bucket
bucket = storage_client.create_bucket(bucket_name)
print("Bucket {} created.".format(bucket.name))
# + id="niaT7sHjea6O"
# !mkdir /tmp/test_api_folder
# + id="_GRnTfFvea6R"
with open('/tmp/test_api_folder/test_api_file.txt', 'w') as f:
f.write('this file get saved in the test_api_folder')
# + [markdown] id="wdry4faxfG57"
# Uploading an object (a file with its path)
# + colab={"base_uri": "https://localhost:8080/"} id="S5_hlNWRdUIH" outputId="08d3e111-8536-46ae-de30-73ebff8603c0"
destination_blob_name = 'test_api_folder/test_api_file.txt'
source_file_name = '/tmp/test_api_folder/test_api_file.txt'
blob = bucket.blob(destination_blob_name)
blob.upload_from_filename(source_file_name)
print("File {} uploaded to {}.".format(source_file_name, destination_blob_name))
# + colab={"base_uri": "https://localhost:8080/"} cellView="form" id="f2pSG_5desj0" outputId="2320a685-5064-4847-a451-4f6a63fd5c08"
#@markdown Once the upload has finished, the data will appear in the Cloud Console storage browser for your project:
print('https://console.cloud.google.com/storage/browser?project=' + project_id)
# + [markdown] id="G3CbkJbofnhd"
# 
# + [markdown] id="l_09sm39e_cW"
# ### Read
# + colab={"base_uri": "https://localhost:8080/"} id="RxuND8cifAk0" outputId="3b00ae6d-dec2-4bcb-b96e-910b00bc9659"
source_blob_name = 'test_api_folder/test_api_file.txt'
destination_file_name = '/content/downloaded_test_api.txt'
source_blob = bucket.blob(source_blob_name)
source_blob.download_to_filename(destination_file_name)
print("Blob {} downloaded to {}.".format(source_blob_name, destination_file_name))
# + colab={"base_uri": "https://localhost:8080/"} id="O5JV7Z1BglJx" outputId="88a991db-84b8-4905-c433-4114099da681"
# !cat /content/downloaded_test_api.txt
# + [markdown] id="Irh3VwbNh1Nt"
# To easily download all objects in a bucket or subdirectory, use the `gsutil cp` command.
# + [markdown] id="2omE4Q_6h-66"
# ### Update
# + [markdown] id="Lgrro0CSiCT-"
# **Updating a file/object**
# It's simply overwriting the existing copy of the object.
# + id="jz5vR3VEh_2P"
with open('/tmp/test_api_folder/test_api_file.txt', 'a') as f:
f.write(' this is an appended string')
# + colab={"base_uri": "https://localhost:8080/"} id="FlZDLluViTeR" outputId="eacea1c0-b6a5-4751-d31f-cad280077cc0"
source_file_name = '/tmp/test_api_folder/test_api_file.txt'
destination_blob_name = 'test_api_folder/test_api_file.txt'
destination_file_name = '/content/downloaded_test_api.txt'
blob = bucket.blob(destination_blob_name)
blob.upload_from_filename(source_file_name)
print("File {} uploaded to {}.".format(source_file_name, destination_blob_name))
blob.download_to_filename(destination_file_name)
print("Blob {} downloaded to {}.".format(destination_blob_name, destination_file_name))
# + colab={"base_uri": "https://localhost:8080/"} id="hpv-JEM9jKnT" outputId="ba8988ab-0aee-49e3-9043-9db78aa2ca4c"
# !cat /content/downloaded_test_api.txt
# + [markdown] id="2AbCVPfRmAKk"
# ### Delete
# + [markdown] id="-XHFXFbzmX9M"
# Objects can be deleted easily by using `blob.delete()`
#
# When a folder is empty, it vanishes.
# + colab={"base_uri": "https://localhost:8080/"} id="wrgfh17umBeC" outputId="271a7c0f-e3f8-48db-94ee-a62975cc92d1"
blob_name = 'test_api_folder/test_api_file.txt'
blob = bucket.blob(blob_name)
blob.delete()
print("Blob {} deleted.".format(blob_name))
# + id="EFrBUG5rnAkW"
# !gsutil ls gs://{bucket_name}
# + [markdown] id="hkbPZ07sngkt"
# **Deleting a bucket**
# + colab={"base_uri": "https://localhost:8080/"} id="3eSTtMKJni8-" outputId="44071eba-47f7-4238-b27d-fb15a6c70992"
bucket.delete()
print("Bucket {} deleted".format(bucket.name))
# + [markdown] id="HIuuu5gasoLz"
# ## Using **gcsfuse**(mount) and bash
# + [markdown] id="CX7frgOTtAcp"
# Another way is to mount the GCS bucket to the current Colab session, so you can perform any regular (CRUD) operations using bash commands (just like on any other directory)
# + [markdown] id="gUdhebc0tjYE"
# In order to use this, you should first create a bucket (you can do this using any of the methods described above)
#
# `gsutil mb` is used here
# + [markdown] id="VYcyFs4Os5pq"
# ### Create
# + colab={"base_uri": "https://localhost:8080/"} id="DfpQ2rAKss_Z" outputId="6cfff41c-6842-4a89-a1b3-10474f712692"
import uuid
bucket_name = 'colab-sample-bucket-' + str(uuid.uuid1())
# !gsutil mb gs://{bucket_name}
# + [markdown] id="j8SsQz8lvUXd"
# Mounting the bucket to `/content/{bucket_name}`
# + colab={"base_uri": "https://localhost:8080/"} id="Y8QR1wEut_bJ" outputId="f142b1a3-0310-4914-9256-6017446a9f4b"
# !echo "deb http://packages.cloud.google.com/apt gcsfuse-bionic main" > /etc/apt/sources.list.d/gcsfuse.list
# !curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
# !apt -qq update
# !apt -qq install gcsfuse
# !mkdir $bucket_name
# !gcsfuse $bucket_name /content/$bucket_name
# + colab={"base_uri": "https://localhost:8080/"} id="Wcr20Zy6ls9h" outputId="5d3962b4-8694-459f-c7d0-6078880d5ea0"
# cd /content/$bucket_name
# + [markdown] id="8bcCpWcPvjtj"
# Creating a folder
# + id="--aHr0Ezvg17"
# !mkdir test_mount_folder
# + [markdown] id="8rQ6Ruvuvljo"
# Creating a file/object in the folder just created.
# + id="qKAuXZJDvg17"
with open('./test_mount_folder/test_mount_file.txt', 'w') as f:
f.write('this file get saved in the test_folder you just created')
# + colab={"base_uri": "https://localhost:8080/"} cellView="form" id="6ok4bP_Jv0Q3" outputId="04b43a21-0c83-45a8-fa8a-e26d0a135c22"
#@markdown Once the upload has finished, the data will appear in the Cloud Console storage browser for your project:
print('https://console.cloud.google.com/storage/browser?project=' + project_id)
# + [markdown] id="JBoocsoevrOr"
# 
#
# + [markdown] id="yzM8kmQ_x8s1"
# ### Read
# + [markdown] id="lN-R_zQCyFtZ"
# As the bucket is already mounted you can just access files by opening them using `files` or just double clicking the file
# + colab={"base_uri": "https://localhost:8080/", "height": 17} id="7NJjY7elyR_R" outputId="a98b30a9-d24e-4760-9a87-dd9a4e7e1239"
from google.colab import files
files.view(f'/content/{bucket_name}/test_mount_folder/test_mount_file.txt')
# + colab={"base_uri": "https://localhost:8080/"} id="WC4Mefwm4HqQ" outputId="30e4f5a0-075c-4289-9faf-e61949480877"
# !cat ./test_mount_folder/test_mount_file.txt
# + [markdown] id="bF58Tss40mwM"
# ### Update
# + [markdown] id="UP53crun0qqA"
# **Update to file**
#
# Update can be done normally either by editing or by using python.
# + colab={"base_uri": "https://localhost:8080/", "height": 17} id="-76UMSyU0oHQ" outputId="20e30678-e3e5-44bc-e938-d7bc54e6e18b"
with open('./test_mount_folder/test_mount_file.txt', 'a') as f:
f.write(' this new string is added later')
files.view(f'/content/{bucket_name}/test_mount_folder/test_mount_file.txt')
# + [markdown] id="lZ22hqhw3MYN"
# **Update to folder**
#
# New files can be added either by colab's UI or by using python or any other way that works for regular dirs.
# + id="206vm7Zn3Ypc"
# !echo "this is a second file to test_folder in the bucket" >> ./test_mount_folder/test_mount_file2.txt
# + [markdown] id="8REcIGt26XJl"
# 
# + colab={"base_uri": "https://localhost:8080/"} id="GVPkAEOU4qF0" outputId="f385fe6b-7ebf-4e6c-c052-badb1d66f236"
# !cat ./test_mount_folder/test_mount_file2.txt
# + [markdown] id="8NBwWlh56eU-"
# ### Delete
# + [markdown] id="ihlvf3An6f3_"
# `rm` cmd can be used to delete the objects of the bucket using the mounted path.
# + [markdown] id="YOa2T5Q26mmJ"
# **Deleting the file** `{bucket_name}/test_mount_folder/test_mount_file.txt`
# + id="DpOZmA2X6mOX"
# !rm ./test_mount_folder/test_mount_file.txt
# + [markdown] id="WrJLoQtO74zA"
# 
# + [markdown] id="aTeSnjYk8j2S"
# To delete the folder we can use `rm -r`, or `rm -rf` to force removal
# + id="ApMpAjO89ee6"
# !rm -r ./test_mount_folder
# + [markdown] id="2pGeyk_I-SOA"
# 
# + [markdown] id="OlcvnBXW-ash"
# **Deleting a bucket**
# + [markdown] id="2A9c75-3_NTN"
# You can't delete a bucket using gcsfuse, but you can unmount the mounted path and then delete it using any of the above-mentioned ways.
# + [markdown] id="HlvPxsWi_gZm"
# **Unmount the bucket**
#
# After unmounting, any changes in the local path will not be reflected in the cloud bucket.
# + colab={"base_uri": "https://localhost:8080/"} id="L-Qcjzsf_o7P" outputId="81f14f30-6259-47a1-924a-634d820e230f"
# cd ..
# + id="WiWO6iZh-ZBi"
# !fusermount -u /content/$bucket_name
# + [markdown] id="BnF8R1qcAdDS"
# deleting the bucket using gsutil.
# + colab={"base_uri": "https://localhost:8080/"} id="H6iUe65CAarl" outputId="8cb9f933-e119-45fd-b578-44c020605608"
# !gsutil rb gs://{bucket_name}
# + [markdown] id="uDiTPyYgnujI"
# ## Using GC **UI**
# + [markdown] id="Bf7gyYKvnzQb"
# Buckets can also be accessed directly through the GCloud Console's UI.
# + colab={"height": 34} cellView="form" id="99BIoSOsW4Q_" outputId="97b483ac-c2b1-46f0-f4ac-61b3701a8c89"
#@markdown Go to your project's browser:
print('https://console.cloud.google.com/storage/browser?project=' + project_id)
# + [markdown] id="BlIUgp0VqeJb"
# ### Create
# + [markdown] id="3aBDvOSWqihY"
# Bucket can be created in the following way.
# + colab={"base_uri": "https://localhost:8080/", "height": 35} id="BCTNyAEwqe5l" outputId="7754c801-c69a-41d1-8a44-f464fe4a873e"
# Pick a bucket name, they should be unique,
# for the demo purpose we can create a bucket name in this way
import uuid
bucket_name = 'colab-sample-bucket-' + str(uuid.uuid1())
bucket_name
# + [markdown] id="pyanZVFysdw-"
# * In the Storage browser, click `CREATE BUCKET`
#
# 
#
# * Fill in the name of the bucket; as it is a sample bucket, other configs can be left as default
# 
# + [markdown] id="YOO_qas9Qeow"
# * To create a folder, click `CREATE FOLDER` in the corresponding bucket's page.
# 
# * To upload files to this folder, you can use `UPLOAD FILES`
# 
#
# 
#
#
# + [markdown] id="BsKQ0OupTF_r"
# ### Read
# + [markdown] id="WRfbR_A046yO"
# * Files cannot be read directly using the UI, but their metadata and permissions can be viewed
#
#
# + [markdown] id="EEo93-YD54Lh"
# 
# + [markdown] id="XBvCtz2VTHUz"
# * Through the UI we can easily download files via the `DOWNLOAD` button after choosing the needed object
# 
#
# + [markdown] id="Q9FrPJlhUZTp"
# ### Update
# + [markdown] id="ie_FnQ5rUbzH"
# To update the bucket, we can upload files through `UPLOAD FILES` button at a given path.
#
# 
#
# + [markdown] id="1dowxoCTVK0d"
# ### Delete
# + [markdown] id="VCxkEOtaWHUY"
# * To delete any object in the bucket, we can use `DELETE` option.
# 
#
# * To delete the bucket, go back to the your project's browser and select the checkbox of the bucket you want to delete and click `DELETE`.
#
# 
|
import abc
class BaseRepository(abc.ABC):
@abc.abstractmethod
def init(self):
pass
@abc.abstractmethod
def check(self):
pass
@abc.abstractmethod
def eraise(self):
pass
@abc.abstractmethod
def commit(self):
pass
@abc.abstractmethod
def write_tags(self, tags: list):
pass
@abc.abstractmethod
def write_types(self, types: list):
pass
@abc.abstractmethod
def write_langs(self, langs: list):
pass
@abc.abstractmethod
def write_networks(self, networks: list):
pass
@abc.abstractmethod
def write_resources(self, resources: list):
pass
@abc.abstractmethod
def write_descriptions(self, resources_id: int, descriptions: list):
pass
@abc.abstractmethod
def write_description(self, resources_id: int, description: str):
pass
@abc.abstractmethod
def write_resources_tags(self, resources_id: int, tags: list):
pass
@abc.abstractmethod
def read_networks(self):
pass
@abc.abstractmethod
def read_langs(self, network_id: int):
pass
@abc.abstractmethod
def read_types(self, network_id: int, lang_id: int):
pass
@abc.abstractmethod
def read_tags(self, network_id: int, lang_id: int, type_id: int):
pass
@abc.abstractmethod
def read_resources(self, network_id=0, lang_id=0, type_id=0, tags_id=0):
pass
@abc.abstractmethod
def read_resource(self, resource_id: int):
pass
@abc.abstractmethod
def get_resources_id_by_name(self, name: str):
pass
@abc.abstractmethod
def read_descriptions(self, resources_id: int):
pass
@abc.abstractmethod
def get_network_id_by_name(self, name: str):
pass
@abc.abstractmethod
def get_lang_id_by_name(self, name: str):
pass
@abc.abstractmethod
def get_type_id_by_name(self, name: str):
pass
@abc.abstractmethod
def get_resources(self):
pass
@abc.abstractmethod
def get_resources_tags(self):
pass
@abc.abstractmethod
def get_network_name_by_id(self, network_id: int):
pass
@abc.abstractmethod
def get_lang_name_by_id(self, lang_id: int):
pass
@abc.abstractmethod
def get_type_name_by_id(self, type_id: int):
pass
@abc.abstractmethod
def get_tag_name_by_id(self, tags_id: int):
pass
@abc.abstractmethod
def update_resources(self, resources_id: int, network_id: int, lang_id: int, type_id: int):
pass
@abc.abstractmethod
def search_resources_by_url(self, url: str):
pass
@abc.abstractmethod
def search_resources_by_description(self, description: str):
pass
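# A concrete repository (for example one backed by SQLite) would subclass
# BaseRepository and override every abstract method; with abc.ABC as the base,
# instantiating an incomplete subclass raises TypeError. Minimal sketch
# (illustrative names only):
#
#   class SqliteRepository(BaseRepository):
#       def init(self):
#           self.conn = sqlite3.connect("resources.db")
#       ...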
|
from django import forms
# Form takes in input URLs from the user for the website whose articles they want to see.
class URLForm(forms.Form):
url1 = forms.URLField(initial="https://bbc.co.uk")
url2 = forms.URLField(initial="https://apnews.com")
url3 = forms.URLField(initial="https://reuters.com")
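# A minimal sketch of using this form in a view (the view name and template are
# hypothetical, not part of this app):
#
#   def article_sources(request):
#       form = URLForm(request.GET or None)
#       urls = [form.cleaned_data[f] for f in ("url1", "url2", "url3")] if form.is_valid() else []
#       return render(request, "articles.html", {"form": form, "urls": urls})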
|
# Print the following pattern
lastNumber = 6
for row in range(1, lastNumber):
for column in range(1, row+1):
        # end=" " keeps the numbers on one line, separated by spaces (print adds a newline otherwise)
print(column, end=" ")
print(" ") # print with black string means newline
|
import re
from functools import partial
from django.contrib.auth.models import BaseUserManager
generate_random = partial(BaseUserManager.make_random_password.im_func, None)
def string_to_css_class(string):
'Convert a string to a format useful for use as a css class.'
if not string:
return ''
return string.lower().replace(' ', '_').replace('.', '_').replace('[', '_').replace(']', '_')
def possessive(str):
if not str:
return ''
if str[-1] == 's':
return "%s'" % str
else:
return "%s's" % str
def truncate(s, length=300, continuation="..."):
if s:
return (s[:length - len(continuation)] + continuation) if len(s) > length else s
else:
return ""
# Stolen from https://github.com/mitsuhiko/jinja2/blob/7d268bef0e8f3f12c0acb90f30d67726a3e3f261/jinja2/filters.py
# since there hasn't been a real release. When jinja2 goes to 2.7 we can delete this
# https://github.com/mitsuhiko/jinja2/pull/59
def do_filesizeformat(value, binary=False):
"""Format the value like a 'human-readable' file size (i.e. 13 kB,
4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
Giga, etc.), if the second parameter is set to `True` the binary
prefixes are used (Mebi, Gibi).
"""
bytes = float(value)
base = binary and 1024 or 1000
prefixes = [
(binary and 'KiB' or 'kB'),
(binary and 'MiB' or 'MB'),
(binary and 'GiB' or 'GB'),
(binary and 'TiB' or 'TB'),
(binary and 'PiB' or 'PB'),
(binary and 'EiB' or 'EB'),
(binary and 'ZiB' or 'ZB'),
(binary and 'YiB' or 'YB')
]
if bytes == 1:
return '1 Byte'
elif bytes < base:
return '%d Bytes' % bytes
else:
for i, prefix in enumerate(prefixes):
unit = base ** (i + 2)
if bytes < unit:
return '%.1f %s' % ((base * bytes / unit), prefix)
return '%.1f %s' % ((base * bytes / unit), prefix)
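# A few illustrative results of do_filesizeformat:
#   do_filesizeformat(1)                  -> '1 Byte'
#   do_filesizeformat(100)                -> '100 Bytes'
#   do_filesizeformat(1000)               -> '1.0 kB'
#   do_filesizeformat(1024, binary=True)  -> '1.0 KiB'
#   do_filesizeformat(5 * 10 ** 6)        -> '5.0 MB'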
def camelcase_to_underscore(s):
# stolen from http://stackoverflow.com/a/1176023
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', s)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
|
from django.template.loader import render_to_string
from django.utils.html import format_html
from cms.app_base import CMSAppConfig, CMSAppExtension
from djangocms_alias.models import AliasContent
from djangocms_versioning.constants import DRAFT
from djangocms_version_locking.helpers import version_is_locked
def add_alias_version_lock(obj, field):
version = obj.versions.all()[0]
lock_icon = ""
if version.state == DRAFT and version_is_locked(version):
lock_icon = render_to_string("djangocms_version_locking/admin/locked_mixin_icon.html")
return format_html(
"{is_locked}{field_value}",
is_locked=lock_icon,
field_value=getattr(obj, field),
)
class VersionLockingCMSExtension(CMSAppExtension):
def __init__(self):
        # The monkey patch is here to be sure that at module load time the Version class
        # is registered and can be overridden without requiring a strict load order
        # in the INSTALLED_APPS setting in a project's settings.py. This is why this patch
        # isn't loaded from VersionLockingConfig.ready
from .monkeypatch import admin as monkeypatch_admin # noqa: F401
def configure_app(self, cms_config):
pass
class VersionLockingCMSAppConfig(CMSAppConfig):
djangocms_versioning_enabled = True
versioning = []
extended_admin_field_modifiers = [{AliasContent: {"name": add_alias_version_lock}}, ]
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
LithicEdgeWear
A QGIS plugin
This plugin quantifies lithic edge wear
-------------------
begin : 2017-10-30
git sha : $Format:%H$
copyright : (C) 2017 by Nick Waber
email : nwaber@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import QSettings, QTranslator, qVersion, QCoreApplication
from PyQt4.QtGui import QAction, QIcon
# Initialize Qt resources from file resources.py
import resources
# Import the code for the dialog
from lithic_edge_wear_dialog import LithicEdgeWearDialog
import os.path
class LithicEdgeWear:
"""QGIS Plugin Implementation."""
def __init__(self, iface):
"""Constructor.
:param iface: An interface instance that will be passed to this class
which provides the hook by which you can manipulate the QGIS
application at run time.
:type iface: QgsInterface
"""
# Save reference to the QGIS interface
self.iface = iface
# initialize plugin directory
self.plugin_dir = os.path.dirname(__file__)
# initialize locale
locale = QSettings().value('locale/userLocale')[0:2]
locale_path = os.path.join(
self.plugin_dir,
'i18n',
'LithicEdgeWear_{}.qm'.format(locale))
if os.path.exists(locale_path):
self.translator = QTranslator()
self.translator.load(locale_path)
if qVersion() > '4.3.3':
QCoreApplication.installTranslator(self.translator)
# Declare instance attributes
self.actions = []
self.menu = self.tr(u'&Lithic Edge Wear')
# TODO: We are going to let the user set this up in a future iteration
self.toolbar = self.iface.addToolBar(u'LithicEdgeWear')
self.toolbar.setObjectName(u'LithicEdgeWear')
# noinspection PyMethodMayBeStatic
def tr(self, message):
"""Get the translation for a string using Qt translation API.
We implement this ourselves since we do not inherit QObject.
:param message: String for translation.
:type message: str, QString
:returns: Translated version of message.
:rtype: QString
"""
# noinspection PyTypeChecker,PyArgumentList,PyCallByClass
return QCoreApplication.translate('LithicEdgeWear', message)
def add_action(
self,
icon_path,
text,
callback,
enabled_flag=True,
add_to_menu=True,
add_to_toolbar=True,
status_tip=None,
whats_this=None,
parent=None):
"""Add a toolbar icon to the toolbar.
:param icon_path: Path to the icon for this action. Can be a resource
path (e.g. ':/plugins/foo/bar.png') or a normal file system path.
:type icon_path: str
:param text: Text that should be shown in menu items for this action.
:type text: str
:param callback: Function to be called when the action is triggered.
:type callback: function
:param enabled_flag: A flag indicating if the action should be enabled
by default. Defaults to True.
:type enabled_flag: bool
:param add_to_menu: Flag indicating whether the action should also
be added to the menu. Defaults to True.
:type add_to_menu: bool
:param add_to_toolbar: Flag indicating whether the action should also
be added to the toolbar. Defaults to True.
:type add_to_toolbar: bool
:param status_tip: Optional text to show in a popup when mouse pointer
hovers over the action.
:type status_tip: str
:param parent: Parent widget for the new action. Defaults None.
:type parent: QWidget
:param whats_this: Optional text to show in the status bar when the
mouse pointer hovers over the action.
:returns: The action that was created. Note that the action is also
added to self.actions list.
:rtype: QAction
"""
# Create the dialog (after translation) and keep reference
self.dlg = LithicEdgeWearDialog()
icon = QIcon(icon_path)
action = QAction(icon, text, parent)
action.triggered.connect(callback)
action.setEnabled(enabled_flag)
if status_tip is not None:
action.setStatusTip(status_tip)
if whats_this is not None:
action.setWhatsThis(whats_this)
if add_to_toolbar:
self.toolbar.addAction(action)
if add_to_menu:
self.iface.addPluginToMenu(
self.menu,
action)
self.actions.append(action)
return action
def initGui(self):
"""Create the menu entries and toolbar icons inside the QGIS GUI."""
icon_path = ':/plugins/LithicEdgeWear/icon.png'
self.add_action(
icon_path,
text=self.tr(u'Lithic Edge Wear'),
callback=self.run,
parent=self.iface.mainWindow())
def unload(self):
"""Removes the plugin menu item and icon from QGIS GUI."""
for action in self.actions:
self.iface.removePluginMenu(
self.tr(u'&Lithic Edge Wear'),
action)
self.iface.removeToolBarIcon(action)
# remove the toolbar
del self.toolbar
def run(self):
"""Run method that performs all the real work"""
# show the dialog
self.dlg.show()
# Run the dialog event loop
import processing
#need this so processing algs can be used
#fresh MB surface
self.dlg.comboBox.clear()
#worn MB surface
self.dlg.comboBox_2.clear()
#fresh MB perimeter
self.dlg.comboBox_3.clear()
#worn MB perimeter
self.dlg.comboBox_4.clear()
#spin box for threshold
layers = self.iface.legendInterface().layers()
layer_list = []
for layer in layers:
layer_list.append(layer.name())
self.dlg.comboBox.addItems(layer_list)
self.dlg.comboBox_2.addItems(layer_list)
self.dlg.comboBox_3.addItems(layer_list)
self.dlg.comboBox_4.addItems(layer_list)
self.dlg.doubleSpinBox.clear() #start with no value in spin box
result = self.dlg.exec_()
# See if OK was pressed
if result:
# Identify selected layer by its index
selectedLayer1Index = self.dlg.comboBox.currentIndex()
# use the layer currently in the comboBox
selectedLayer1 = layers[selectedLayer1Index].name()
selectedLayer2Index = self.dlg.comboBox_2.currentIndex()
selectedLayer2 = layers[selectedLayer2Index].name()
selectedLayer3Index = self.dlg.comboBox_3.currentIndex()
selectedLayer3 = layers[selectedLayer3Index].name()
selectedLayer4Index = self.dlg.comboBox_4.currentIndex()
selectedLayer4 = layers[selectedLayer4Index].name()
freshSurface = selectedLayer1
wornSurface = selectedLayer2
freshPerim = selectedLayer3
wornPerim = selectedLayer4
#coords in case needed for GRASS7 module extents (uncomment)
#ext = layers[selectedLayer1Index].extent()
#xmin = ext.xMinimum()
#xmax = ext.xMaximum()
#ymin = ext.yMinimum()
#ymax = ext.yMaximum()
#coords = "%f,%f,%f,%f" %(xmin, xmax, ymin, ymax) # this is a string that stores the coordinate
#subtract the worn surface from the unworn surface
processing.runalg("saga:rastercalculator",freshSurface,wornSurface,"a-b",0,False,7,"differenceRaster1") #basic worn surface
processing.runalg('saga:convertlinestopolygons', freshPerim,"freshPoly") #create unworn polygon
processing.runalg('saga:convertlinestopolygons', wornPerim,"wornPoly") #create worn polygon
processing.runalg("saga:difference","freshPoly.shp","wornPoly.shp",True,"lostPoly") #create lost material polygon
processing.runalg('saga:clipgridwithpolygon', "differenceRaster1","wornPoly.shp",0,"differenceRaster2") #clean up difference raster edges with worn surface polygon
processing.runalg('saga:clipgridwithpolygon', freshSurface,"lostPoly.shp",0,"lostMaterial") #clip fresh surface with total wear polygon
processing.runalg("gdalogr:merge","differenceRaster2;lostMaterial",False,False,-9999,5,'mergedWear') #merge wear surface with clipped lostMaterial surface
processing.runalg("saga:rastercalculator",freshSurface,"mergedWear","b/a",0,False,7,"differenceIndex1") #raw wear index
processing.runalg("saga:rastercalculator","differenceIndex1",None,"ifelse( a>1,1,a)",0,False,7,"differenceIndex2") #wear index max = 1
processing.runalg("saga:rastercalculator","differenceIndex2",None,"ifelse( a<0,0,a)",0,False,7,"differenceIndex3") #wear index min = 0
processing.runalg("saga:rastercalculator","mergedWear",None,"ifelse( a>0,a,0)",0,False,7,"wearVolume") #create wear volume raster
self.iface.addRasterLayer("differenceIndex3", "Wear index") #load Wear Index layer
self.iface.addRasterLayer("wearVolume", "Wear volume") #load Wear Volume layer
#Create binary wear mapper based on lineEdit threshold
#thresh = float( self.dlg.doubleSpinBox.value())
#processing.runalg("saga:rastercalculator","differenceIndex3",None,"ifelse( a>0.15,1,0)",0,False,7,"wearBinary") #wear index min = 0
#processing.runalg("gdalogr:polygonize","wearBinary","DN","wearBinaryPoly")
#processing.runalg("qgis:selectbyattribute","wearBinaryPoly","DN",0,"1")
#processing.runandload("qgis:saveselectedfeatures","wearBinaryPoly","wornArea")
#print(thresh) #for debugging lineEdit.value()
|
from .paddings import ZeroPadding, Pkcs7Padding
from .rijndael import Rijndael, RijndaelCbc
__version__ = '0.3.3'
|
from net_models.fields import GENERIC_OBJECT_NAME, PRIVILEGE_LEVEL, AAA_METHOD_NAME
from net_models.models.BaseModels import BaseNetModel, VendorIndependentBaseModel
from pydantic import root_validator
from pydantic.types import PositiveInt
from pydantic.typing import Optional, List, Literal, Union
def enable_action_prohibited(cls, values):
return values
class IosAaaBase(VendorIndependentBaseModel):
pass
class IosAaaAction(IosAaaBase):
action: Literal["local", "local-case", "none", "group", "enable"]
group: Optional[Union[Literal["radius", "tacacs+"], GENERIC_OBJECT_NAME]]
@root_validator(allow_reuse=True)
def verify_group_present(cls, values):
if values.get("action") == "group":
if values.get("group") is None:
msg = f"When action == 'group', group is required."
raise AssertionError(msg)
else:
if values.get("group") is not None:
msg = f"Unless action == 'group', group must be None."
raise AssertionError(msg)
return values
class IosAaaMethodBase(IosAaaBase):
name: AAA_METHOD_NAME
action_list: List[IosAaaAction]
class IosAaaAuthenticationMethod(IosAaaMethodBase):
pass
class IosAaaAuthentication(IosAaaBase):
login: Optional[List[IosAaaAuthenticationMethod]]
enable: Optional[List[IosAaaAuthenticationMethod]]
dot1x: Optional[List[IosAaaAuthenticationMethod]]
class IosAaaAuthorizationMethod(IosAaaMethodBase):
if_authenticated: Optional[bool]
class IosAaaAuthorizationCommands(IosAaaAuthorizationMethod):
level: PRIVILEGE_LEVEL
class IosAaaAuthorization(IosAaaBase):
exec: Optional[List[IosAaaAuthorizationMethod]]
commands: Optional[List[IosAaaAuthorizationCommands]]
network: Optional[List[IosAaaAuthorizationMethod]]
authorize_console: Optional[bool]
authorize_config_commands: Optional[bool]
class IosAaaAccountingAction(IosAaaAction):
action: Literal["none", "group"]
broadcast: Optional[bool]
@root_validator(allow_reuse=True)
def validate_broadcast(cls, values):
if values.get("action") == "none":
if values.get("broadcast") not in [None, False]:
msg = f"If action == 'none', broadcast can only be in [None, False]."
raise AssertionError(msg)
return values
class IosAaaAccountingMethod(IosAaaMethodBase):
action_list: Optional[List[IosAaaAccountingAction]]
record: Literal["none", "start-stop", "stop-only"]
@root_validator(allow_reuse=True)
def validate_action_list(cls, values):
record = values.get("record")
if record == 'none':
# action_list must be None
if values.get("action_list") is not None:
msg = f"If record == 'none', action_list must be None."
raise AssertionError(msg)
else:
if values.get("action_list") is None:
msg = f"Unless record == 'none', action_list cannot be None."
raise AssertionError(msg)
return values
class IosAaaAccountingCommands(IosAaaAccountingMethod):
level: PRIVILEGE_LEVEL
class IosAaaAccounting(IosAaaBase):
exec: Optional[List[IosAaaAccountingMethod]]
commands: Optional[List[IosAaaAccountingCommands]]
visible_keys: Optional[bool]
class IosAaaConfig(IosAaaBase):
authentication: Optional[IosAaaAuthentication]
authorization: Optional[IosAaaAuthorization]
accounting: Optional[IosAaaAccounting]
class IosAaaLineCommands(BaseNetModel):
name: GENERIC_OBJECT_NAME
"""Name of the AAA method"""
level: PRIVILEGE_LEVEL
"""Privilege level"""
class IosAaaLineAuthorization(BaseNetModel):
exec: Optional[AAA_METHOD_NAME]
"""Name of the authorization exec method"""
commands: Optional[List[IosAaaLineCommands]]
"""List of Line Commands Authorization Models"""
class IosAaaLineAccounting(BaseNetModel):
exec: Optional[AAA_METHOD_NAME]
"""Name of the accounting exec method"""
commands: Optional[List[IosAaaLineCommands]]
"""List of Line Commands Accounting Models"""
class IosLineAaaConfig(BaseNetModel):
authentication: Optional[AAA_METHOD_NAME]
"""Name of the authentication login method"""
authorization: Optional[IosAaaLineAuthorization]
"""Line Authorization Model"""
    accounting: Optional[IosAaaLineAccounting]
"""Line Accounting Model"""
class IosAaaConfig(BaseNetModel):
authentication: Optional[IosAaaAuthentication]
authorization: Optional[IosAaaAuthorization]
accounting: Optional[IosAaaAccounting]
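# A minimal usage sketch (field values are illustrative and assumed to satisfy the
# AAA_METHOD_NAME / GENERIC_OBJECT_NAME constraints imported above):
#
#   method = IosAaaAuthenticationMethod(
#       name="VTY-LOGIN",
#       action_list=[
#           IosAaaAction(action="group", group="tacacs+"),
#           IosAaaAction(action="local"),
#       ],
#   )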
|
import datetime
import numpy as np
import pandas as pd
import tkinter as tk
from selenium import webdriver
from typing import List, Dict, Any
from .classWidget import Widget
from .classLogger import Logger
from .classSettings import Settings
logger = Logger(__name__, Settings.LOGGER)
class WidgetBins(Widget):
def __init__(self, widgetName: str, cronSyntax: str, priority: int, pane: str, slotNumber: int, config: Dict[str, Any]) -> None:
super().__init__(widgetName, cronSyntax, priority, pane, slotNumber, config)
def update(self) -> None:
# update output data
chrome_options: webdriver.ChromeOptions = webdriver.ChromeOptions()
chrome_options.headless = True
self.driver: webdriver.Chrome = webdriver.Chrome(chrome_options=chrome_options)
self.driver.get(self.config["baseUrl"])
self.driver.find_element_by_id("txtLookupPostCode").send_keys(self.config["postCode"])
self.driver.find_element_by_id("btnAddressLookup").click()
# select a different address??
self.driver.find_element_by_name("waste_collection_getData").submit()
table = self.driver.find_element_by_id("RetrieveAllDataGrid")
rows = table.find_elements_by_tag_name("tr")
data: List[List[str]] = [[x.text for x in y.find_elements_by_tag_name("td")] for y in rows]
self.driver.quit()
df: pd.DataFrame = pd.DataFrame(data)
df.columns = df.iloc[0]
df = df[1:]
df["Due dates"] = df["Due dates"].str.split("\n")
df_tall: pd.DataFrame = df.explode("Due dates", ignore_index=True)
df_tall = df_tall.rename({"Container type": "bin"}, axis=1)
df_tall = df_tall.loc[lambda df: df["bin"].isin(self.config["binsOfInterest"])]
df_tall["dateNext"] = pd.to_datetime(df_tall["Due dates"], format="%d/%m/%y")
df_tall["frequencyDays"] = np.select(
condlist=[
df_tall["Frequency"] == "ONCE WEEKLY",
df_tall["Frequency"] == "FORTNIGHTLY"
],
choicelist=[
pd.DateOffset(days=7),
pd.DateOffset(days=14)
])
df_tall["dateAfter"] = df_tall["dateNext"] + df_tall["frequencyDays"]
df_tall["daysNext"] = (df_tall["dateNext"] - pd.to_datetime(datetime.date.today())).dt.days
df_tall["daysNext"] = df_tall["daysNext"].astype(str) + " days"
df_tall["daysNext"] = df_tall["daysNext"].replace({"0 days": "This morning", "1 days": "Tomorrow morning"})
df_tall["bin"] = df_tall["bin"].str.replace(" WHEELIE BIN", "")
df_tall = df_tall.reset_index(drop=True)
df_tall = df_tall[["bin", "dateNext", "daysNext", "dateAfter"]]
df_summary: pd.DataFrame = df_tall.groupby("bin").first().reset_index()
df_summary = df_summary.sort_values(["dateNext", "bin"])
df_summary = df_summary.drop(["dateNext"], axis=1)
df_summary = df_summary.rename({
"daysNext": "Next Collection",
"dateAfter": "Collection Afterwards"
}, axis=1)
bin_colour = df_summary.loc[lambda df: df["bin"] != "FOOD BOX"].iloc[0]["bin"].split(" ")[0]
self.output = (df_summary, bin_colour)
logger.info(f"updated widget {self.widgetName} at: {datetime.datetime.now()}")
def generateHtml(self) -> None:
# get dataframe
html: str = self.output[0].to_html(index=False)
html = html.replace("<th></th>", "")
# # get colour box
# if self.output[1].lower() == "blue":
# html += """\n<div style="height:10 width:100 color:blue;"></div>"""
# elif self.output[1].lower() == "black":
# html += """<p>□</p>"""
self.html: str = html
|