blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7019b6b2ccb716f997d7907943ca21567284877d | 091eb8602b0de816222b3c2dabbdb84cc4c63298 | /game_8_puzzle.py | 573945d64189f0df47ca0ed7ec53352372908d3c | [] | no_license | 888yzbt888/game_8_puzzle | 5b32f1aaf8aca4a09aafa333a0c06a83292b8d30 | 1b9bdc2cb6d707e11455d49245a8501b7e69b587 | refs/heads/master | 2021-06-30T19:44:09.575701 | 2017-09-18T02:10:31 | 2017-09-18T02:10:31 | 103,508,934 | 0 | 0 | null | 2017-09-22T14:00:54 | 2017-09-14T08:45:40 | Python | UTF-8 | Python | false | false | 6,697 | py | import sys
import pygame
import random
import time
import numpy as np
import algorithm_8_puzzle
REPLAY_SPEED=0.4  # seconds to sleep between moves when replaying the solver's solution
XOFFSET = 30      # horizontal pixel offset of the board inside the window
YOFFSET = 15      # vertical pixel offset of the board inside the window
WINDOW_HEIGHT=440
WINDOW_WIDTH=400
FINAL_STATE=[[1,2,3],[4,5,6],[7,8,0]]  # solved configuration; 0 is the blank tile
def initgame():
    """Load tile bitmaps 0.bmp..8.bmp and create a freshly shuffled Game.

    Returns (game, state, img) where img[i] is the pygame surface for
    tile value i and state is the 3x3 numpy board (tile 0 is the blank).
    """
    img = []
    for i in range(0, 9):
        img.append(pygame.image.load(str(i) + ".bmp"))
    game = Game()
    state = game.getState()
    return game, state, img
#move to algorithm
def find_0_posi(block):
    """Return the [row, col] position of the blank (0) tile in *block*."""
    row, col = np.argwhere(block == 0)[0]
    return [int(row), int(col)]
#move to algorithm
def if_solvable(block):
    """Return True when the 3x3 board *block* can reach the goal state.

    For a 3-wide sliding puzzle a configuration is solvable iff the number
    of inversions among the eight numbered tiles (blank excluded) is even.
    (The original combined blank-involving inversions with the blank's flat
    index; those two quantities are equal, so the parity test is the same.)
    """
    tiles = [v for v in block.reshape(9) if v != 0]
    inversions = sum(
        1
        for later in range(len(tiles))
        for earlier in range(later)
        if tiles[earlier] > tiles[later]
    )
    return inversions % 2 == 0
class Game:
    """Mutable 3x3 sliding-puzzle board.

    Actions name the direction a *tile* slides into the blank space, so
    e.g. "down" swaps the blank with the tile directly above it.  The
    original implementation repeated the swap logic four times (once per
    direction) and the bounds logic four more times in checkvalid(); both
    are collapsed into a single direction-offset table here.  Public
    interface (__init__, move, checkvalid, getState) is unchanged.
    """

    # action -> (row, col) offset of the tile that slides into the blank
    _OFFSETS = {"down": (-1, 0), "up": (1, 0), "left": (0, 1), "right": (0, -1)}

    def __init__(self):
        # Random permutation of 0..8; may be unsolvable (caller checks).
        self.block = np.array(random.sample(range(9), 9)).reshape((3, 3))
        print("yes" if if_solvable(self.block) else "no")##

    def _blank(self):
        """Locate the blank (0) tile; returns (row, col) as plain ints."""
        row, col = np.argwhere(self.block == 0)[0]
        return int(row), int(col)

    def move(self, action):
        """Apply *action*; return (board, "done") or (board, "invalid").

        The board is mutated in place; the returned array is self.block.
        """
        if self.checkvalid(action) == False:
            return self.block, "invalid"
        row, col = self._blank()
        dr, dc = self._OFFSETS[action]
        tile = (row + dr, col + dc)
        # Swap the blank with the neighbouring tile.
        self.block[tile], self.block[row, col] = (
            self.block[row, col],
            self.block[tile],
        )
        return self.block, "done"

    def checkvalid(self, action):
        """True when *action* is a known direction that keeps the swap on-board."""
        if action not in self._OFFSETS:
            return False
        row, col = self._blank()
        dr, dc = self._OFFSETS[action]
        return 0 <= row + dr <= 2 and 0 <= col + dc <= 2

    def getState(self):
        """Expose the underlying 3x3 numpy board (not a copy)."""
        return self.block
def display_img(state, screen, img):
    """Blit the 3x3 board *state* onto *screen* using the tile surfaces in *img*.

    The original repeated the blit nine times with hard-coded coordinates
    (columns at 0/120/240 px, rows at 0/140/280 px); a double loop derives
    the same offsets from the cell indices.
    """
    pygame.display.update()
    for r in range(3):
        for c in range(3):
            screen.blit(img[state[r, c]], (120 * c + XOFFSET, 140 * r + YOFFSET))
def user(screen):
    """Manual play loop: arrow keys move tiles until solved (or Esc).

    NOTE(review): Esc only exits the loop when the board is unsolvable
    (sol == False); on a solvable board Esc sets the flag but the loop
    keeps running until the puzzle is solved - confirm this is intended.
    """
    game, state, img = initgame()
    sol = if_solvable(state)
    esc = False
    while True:
        if sol == False and esc == True:
            break
        if (state == FINAL_STATE).all():
            break
        action = ""
        for event in pygame.event.get():
            if event.type == pygame.KEYDOWN:
                k = event.key
                if k == pygame.K_LEFT:
                    action = "left"
                elif k == pygame.K_RIGHT:
                    action = "right"
                elif k == pygame.K_UP:
                    action = "up"
                elif k == pygame.K_DOWN:
                    action = "down"
                elif k == pygame.K_ESCAPE:
                    esc = True
        # An empty action is rejected by Game.checkvalid, so calling move
        # every frame is harmless.
        state, msg = game.move(action)
        #print(msg,action)
        display_img(state, screen, img)
    if esc == False:
        # Puzzle solved: keep showing the final board until any key is pressed.
        while True:
            end = False
            display_img(state, screen, img)
            for event in pygame.event.get():
                if event.type == pygame.KEYDOWN:
                    end = True
            if end == True:
                break
            else:
                pass
def auto(screen):
    """Automatic play: repeatedly ask the solver for moves and replay them.

    Loops until algorithm_8_puzzle.solve reports "finish" as its first
    step; each returned action is applied and drawn with a REPLAY_SPEED
    pause between moves.  Unsolvable boards are only reported on stdout.
    """
    game, state, img = initgame()
    if if_solvable(state):
        while True:
            print(state)#
            procedure = algorithm_8_puzzle.solve(state)
            print(procedure)#
            l = len(procedure)
            if l > 0:
                if procedure[0] == "finish":
                    break
            for action in procedure:
                state, msg = game.move(action)
                #print(msg, action)
                display_img(state, screen, img)
                time.sleep(REPLAY_SPEED)
    else:
        print("unsolvable")
    # Hold the final frame until any key is pressed.
    while True:
        end = False
        display_img(state, screen, img)
        for event in pygame.event.get():
            if event.type == pygame.KEYDOWN:
                end = True
        if end == True:
            break
def menu():
    """Main menu loop: Left = manual play, Right = auto-solve, Down = exit."""
    screen = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT), 0, 32)
    pygame.display.set_caption("Game")
    pygame.init()
    menu_option_img = []
    menu_option_img.append(pygame.image.load("Manual.bmp"))
    menu_option_img.append(pygame.image.load("Auto.bmp"))
    menu_option_img.append(pygame.image.load("Exit.bmp"))
    while True:
        pygame.display.update()
        screen.fill([0,0,0])
        screen.blit(menu_option_img[0], (10,80))
        screen.blit(menu_option_img[1], (210,80))
        screen.blit(menu_option_img[2], (110,300))
        option = ""
        for event in pygame.event.get():
            if event.type == pygame.KEYDOWN:
                k = event.key
                if k == pygame.K_LEFT:
                    option = "manual"
                elif k == pygame.K_RIGHT:
                    option = "auto"
                elif k == pygame.K_DOWN:
                    option = "exit"
        if option == "manual":
            screen.fill([0,0,0])
            user(screen)
        elif option == "auto":
            screen.fill([0, 0, 0])
            auto(screen)
        elif option == "exit":
            print("exit")
            pygame.quit()
            sys.exit()
def main():
    """Program entry point: hand control to the menu loop."""
    menu()
if __name__ == '__main__' :
    main()
| [
"1002789177@qq.com"
] | 1002789177@qq.com |
a9cc0883b47e3569797ac2468dfcffe5081ffe26 | 7787db9eaf80ac4a366648902ee945112bca127a | /Leetcode300/14. Longest Common Prefix.py | 692f5ec20f2b5de8345b7f4b768d6f26010650f4 | [] | no_license | LYXalex/Leetcode-PythonSolution | 0de7af69373171affe15f2074bacc74955d09a2c | 2ae3529366227efb5f2ad81a8b039ad71e8d1ed5 | refs/heads/main | 2023-06-22T18:49:32.492547 | 2021-07-14T02:12:05 | 2021-07-14T02:12:05 | 325,213,787 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 297 | py | class Solution:
def longestCommonPrefix(self, strs):
    """Return the longest common prefix shared by every string in strs."""
    prefix = ""
    # zip(*strs) yields the i-th character of every string and stops at
    # the shortest string, so no explicit length handling is needed.
    for column in zip(*strs):
        if len(set(column)) != 1:
            break
        prefix += column[0]
    return prefix
"yul801@ucsd.edu"
] | yul801@ucsd.edu |
75b4c345054f9757d6e642ce84b0d8c16a1c82c6 | eb00755d9d0f2630ffdb21e3ab6685b2fbcb0d9e | /tests/bench/bench_scripts/bench_sampleData.py | 729fcf79af5383d0af68875e3179d971fe99aff2 | [
"BSD-3-Clause"
] | permissive | mlangill/biom-format | aca45518c71b807cf30b0f548ad726880802a2b5 | 4cebfbdba8b6b64ff0d503df33634e3d52de1de0 | refs/heads/master | 2021-01-16T21:59:51.218830 | 2013-12-04T16:41:50 | 2013-12-04T16:41:50 | 9,486,201 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | #!/usr/bin/env python
from sys import argv
from gzip import open as gzip_open
from biom.parse import parse_biom_table
from random import choice
if __name__ == '__main__':
    # Benchmark body: parse a gzipped BIOM table named on the command line,
    # then fetch the data vector for one randomly chosen sample.  The result
    # is intentionally discarded - only the timing of the calls matters.
    table = parse_biom_table(gzip_open(argv[1]))
    foo = table.sampleData(choice(table.SampleIds))
"mcdonadt@colorado.edu"
] | mcdonadt@colorado.edu |
b9299ec6d17a4f7f9476a364ca7ba6aac57cba1c | 39debb4a11094caffa06e0c026cc40fe3e298c6c | /staff/staff_login_interface.py | 4685702e395e0bc6a05bc55bee0c32a393a151bd | [] | no_license | sumanbashyal007/Clinic_management_system | 66204c5628a4dd8085a73c76adfb743ee7f3635d | 0a318697ad04fc61bfe289be7490d01e393a9a7a | refs/heads/master | 2022-12-04T22:22:31.640301 | 2020-08-27T08:21:39 | 2020-08-27T08:21:39 | 290,690,065 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,260 | py | # ====================================== Importing Necessary photos ========================================#
from tkinter import *
from tkinter import messagebox
from PIL import Image, ImageTk
from admin.connection import MyDatabase
from staff.staff_registration import Staffregistrationwindow
from staff.staff_interface import Staff_interface
class Staffwindow:
    """Tkinter staff-login window for the clinic management system.

    __init__ builds the whole UI and blocks in Tk's mainloop; the other
    methods are button/menu callbacks that navigate to registration, the
    staff dashboard, or back to the application's first window.
    """

    # ====================================== Generating Windows ========================================#
    def __init__(self):
        self.wn = Tk()
        self.wn.title("Staff Login")
        self.wn.geometry("1370x735+0+0")
        self.wn.resizable(False, False)
        self.my_db = MyDatabase()
        # ====================================== Necessary Photos ========================================#
        # NOTE(review): image paths are absolute Windows paths tied to one
        # machine - consider relative paths.
        self.title_photo = PhotoImage(file="C:\\Users\\Aashrit\\Desktop\\Clinic_management_system\\pictures\\ad.png")
        self.title_photo_lable = Label(self.wn, image=self.title_photo)
        self.title_photo_lable.image = self.title_photo
        self.title_photo_lable.place(x=0, y=0)
        self.title01_photo = PhotoImage(file="C:\\Users\\Aashrit\\Desktop\\Clinic_management_system\\pictures\\nurse.png")
        self.title01_photo_lable = Label(self.wn, image=self.title01_photo, bg="white")
        self.title01_photo_lable.image = self.title01_photo
        self.title01_photo_lable.place(x=355, y=177)
        self.title02_photo = PhotoImage(file="C:\\Users\\Aashrit\\Desktop\\Clinic_management_system\\pictures\\username_logo.png")
        self.title02_photo_lable = Label(self.wn, image=self.title02_photo)
        self.title02_photo_lable.image = self.title02_photo
        self.title03_photo = PhotoImage(file="C:\\Users\\Aashrit\\Desktop\\Clinic_management_system\\pictures\\password.png")
        self.title03_photo_lable = Label(self.wn, image=self.title03_photo)
        self.title03_photo_lable.image = self.title03_photo
        # ====================================== All Frames ========================================#
        self.staff_frame = Frame(self.wn, bg="white")
        self.staff_frame.place(x=683, y=256)
        self.staff_frame1 = Frame(self.wn, bg="white")
        self.staff_frame1.place(x=683, y=177)
        self.staff_frame2 = Frame(self.wn, bg="white")
        self.staff_frame2.place(x=824, y=177)
        # ====================================== All Lables ========================================#
        self.lb_heading = Label(self.staff_frame1, text="Staff", font=('Impact',37,'bold','underline'), justify="center", fg='red', bg="white")
        self.lb_heading.grid(row=0, column=0, columnspan=1, padx=40, pady=10)
        self.lb_heading2 = Label(self.staff_frame2, text="Login", font=('Impact',37,'bold','underline'), justify="center", fg='blue', bg="white")
        self.lb_heading2.grid(row=0, column=1, columnspan=1, padx=22, pady=10)
        self.lb_username = Label(self.staff_frame, text="Username:", bg="white", fg="Blue", font=("cambria", 15, 'bold','underline'), image=self.title02_photo, compound=LEFT)
        self.lb_username.grid(row=5, column=0, padx=10, pady=5)
        self.lb_password = Label(self.staff_frame, text="Password:", bg="white", fg="Blue", font=("cambria", 15, 'bold','underline'), image=self.title03_photo, compound=LEFT)
        self.lb_password.grid(row=10, column=0, padx=10, pady=5)
        # ====================================== All Entries ========================================#
        self.ent_username = Entry(self.staff_frame, bg="white", fg="black", font=("arial", 15, "bold"))
        self.ent_username.grid(row=6, column=0, padx=40, pady=5)
        self.ent_pass = Entry(self.staff_frame, bg="white", fg="black", font=("arial", 15, "bold"), show="*")
        self.ent_pass.grid(row=11, column=0, padx=40, pady=5)
        self.butn_forget = Button(self.staff_frame, text="Forgot your password?", fg="#000080", bg="white", font=("Arial", 10, "underline"), cursor="hand2", command=self.forgotpassword, relief=FLAT)
        self.butn_forget.grid(row=14, columnspan=3, pady=5)
        # ====================================== Buttons Required ========================================#
        # "Remember me" is purely decorative: its state is never read.
        self.ch_btn = Checkbutton(self.staff_frame, text="Remember me", bg="white", fg="Black", font=("Arial MT", 10, "bold"), cursor="hand2")
        self.ch_btn.grid(row=18, columnspan=2, padx=5, pady=2)
        self.loginbtn_photo = PhotoImage(file="C:\\Users\\Aashrit\\Desktop\\Clinic_management_system\\pictures\\loginbutn.png")
        self.loginbtn_photo_button = Button(self.staff_frame, image=self.loginbtn_photo, bg='white', fg="#3498eb", activebackground="#73C2FB", cursor="hand2", command=self.checking_credentials, font=("bold", 13), height=39, width=120, relief=RAISED)
        self.loginbtn_photo_button.image = self.loginbtn_photo
        self.loginbtn_photo_button.grid(row=20, columnspan=2, padx=0, pady=6)
        self.butn_dont_have_an_account = Button(self.staff_frame, text="Don't have an account? | Sign Up", fg="#000080", bg="white", font=("Arial", 10, "underline"), command=self.open_staffregpage, cursor="hand2", relief=FLAT)
        self.butn_dont_have_an_account.grid(row=22, columnspan=3, pady=5)
        self.show_menu()
        self.wn.mainloop()

    # ====================================== Open Staff Regestritation Page ========================================#
    def open_staffregpage(self):
        """Close this window and open the staff registration window."""
        self.wn.destroy()
        Staffregistrationwindow()

    # ====================================== Opening Staff Dashboard ========================================#
    def open_staff_dashboard(self, usrlgn):
        """Close this window and open the dashboard for user *usrlgn*."""
        self.wn.destroy()
        Staff_interface(usrlgn)

    # ====================================== Checking Credentials ========================================#
    def checking_credentials(self):
        """Validate the entered username/password against the staff table.

        Comparison is case-insensitive on both fields.  Rows returned by
        fetchingdata_staff() are indexed positionally - presumably
        (0=username, 1=password, 2=display name, 3=admin-approval flag);
        verify against MyDatabase.
        """
        username = self.ent_username.get().lower()
        password = self.ent_pass.get().lower()
        if len(username) == 0 or len(password) == 0:
            messagebox.showerror("Missing data entry", "You can't leave any of the sections empty.")
        else:
            values = self.my_db.fetchingdata_staff()
            username_mylist = []
            for i in values:
                data = (i[0]).lower()
                username_mylist.append(data)
            if username in username_mylist:
                required_index = username_mylist.index(username)
                name_logged_in_user = values[required_index][0]
                if (username == values[required_index][0].lower() and password == values[required_index][1].lower()):
                    # Column 3 holds the admin-approval flag ("yes"/"Yes").
                    if values[required_index][3] == "yes" or values[required_index][3] == "Yes":
                        messagebox.showinfo("Login Successful", f"Welcome Mr {values[required_index][2]}")
                        self.open_staff_dashboard(name_logged_in_user)
                    else:
                        messagebox.showerror("User not authenticated", "Your registration hasn't been\n approved by the admin yet.")
                else:
                    messagebox.showerror("Login Credintials didn't matched", "The given username and password didn't matched")
            else:
                messagebox.showerror("User Doesn't Exist", "Sorry you aren't registered yet")

    # =================================== MENU Button ===================================#
    def show_menu(self):
        """Attach a menu bar with a single '<-- Back' cascade that logs out."""
        my_menu = Menu(self.wn)
        self.wn.config(menu=my_menu)
        log_out = Menu(my_menu)
        my_menu.add_cascade(label="<-- Back", menu=log_out)
        log_out.add_cascade(label="<-- Back", command=self.logout)

    # =================================== Logging out ===================================#
    def logout(self):
        """Destroy this window and return to the application's first window."""
        self.wn.destroy()
        # Imported here to avoid a circular import at module load time.
        from interface.first_window import Firstwindow
        Firstwindow()

    # =================================== Forgot Password ==============================#
    def forgotpassword(self):
        """Placeholder: password recovery is not implemented yet."""
        messagebox.showinfo("Service Unavailable", "The system is in its inital phase."
                            "\n Service regarding credintials shall"
                            "\n be provided very soon.\n"
                            "Please consult admin desk for more info.")
"suman.bashyal007@gmail.com"
] | suman.bashyal007@gmail.com |
f64ca4a352ebd20fb444b43b39e98c4f44f8f5c4 | c146bce0f8585307877b53448088000ad5b6e690 | /setupStimuliandWalks.py | bd13ee914a88eb0faa08e328c78e5016ec583777 | [] | no_license | utooley/netlearn_task_v1 | 07b4dbbc5a8856a45118901709903607c0582d15 | 914411c34fc9551e704c1e8f67519308e35cdc0a | refs/heads/master | 2021-01-16T08:24:39.363760 | 2020-02-25T16:06:18 | 2020-02-25T16:06:18 | 243,041,098 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,508 | py | #Internet says that to run scripts from Terminal on new Macs, modules need to be imported in this order
from pandas import DataFrame, read_csv
from psychopy import core, gui
from psychopy import data, event, logging, visual
# Import modules
import os
import random
import re
import urllib
import csv
import numpy as np
from psychopy import prefs
#prefs.general['audioLib'] = ['pyo']
prefs.general['audioLib'] = ['pygame']
prefs.general['shutdownKey'] = 'q'
from psychopy import sound
from config import *
#print prefs
################
# Set up instruction stimuli #
################
#prior to task
# Pre-practice instruction text; the literal "\n\n" escape sequences inside
# the triple-quoted string render as blank lines on screen.
pretask_instruc_1="""Now we're going to play the alien game.
\n\n
You'll see two alien friends. You can tap either alien to see the next set of friends.
\n\n
Try to tap on the aliens as fast as you can.
\n\n
Now, let's practice!
"""
#Set up instructions to show
# `win` is expected to come from `from config import *` above - confirm.
fixation = visual.TextStim(win, text="+", height=2, color="#FFFFFF")
pretask_instrucScreen_1= visual.TextStim(win, text=pretask_instruc_1, wrapWidth=30, alignHoriz="center", height=1.0, color="#FFFFFF")
#set up a mouse
mymouse = event.Mouse(win=win)
mymouse.setPos((0,0))
#transition to task
# Text shown between the practice block and the real task.
transition_instruc_1="""Great! Now, let's play for real.
\n\n
Remember, your job as a scientist is to watch the aliens and try to figure out who's friends with who!\
\n\n
Ready? Let's go!
"""
transition_screen_1= visual.TextStim(win, text=transition_instruc_1, wrapWidth=30, alignHoriz="center", height=1.0, color="#FFFFFF")
# Final SCREEN
# Text shown after the walk finishes, before the post-task questions.
completion_instruc_1="""Great job!
\n\n
Now you're back on Planet Earth...
\n\n
Remember how when you saw two aliens together, that meant they were friends?
\n\n\
Now we're going to ask you about the aliens you just saw.
"""
completion_screen_1= visual.TextStim(win, text=completion_instruc_1, wrapWidth=30, alignHoriz="center", height=1.0, color="#FFFFFF")
################
# Import trial lists #
################
# def get_trials(subj_id):
# # import trial list and info and set up trial handler
# trialFile = 'subjects/subj{}/walks1.csv'.format(subj_id)
# trial_list = [ item for item in csv.DictReader(open(trialFile,'rU'))]
# trials = data.TrialHandler(trial_list,nReps=1,method='sequential')
# return trials
#####
# SHOW INSTRUCTIONS
#####
#define a function to show instructions
def show_instructions():
    """Draw the pre-task instruction screen, wait 3 s, then wait for a key.

    The press1..press4 flags and the commented-out loop are leftovers from
    a disabled mouse-advance path; keypress advance is what actually runs.
    """
    print('started instructionss')
    mymouse.setPos((0,0))
    mymouse.getPos()
    press1 = False
    press2 = False
    press3 = False
    press4 = False
    #core.wait(3)
    print('started instruct 2')
    # while not press1 and not press2 and not press3 and not press4:
    pretask_instrucScreen_1.draw()
    win.flip()
    core.wait(3)
    event.waitKeys()
    # if mymouse.mouseMoved():
    #     press1 = True
    #     core.wait(.2)
#####
# READY SCREEN INSTRUCTIONS
#####
#define a function to show instructions
def show_ready_screen():
    """Draw the transition ('let's play for real') screen and wait for a key."""
    mymouse.setPos((0,0))
    mymouse.getPos()
    press1 = False
    press2 = False
    # while not press1 and press2:
    transition_screen_1.draw()
    win.flip()
    event.waitKeys()
    # if mymouse.mouseMoved():
    #     press1 = True
    #     core.wait(.2)
############
# Set up trial stimuli #
##############
#background image
background_image = visual.ImageStim(win, 'stimuli/Monster-Bkg-1-BW.jpg')
#Set up a mouse?  (re-created here; shadows the mouse built above)
mymouse = event.Mouse(win=win)
#Import audio wav files
#soundL = sound.Sound('sounds/low_200.wav')
#soundR = sound.Sound('sounds/high_200.wav')
#Set Trial Stimuli
# imgL/imgR are the two alien slots; their images are swapped per trial.
img = visual.ImageStim(win,'stimuli/null.png')
imgL = visual.ImageStim(win,'stimuli/null.png',pos=(-7,-4), size=10)
imgR = visual.ImageStim(win,'stimuli/null.png',pos=(7,-4), size=10)
#Completion sound
donesound=sound.Sound('sounds/high_200.wav')
#####
#Make a function to get the practice trial data #
####
def set_practicedata(subj_id):
    """Build a sequential TrialHandler over the first practice trials.

    Reads subjData/<subj_id>/exposure_walk1.csv and keeps rows 0-3.
    NOTE(review): set_walkdata() starts the real run at row 5, so row 4 is
    never presented by either function - confirm the off-by-one is intended.
    """
    #########
    # log file
    # Get logfile name
    #Split trials into here runs if desired
    #trials=get_trials(subj_id)
    # import trial list and info and set up trial handler
    trialFile = 'subjData/{}/exposure_walk1.csv'.format(subj_id)
    trial_list = [ item for item in csv.DictReader(open(trialFile,'rU'))]
    prac_trial_list = trial_list[0:4]
    prac_trials = data.TrialHandler(prac_trial_list,nReps=1,method='sequential')
    #return trials
    ### DON'T NEED THIS ANYMORE
    # import animation conditions and info and set up list
    #animateFile = 'stimuli/animation_conds.csv'
    #animate_list = [ item for item in csv.DictReader(open(animateFile,'rU'))]
    #Add data types to trials
    #trials.data.addDataType('resp')
    prac_trials.data.addDataType('onset')
    prac_trials.data.addDataType('rt')
    # setup logging #
    #log_file = logging.LogFile("logs/subj%s.log" % (subj_id), level=logging.DATA, filemode="w")
    return (prac_trials)
#####
#Make a function to get the walk data #
####
def set_walkdata(subj_id):
    """Prepare the main run: unique log name, log dir, trials, and log file.

    Returns (log_file, logname, trials).  This file targets Python 2
    (note the statement-form prints below).
    """
    #########
    # log file
    # Get logfile name
    expdir = os.getcwd()
    logdir = '{}/logs/{}'.format(expdir,subj_id)
    print logdir
    #if one participant is run more than once, make sure their log is saved separately
    # Keep bumping the _N suffix until a free filename is found.
    ct = 0
    while 'logname' not in locals() or os.path.exists(logname):
        if ct > 0:
            lognum = '_%d' % (ct)
        else:
            lognum = ''
        logname = '{}/{}_log{}.csv'.format(logdir, subj_id, lognum)
        ct += 1
    if not os.path.exists(os.path.join('logs/%s/' % subj_id)):
        print "creating subject data directory"
        directory = "logs/%s/" % subj_id
        os.makedirs(directory)
    #Split trials into here runs if desired
    #trials=get_trials(subj_id)
    # import trial list and info and set up trial handler
    trialFile = 'subjData/{}/exposure_walk1.csv'.format(subj_id)
    trial_list = [ item for item in csv.DictReader(open(trialFile,'rU'))]
    # NOTE(review): starts at row 5 while set_practicedata uses rows 0-3,
    # leaving row 4 unused - confirm intended.
    trial_list = trial_list[5:len(trial_list)]
    trials = data.TrialHandler(trial_list,nReps=1,method='sequential')
    #return trials
    # import animation conditions and info and set up list
    #animateFile = 'stimuli/animation_conds.csv'
    #animate_list = [ item for item in csv.DictReader(open(animateFile,'rU'))]
    #Add data types to trials
    #trials.data.addDataType('resp')
    trials.data.addDataType('onset')
    trials.data.addDataType('rt')
    # setup logging #
    log_file = logging.LogFile("logs/%s/subj%s.log" % (subj_id, subj_id), level=logging.DATA, filemode="w")
    return (log_file,logname,trials)
#####
#Make a function to run the practice trials #
####
def do_runpractrials(subj_id, prac_trials, runID):
    """Run the practice trials: show alien pairs until one is tapped.

    Per trial: load images/sounds from the trial row, animate each alien in
    turn (the animation code comes from the CSV and is run with exec), and
    finish the trial when the mouse enters either alien.  `runID` is
    accepted but unused here.
    """
    #log_file = logging.LogFile("logs/subj%s.log" % (subj_id), level=logging.DATA, filemode="w")
    #change logging level to DATA if don't want so much info
    ########################
    # SHOW READY SCREEN #
    ########################
    mymouse.getPos()
    # 1.5 s fixation before the block starts.
    atimer = core.CountdownTimer(1.5)
    while atimer.getTime() > 0:
        fixation.draw()
        win.flip()
    # wait for trigger from scanner
    #specify a key here
    #event.waitKeys()
    # set clock
    globalClock = core.Clock()
    logging.setDefaultClock(globalClock)
    logging.log(level=logging.DATA, msg="** START TASK **")
    # NOTE(review): the second assignment replaces the whole extraInfo dict,
    # discarding the START timestamp just stored - probably meant to update.
    prac_trials.extraInfo = {'START':globalClock.getTime()}
    prac_trials.extraInfo = {'participant':subj_id}
    # # disdaq fixation
    # logging.log(level=logging.DATA, msg="FIXATION")
    # for frame in range(frames['disdaq']):
    #     fixation.draw()
    #     win.flip()
    #size_list=[-= 0.1, -= 2, += 0.5, += 0.1]
    tidx = 0
    for tidx, trial in enumerate(prac_trials):
        print('In trial {} - node1 = {} node2 = {}'. format(tidx+1, trial['node1'], trial['node2']))
        print(trial['path1'],trial['path2'])
        logging.log(level=logging.DATA, msg="Trial %i - Stimuli1 %s - Stimuli2 %s" % (tidx+1, trial['path1'], trial['path2']))
        #Set values for trial
        imgL.setImage(trial['path1'])
        imgR.setImage(trial['path2'])
        # Animation snippets (Python source strings) from the trial row.
        animateone = trial['movement1']
        animatetwo = trial['movement2']
        #print(animateone,animatetwo)
        #add sounds here
        soundL = sound.Sound(trial['sound1'], secs=0.1)
        soundR = sound.Sound(trial['sound2'], secs=0.1)
        #soundR=sound.Sound(trial['sound2'])
        #imgR.size(0.1, '+')
        onset = globalClock.getTime()
        prac_trials.addData('onset', onset)
        #event.Mouse.clickReset(mouseclick)
        #correct=None
        #responses=None
        mymouse.setPos((0,0))
        mymouse.getPos()
        key = None
        rt = None
        Pressed = False
        #while not mouseclick.getPressed():
        #while globalClock.getTime() < (tidx+1)*trialDur:
        #timeimg1 = core.CountdownTimer(alien_duration_short)#how long the entire trial lasts for
        while not Pressed:
            #img_rect.draw()
            #set moving animation characteristics here after resetting normal!
            imgL.ori = (0)
            imgR.ori = (0)
            imgL.opacity = (1)
            imgR.opacity = (1)
            imgL.size = (10)
            imgR.size = (10)
            # imgL.pos=(-7,-4)
            # imgR.pos = (7,-4)
            #exec('imgR.'+ animateone['animation'])
            #exec('imgL.' + animatetwo['animation'])
            #print(animateone)
            #print(animatetwo)
            #show the result of the above
            background_image.draw()
            imgL.draw()
            imgR.draw()
            win.flip()
            soundL.play()
            # Animate alien 1 until its time is up or the mouse hits an alien.
            timeimg1 = core.CountdownTimer(alien_duration_short)
            #mymouse.getPos()
            #while (timeimg1.getTime() > 2 and np.all(mymouse.getPos()) == 0):
            while (timeimg1.getTime() > 0 and not (imgL.contains(mymouse) or imgR.contains(mymouse))):
                #while localClock.getTime() < fixDur:
                #for frame in range(10*frame_rate):
                exec(animateone)#first have the left image zoom off
                background_image.draw()
                imgL.draw()
                imgR.draw()
                win.flip()
            #mymouse.getPos()
            soundR.play(loops=0)
            # Then animate alien 2 the same way.
            timeimg2 = core.CountdownTimer(alien_duration_short)
            while (timeimg2.getTime() > 0 and not (imgL.contains(mymouse) or imgR.contains(mymouse))):
                #while (timeimg1.getTime() > 0 and timeimg1.getTime() < 2 and np.all(mymouse.getPos()) == 0):
                #while localClock.getTime() < fixDur:
                #for frame in range(10*frame_rate):
                exec(animatetwo)#first have the left image zoom off
                background_image.draw()
                imgL.draw()
                imgR.draw()
                win.flip()
            # Escape aborts the whole experiment.
            if len(event.getKeys(['escape'])):
                logging.flush()
                win.close()
                core.quit()
                break
            if imgL.contains(mymouse) or imgR.contains(mymouse):
                #if np.any(mymouse.getPos()) != 0 or timeimg1.getTime() < 0:
                donesound.play()
                rt = globalClock.getTime()-onset
                soundL.stop()
                soundR.stop()
                # Slide the left alien off screen, then the right one.
                timer1 = core.CountdownTimer(.6)#how fast L image moves off screen.
                while timer1.getTime() > 0:
                    #while localClock.getTime() < fixDur:
                    #for frame in range(10*frame_rate):
                    imgL.pos -= (.25,0)#first have the left image zoom off
                    background_image.draw()
                    imgL.draw()
                    imgR.draw()
                    win.flip()
                #imgL.size += 10
                donesound.stop()
                timer2 = core.CountdownTimer(.9)
                while timer2.getTime() > 0:
                    imgR.pos -= (.25,0)#then move the right image over
                    background_image.draw()
                    imgR.draw()
                    win.flip()
                core.wait(.25)
                Pressed = True
        event.clearEvents()
        soundL.stop()
        soundR.stop()
        # Restore the aliens' home positions for the next trial.
        imgL.pos = (-7,-4)
        imgR.pos = (7,-4)
        #event.clearEvents()
        # If no response, play low sound
        #if responses==None:
        #low.play()
        #responses='NA'
        #rt='NA'
        #correct=0
        # record response
        #trials.addData('resp',responses)
        prac_trials.addData('rt',rt)
        # final fixation
        timer = core.CountdownTimer(fixDur)
        while timer.getTime() > 0:
            #while localClock.getTime() < fixDur:
            #for frame in range(10*frame_rate):
            fixation.draw()
            win.flip()
    # # break
    # if runID<5:
    #     NS_breakScreen.draw()
    #     win.flip()
    #     event.waitKeys(keyList=('1'))
    logging.log(level=logging.DATA, msg="*** END ****")
    prac_trials.extraInfo['END'] = globalClock.getTime()
#####
#Make a function to run the trials #
####
def do_runtrials(subj_id, trials, logname, runID):
    """Run the main walk trials and save results to *logname*.

    Structure mirrors do_runpractrials (per-trial animation via exec of
    CSV-supplied code, trial ends when the mouse enters either alien), but
    additionally opens a per-subject psychopy log file and writes the
    TrialHandler out as wide-format text at the end (and on escape).
    `runID` is accepted but unused here.
    """
    log_file = logging.LogFile("logs/%s/subj%s.log" % (subj_id, subj_id), level=logging.DATA, filemode="w")
    #change logging level to DATA if don't want so much info
    ########################
    # SHOW READY SCREEN #
    ########################
    # 1.5 s fixation before the block starts.
    atimer = core.CountdownTimer(1.5)
    while atimer.getTime() > 0:
        fixation.draw()
        win.flip()
    # wait for trigger from scanner
    # set clock
    globalClock = core.Clock()
    logging.setDefaultClock(globalClock)
    logging.log(level=logging.DATA, msg="** START TASK **")
    # NOTE(review): the second assignment replaces the whole extraInfo dict,
    # discarding the START timestamp just stored - probably meant to update.
    trials.extraInfo = {'START':globalClock.getTime()}
    trials.extraInfo = {'participant':subj_id}
    # # disdaq fixation
    # logging.log(level=logging.DATA, msg="FIXATION")
    # for frame in range(frames['disdaq']):
    #     fixation.draw()
    #     win.flip()
    #size_list=[-= 0.1, -= 2, += 0.5, += 0.1]
    tidx = 0
    for tidx, trial in enumerate(trials):
        print('In trial {} - node1 = {} node2 = {}'. format(tidx+1, trial['node1'], trial['node2']))
        print(trial['path1'],trial['path2'])
        logging.log(level=logging.DATA, msg="Trial %i - Stimuli1 %s - Stimuli2 %s" % (tidx+1, trial['path1'], trial['path2']))
        #Set values for trial
        imgL.setImage(trial['path1'])
        imgR.setImage(trial['path2'])
        # Animation snippets (Python source strings) from the trial row.
        animateone = trial['movement1']
        animatetwo = trial['movement2']
        #print(animateone,animatetwo)
        #add sounds here
        soundL = sound.Sound(trial['sound1'], secs=0.1)
        soundR = sound.Sound(trial['sound2'], secs=0.1)
        #soundR=sound.Sound(trial['sound2'])
        #imgR.size(0.1, '+')
        onset = globalClock.getTime()
        trials.addData('onset', onset)
        #event.Mouse.clickReset(mouseclick)
        #correct=None
        #responses=None
        mymouse.setPos((0,0))
        mymouse.getPos()
        key = None
        rt = None
        Pressed = False
        #while not mouseclick.getPressed():
        #while globalClock.getTime() < (tidx+1)*trialDur:
        #timeimg1 = core.CountdownTimer(alien_duration)
        while not Pressed:
            #img_rect.draw()
            #set moving animation characteristics here after resetting normal!
            imgL.ori = (0)
            imgR.ori = (0)
            imgL.opacity = (1)
            imgR.opacity = (1)
            imgL.size = (10)
            imgR.size = (10)
            imgL.pos = (-7,-4)
            imgR.pos = (7,-4)
            #print(animateone)
            #print(animatetwo)
            #show the result of the above
            background_image.draw()
            imgL.draw()
            imgR.draw()
            win.flip()
            # Animate alien 1 until its time is up or the mouse hits an alien.
            timeimg1 = core.CountdownTimer(alien_duration_short)#how fast L image moves off screen.
            soundL.play()
            while (timeimg1.getTime() > 0 and not (imgL.contains(mymouse) or imgR.contains(mymouse))):
                #while (timeimg1.getTime() > 0 and not (imgL.contains(mymouse) or imgR.contains(mymouse))):
                #while localClock.getTime() < fixDur:
                #for frame in range(10*frame_rate):
                exec(animateone)#first have the left image zoom off
                background_image.draw()
                imgL.draw()
                imgR.draw()
                win.flip()
            # Then animate alien 2 the same way.
            timeimg2 = core.CountdownTimer(alien_duration_short)#how fast L image moves off screen.
            soundR.play()
            while (timeimg2.getTime() > 0 and not (imgL.contains(mymouse) or imgR.contains(mymouse))):
                #while localClock.getTime() < fixDur:
                #for frame in range(10*frame_rate):
                exec(animatetwo)#first have the left image zoom off
                background_image.draw()
                imgL.draw()
                imgR.draw()
                win.flip()
            # Escape saves what we have and aborts the experiment.
            if len(event.getKeys(['escape'])):
                logging.flush()
                trials.saveAsWideText(fileName=logname, delim='\t', appendFile=False)
                win.close()
                core.quit()
                break
            if imgL.contains(mymouse) or imgR.contains(mymouse):
                #if np.any(mymouse.getPos()) != 0 or timeimg1.getTime() < 0:
                donesound.play()
                rt = globalClock.getTime()-onset
                soundL.stop()
                soundR.stop()
                # Slide the left alien off screen, then the right one.
                timer1 = core.CountdownTimer(.6)#how fast L image moves off screen.
                while timer1.getTime() > 0:
                    #while localClock.getTime() < fixDur:
                    #for frame in range(10*frame_rate):
                    imgL.pos -= (.25,0)#first have the left image zoom off
                    background_image.draw()
                    imgL.draw()
                    imgR.draw()
                    win.flip()
                donesound.stop()
                timer2 = core.CountdownTimer(.9)
                while timer2.getTime() > 0:
                    imgR.pos -= (.25,0)#then move the right image over
                    background_image.draw()
                    imgR.draw()
                    win.flip()
                core.wait(.25)
                Pressed = True
        event.clearEvents()
        soundL.stop()
        soundR.stop()
        #event.clearEvents()
        # record response
        #trials.addData('resp',responses)
        #imgL.pos-=(1,0)
        trials.addData('rt',rt)
    # # break
    # if runID<5:
    #     NS_breakScreen.draw()
    #     win.flip()
    #     event.waitKeys(keyList=('1'))
    logging.log(level=logging.DATA, msg="*** END ****")
    trials.extraInfo['END'] = globalClock.getTime()
    trials.saveAsWideText(fileName=logname, delim='\t', appendFile=False)
#####
# COMPLETION SCREEN
#####
#define a function to show instructions
def show_completion_screen():
    """Show the end-of-task screen, wait for any key, then close the window."""
    mymouse.setPos((0,0))
    mymouse.getPos()
    press1 = False
    press2 = False
    # while not press1 and not press2:
    completion_screen_1.draw()
    win.flip()
    event.waitKeys()
    # core.wait(2)
    # if mymouse.mouseMoved():
    #     press1 = True
    #     core.wait(.2)
    print('done')
    win.close()
####
# If this script is run by itself, not loaded as a module, do the below:
####
if __name__ == '__main__':
    # Quick manual run: subject 1 straight through practice + walk run 1.
    subj_id = 1
    #just show the instructions
    #show_instructions()
    #and then run through trials
    log_file, logname, trials = set_walkdata(subj_id)
    practrials = set_practicedata(subj_id)
    #round 1
    # Fix: do_runpractrials(subj_id, prac_trials, runID) takes three
    # arguments; the original two-argument call raised a TypeError before
    # any practice trial ran.  runID 1 matches the do_runtrials call below.
    do_runpractrials(subj_id, practrials, 1)
    do_runtrials(subj_id, trials, logname.replace('.csv', '_run1.csv'), 1)
| [
"utooley@gmail.com"
] | utooley@gmail.com |
667615d24df3f447ef773eb76c4de08b7f9c84c4 | aa5db0b160300c61c6a243c10a9ae4f24e61acbe | /main.py | 38d11176559c61a5e2a98abcb2dfe3406902bd6e | [] | no_license | ShlomiRex/Twitter-Slack-Bot-Interview-Home-Assignment | 68e7a36baae49653b8e151c128e822db4cd057f9 | c2c8d9421046f792acf35e4cdf1d62bc85a7dfc3 | refs/heads/main | 2023-08-29T21:41:23.942498 | 2021-11-15T17:46:13 | 2021-11-15T17:46:13 | 427,901,782 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,811 | py | import configparser
import datetime
import os.path
import pickle
import threading
import time
from dotenv import load_dotenv
from flask import Flask, Response, request
import logging
import slack_worker
import twitter_worker
# Environment
from twitter_worker.twitter_worker import Tweet
load_dotenv()
# Configuration files
config = configparser.ConfigParser()
config.read("config.ini")
# Logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()
# Flask
app = Flask(__name__)
# Globals / others
running = False
pickled_timestamps_file = "scan_timestamps.pkl"
@app.route("/new-content", methods=["POST"])
def command_new_content():
    """
    Command handler for '/new-content'.

    For every configured Twitter page, pulls tweets since the last recorded
    scan (defaulting to the last hour on a first scan) and posts them to
    Slack. The work runs in a background thread so Slack does not report
    "operation_timeout" while waiting for the HTTP response.
    :return: empty 200 response; the actual work continues asynchronously
    """
    logger.info("Command 'new-content' called")
    # In order to not get "operation_timeout" we can run this in another thread
    def threaded_task():
        for page in twitter_worker.pages_to_pull:
            scan_timestamp = get_last_scan_timestamp(page)
            if not scan_timestamp:
                # Defaults to one hour as per instructions.
                tweets = twitter_worker.pull_tweets_last_hour(page)
                push_scan_timestamp(page, datetime.datetime.utcnow() - datetime.timedelta(hours=1))
            else:
                # Otherwise scan again from the recorded timestamp; anything
                # posted since then is picked up by this delta.
                tweets = twitter_worker.pull_tweets(page, start_time=scan_timestamp)
                push_scan_timestamp(page, datetime.datetime.utcnow())
            slack_worker.post_new_content(page, tweets)
    threading.Thread(target=threaded_task).start()
    return Response(), 200
@app.route("/now", methods=["POST"])
def command_now():
    """Handle the '/now' slash command: post the current datetime to Slack."""
    logger.info("Command 'now' called")
    slack_worker.post_current_datetime()
    empty_response = Response()
    return empty_response, 200
@app.route("/tweet", methods=["POST"])
def command_tweet():
    """Handle the '/tweet' slash command: '/tweet <twitter_id> <message>'.

    Returns 200 on success, or 400 with an explanatory message when the
    command text is missing, malformed, or the tweet could not be sent.
    """
    logger.info("Command 'tweet' called")
    command_text = request.form.get("text")
    if not command_text:
        return Response("No tweeter id specified.", 400)
    # First token is the recipient id; everything after the first space is the message.
    parts = command_text.split(" ", 1)
    if len(parts) != 2:
        return Response("No recipient and no message was given.", 400)
    recipient, message = parts
    success, reason = twitter_worker.tweet(recipient, message)
    if not success:
        return Response(reason, 400)
    return Response(), 200
def get_last_scan_timestamp(twitter_id: str):
    """
    Read the pickle file and return the last scan timestamp for this user.

    :param twitter_id: Twitter handle used as the dictionary key.
    :return: the stored datetime, or None when the file is missing,
             empty/corrupt, or has no entry for this user.
    """
    if not os.path.exists(pickled_timestamps_file):
        return None
    with open(pickled_timestamps_file, "rb") as file:
        try:
            obj = pickle.load(file)
        except (EOFError, pickle.UnpicklingError):
            # BUG FIX: push_scan_timestamp() used to create the file empty
            # before the first dump, and the bot thread and Flask handlers
            # race on it — an empty/half-written file must read as "no data"
            # instead of crashing the caller.
            return None
    if obj and obj.get(twitter_id):
        return obj[twitter_id]
    return None
def push_scan_timestamp(twitter_id: str, timestamp: datetime.datetime):
    """
    Write the scan timestamp for a user, preserving other users' entries.

    :param twitter_id: Twitter handle used as the dictionary key.
    :param timestamp: scan time to record.
    :return: None
    """
    # BUG FIX: the old code did `open(pickled_timestamps_file, "x")` without
    # closing the handle (resource leak) just to guarantee the file existed
    # before reading it. Reading with FileNotFoundError handled makes the
    # pre-create step unnecessary.
    try:
        with open(pickled_timestamps_file, "rb") as file:
            obj = pickle.load(file)
    except (FileNotFoundError, EOFError, pickle.UnpicklingError):
        # First run (no file yet) or an empty/corrupt file: start fresh.
        obj = None
    if not obj:
        obj = {}
    obj[twitter_id] = timestamp
    with open(pickled_timestamps_file, "wb") as file:
        pickle.dump(obj, file)
def dispatch_bot(twitter_username: str, every: int):
    """
    Run the time bot. It writes to channel every X seconds the current time. It also scans for new tweets.

    The loop runs in a background thread and exits when the module-level
    `running` flag becomes False.
    :param twitter_username: Twitter account to poll for new tweets.
    :param every: Amount of seconds to wait between sends.
    :return: None
    """
    def time_loop():
        while running:  # module-level flag, set in __main__
            # Timestamp of the previous scan (None on the very first pass).
            timestamp = get_last_scan_timestamp(twitter_username)
            #utc_now = datetime.datetime.utcnow() - datetime.timedelta(minutes=60) # TODO: Remove timedelta
            utc_now = datetime.datetime.utcnow()
            push_scan_timestamp(twitter_username, utc_now)
            if timestamp:
                # Only tweets newer than the previous scan are posted.
                tweets = twitter_worker.pull_tweets(twitter_username, timestamp)
                if tweets:
                    slack_worker.post_tweets(twitter_username, tweets)
            slack_worker.post_current_datetime()
            time.sleep(every)
    threading.Thread(target=time_loop).start()
if __name__ == "__main__":
    running = True
    # Run flask in a daemon thread so the bot loop below can start afterwards.
    kwargs = {'host': '127.0.0.1', 'port': 5000, 'threaded': True, 'use_reloader': False, 'debug': False}
    # BUG FIX: `Thread(...).start()` returns None, so the old
    # `flaskThread = threading.Thread(...).start()` always bound None.
    # Keep the Thread object, then start it.
    flaskThread = threading.Thread(target=app.run, daemon=True, kwargs=kwargs)
    flaskThread.start()
    # Run bot's time functionality in separate thread
    dispatch_bot(twitter_username="DomnenkoShlomi", every=3600)
| [
"vgtvgy1@gmail.com"
] | vgtvgy1@gmail.com |
00556680676e49944ba71fefdd6fed4756bfb9a5 | 17f75be58052605ddf4da0af2dd3abba69dc3bc4 | /api/migrations/0001_initial.py | bd6e5a842c3771f5a8eb56800966d4f2ba674a6b | [] | no_license | assasin-lv/my-first-blog | 9f8547a84091ebba2d91d73a7554f2279d463a21 | f068517e7df5d6f3ed026213a3afc6528dd944dc | refs/heads/master | 2021-02-19T05:03:52.086526 | 2020-03-10T02:56:37 | 2020-03-10T02:56:37 | 245,278,534 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 645 | py | # Generated by Django 2.0.6 on 2019-06-04 19:15
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema migration: creates the Terminal table."""

    # First migration of the app — no prior migration to depend on.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Terminal',
            fields=[
                # Explicit integer primary key instead of Django's default `id`.
                ('id_terminal', models.AutoField(primary_key=True, serialize=False)),
                ('serie', models.CharField(max_length=50)),
                ('mac', models.CharField(max_length=50)),
                ('android_id', models.CharField(max_length=50)),
                ('terminal', models.CharField(max_length=50)),
            ],
        ),
    ]
| [
"noob.assasin.lv@gmail.com"
] | noob.assasin.lv@gmail.com |
2b8b167f852914d1fd4dbd941c92ebeffbc7c63a | de033d5aba647555fa4fd4844df9b563cfc1e2f4 | /py/elfs/debuginfo.py | b699b2d0abeab20dad29e6c8fe6e2f91ed3f87f3 | [
"Apache-2.0"
] | permissive | eth-sri/debin | 16fc0499901149bdc9818f268178569469f197df | 715771c1e1468eaafbb599d8bf81a19b5b2e22d2 | refs/heads/master | 2022-08-14T12:31:13.648564 | 2022-05-20T15:12:01 | 2022-05-20T15:12:01 | 160,524,006 | 392 | 64 | Apache-2.0 | 2022-06-22T05:14:48 | 2018-12-05T13:40:37 | Python | UTF-8 | Python | false | false | 33,661 | py | import traceback
import sys
import ctypes
from common import utils
from elfs.framebase import FrameBase
from elftools.dwarf.callframe import ZERO
from elftools.dwarf.locationlists import LocationEntry
from elftools.elf.elffile import ELFFile
from elements.regs import GivReg
from common.constants import UNKNOWN_LABEL
from common.constants import ENUM_DW_FORM_exprloc, ENUM_DW_TAG, ENUM_DW_AT, ENUM_DW_FORM
from common.constants import ENUM_ABBREV_CODE, ENUM_DW_CHILDREN, ENUM_DW_AT_language
from common.constants import POINTER, ENUM, ARRAY, UNION, STRUCT, VOID
from common.constants import SHORT, UNSIGNED_SHORT, CHAR, UNSIGNED_CHAR, LONG_LONG
from common.constants import UNSIGNED_LONG_LONG, LONG, UNSIGNED_LONG
from common.constants import INT, UNSIGNED_INT, BOOL
from common.constants import TEXT, RODATA, DATA, BSS, MAX_UPPER_BOUND
from common.constants import SYMTAB, STRTAB
from common.utils import decode_sleb128, decode_uleb128, decode_address, encode_address
class DebugInfo:
    def __init__(self, *args, **kwargs):
        """Load DWARF/symbol data from the debug build and index call frames.

        Required kwargs: 'binary' (the stripped binary under analysis) and
        'debug_elffile' (open file object of the matching debug ELF).
        """
        self.binary = kwargs['binary']
        # DWARF DIEs, keyed by absolute offset in .debug_info (filled later).
        self.dies = dict()
        self.debug_elffile = ELFFile(kwargs['debug_elffile'])
        if self.debug_elffile.has_dwarf_info():
            self.dwarf_info = self.debug_elffile.get_dwarf_info()
            self.location_lists = self.dwarf_info.location_lists()
        self.symtab = self.debug_elffile.get_section_by_name(SYMTAB)
        self.strtab = self.debug_elffile.get_section_by_name(STRTAB)
        self.call_frames = []
        self.init_call_frames()
    def init_call_frames(self):
        """Build self.call_frames from CFI: one FrameBase per decoded row whose
        CFA is register+offset, sorted by low_pc with high_pc filled in as the
        next frame's low_pc - 1 (the last frame extends to config.HIGH_PC)."""
        cfi_entries = []
        # Collect CFI from both the stripped binary (.eh_frame) and the debug
        # file (.debug_frame).
        if self.binary.elffile.get_dwarf_info().has_EH_CFI():
            cfi_entries += self.binary.elffile.get_dwarf_info().EH_CFI_entries()
        if self.dwarf_info.has_CFI():
            cfi_entries += self.dwarf_info.CFI_entries()
        call_frames = []
        for entry in cfi_entries:
            if not isinstance(entry, ZERO):  # skip terminator entries
                for row in entry.get_decoded().table:
                    cfa = row['cfa']
                    pc = row['pc']
                    # Only register-relative CFAs with a known register mapping are usable.
                    if cfa.reg is not None and cfa.offset is not None and cfa.reg in self.binary.config.REG_MAPPING:
                        call_frames.append(FrameBase(base_register=self.binary.config.REG_MAPPING[cfa.reg], offset=cfa.offset, low_pc=pc, high_pc=None))
        call_frames = sorted(call_frames, key=lambda f: f.low_pc)
        # Each frame is valid until the next one starts.
        for i, frame in enumerate(call_frames):
            if i < len(call_frames) - 1:
                frame.high_pc = call_frames[i + 1].low_pc - 1
        if len(call_frames) > 0:
            call_frames[-1].high_pc = self.binary.config.HIGH_PC
        self.call_frames = call_frames
    def get_pointer_ttype_die(self, die):
        """Return the DIE of the pointed-to type reached from `die`, walking
        DW_AT_type links until a DW_TAG_pointer_type is found; None when the
        chain ends without one."""
        die_type_offset = die.attributes.get('DW_AT_type', None)
        cu_offset = die.cu.cu_offset
        die_type = None
        # DW_AT_type values are CU-relative; self.dies is keyed by absolute offset.
        if die_type_offset is not None and die_type_offset.value + cu_offset in self.dies:
            die_type = self.dies[die_type_offset.value + cu_offset]
        else:
            # No direct type: the declaration may live on the abstract origin
            # (inlined copies) or on a separate specification DIE.
            abstract_origin_attr = die.attributes.get('DW_AT_abstract_origin', None)
            specification_attr = die.attributes.get('DW_AT_specification', None)
            if abstract_origin_attr is not None:
                origin_offset = abstract_origin_attr.value + die.cu.cu_offset
                return self.get_pointer_ttype_die(self.dies[origin_offset])
            elif specification_attr is not None:
                specification_offset = specification_attr.value + die.cu.cu_offset
                return self.get_pointer_ttype_die(self.dies[specification_offset])
        if die_type is None:
            return None
        else:
            if die.tag == 'DW_TAG_pointer_type':
                # `die` itself is the pointer: its DW_AT_type is the pointee.
                return die_type
            else:
                return self.get_pointer_ttype_die(die_type)
    def get_ttype_name(self, die):
        """Map a type DIE to one of the coarse training labels (POINTER, ENUM,
        ARRAY, UNION, STRUCT, a base-type name, or VOID), following typedef /
        const / volatile and origin/specification links as needed."""
        if die.tag == 'DW_TAG_pointer_type':
            return POINTER
        elif die.tag == 'DW_TAG_enumeration_type':
            return ENUM
        elif die.tag == 'DW_TAG_array_type':
            return ARRAY
        elif die.tag == 'DW_TAG_union_type':
            return UNION
        elif die.tag in ('DW_TAG_structure_type', 'DW_TAG_class_type'):
            return STRUCT
        elif die.tag == 'DW_TAG_base_type':
            type_name_attr = die.attributes.get('DW_AT_name', None)
            if type_name_attr is None:
                return VOID
            else:
                type_name = type_name_attr.value.decode('ascii')
                # Substring tests go from most to least specific:
                # short/char before "long long" before "long" before "int".
                if 'short' in type_name:
                    if 'unsigned' in type_name:
                        return UNSIGNED_SHORT
                    else:
                        return SHORT
                elif 'char' in type_name:
                    if 'unsigned' in type_name:
                        return UNSIGNED_CHAR
                    else:
                        return CHAR
                elif type_name.count('long') == 2:
                    if 'unsigned' in type_name:
                        return UNSIGNED_LONG_LONG
                    else:
                        return LONG_LONG
                elif type_name.count('long') == 1:
                    if 'unsigned' in type_name:
                        return UNSIGNED_LONG
                    else:
                        return LONG
                elif 'int' in type_name:
                    if 'unsigned' in type_name:
                        return UNSIGNED_INT
                    else:
                        return INT
                elif 'bool' in type_name.lower():
                    return BOOL
                else:
                    return VOID
        else: # ('DW_TAG_typedef', 'DW_TAG_const_type', 'DW_TAG_volatile_type'):
            # Wrapper tags: recurse through DW_AT_type, or fall back to the
            # abstract origin / specification DIE when no type is attached.
            die_type_offset = die.attributes.get('DW_AT_type', None)
            cu_offset = die.cu.cu_offset
            if die_type_offset is not None and die_type_offset.value + cu_offset in self.dies:
                die_type = self.dies[die_type_offset.value + cu_offset]
                return self.get_ttype_name(die_type)
            else:
                abstract_origin_attr = die.attributes.get('DW_AT_abstract_origin', None)
                specification_attr = die.attributes.get('DW_AT_specification', None)
                if abstract_origin_attr is not None:
                    origin_offset = abstract_origin_attr.value + die.cu.cu_offset
                    return self.get_ttype_name(self.dies[origin_offset])
                elif specification_attr is not None:
                    specification_offset = specification_attr.value + die.cu.cu_offset
                    return self.get_ttype_name(self.dies[specification_offset])
                else:
                    return VOID
    def get_name_origin(self, die):
        """Follow DW_AT_abstract_origin / DW_AT_specification links until a DIE
        carrying DW_AT_name is found; returns `die` itself when it already has
        a name or when no link exists."""
        name_attr = die.attributes.get('DW_AT_name', None)
        abstract_origin_attr = die.attributes.get('DW_AT_abstract_origin', None)
        specification_attr = die.attributes.get('DW_AT_specification', None)
        cu_offset = die.cu.cu_offset
        if name_attr is None:
            if abstract_origin_attr is not None:
                # Link values are CU-relative offsets.
                origin_offset = abstract_origin_attr.value + cu_offset
                return self.get_name_origin(self.dies[origin_offset])
            elif specification_attr is not None:
                origin_offset = specification_attr.value + cu_offset
                return self.get_name_origin(self.dies[origin_offset])
            else:
                return die
        else:
            return die
    def get_die_type(self, die):
        """Resolve the type DIE referenced by `die`, skipping typedef / const /
        volatile wrappers and following abstract-origin/specification links.

        NOTE: when nothing can be resolved this returns `die` itself (not
        None), so callers checking tags must tolerate a non-type DIE.
        """
        if die is None:
            return None
        die_type_offset = die.attributes.get('DW_AT_type', None)
        cu_offset = die.cu.cu_offset
        if die_type_offset is None:
            # No direct type: try the origin / specification DIE instead.
            abstract_origin_attr = die.attributes.get('DW_AT_abstract_origin', None)
            specification_attr = die.attributes.get('DW_AT_specification', None)
            if abstract_origin_attr is not None:
                origin_offset = abstract_origin_attr.value + cu_offset
                return self.get_die_type(self.dies[origin_offset])
            elif specification_attr is not None:
                origin_offset = specification_attr.value + cu_offset
                return self.get_die_type(self.dies[origin_offset])
            else:
                return die
        else:
            die_type = self.dies[die_type_offset.value + cu_offset]
            if die_type.tag in ('DW_TAG_typedef', 'DW_TAG_const_type', 'DW_TAG_volatile_type'):
                # Transparent wrappers — keep unwrapping.
                return self.get_die_type(die_type)
            else:
                return die_type
    def get_byte_size(self, die):
        """Return DW_AT_byte_size of `die`, or of the type it references when
        the DIE has no size of its own; None when no size can be determined."""
        byte_size_attr = die.attributes.get('DW_AT_byte_size', None)
        if byte_size_attr is not None:
            return byte_size_attr.value
        else:
            type_offset_attr = die.attributes.get('DW_AT_type', None)
            if type_offset_attr is None:
                return None
            else:
                # DW_AT_type is CU-relative; convert to an absolute offset.
                cu_offset = die.cu.cu_offset
                offset = type_offset_attr.value + cu_offset
                if offset not in self.dies:
                    return None
                else:
                    return self.get_byte_size(self.dies[offset])
def get_array_upper_bound(self, die):
for child in die.iter_children():
if child.tag == 'DW_TAG_subrange_type':
upper_bound_attr = child.attributes.get('DW_AT_upper_bound', None)
if upper_bound_attr is None:
return None
else:
if upper_bound_attr.form in ('DW_FORM_data1',
'DW_FORM_data2',
'DW_FORM_data4',
'DW_FORM_data8'):
return upper_bound_attr.value
elif upper_bound_attr.form == 'DW_FORM_exprloc':
loc = upper_bound_attr.value
if loc[0] == ENUM_DW_FORM_exprloc['DW_OP_const1u']:
return ctypes.c_uint8(loc[1]).value
elif loc[0] == ENUM_DW_FORM_exprloc['DW_OP_const1s']:
return ctypes.c_int8(loc[1]).value
elif loc[0] == ENUM_DW_FORM_exprloc['DW_OP_const2u']:
return ctypes.c_uint16(utils.decode_kbytes(loc[1:], 2)).value
elif loc[0] == ENUM_DW_FORM_exprloc['DW_OP_const2s']:
return ctypes.c_int16(utils.decode_kbytes(loc[1:], 2)).value
elif loc[0] == ENUM_DW_FORM_exprloc['DW_OP_const4u']:
return ctypes.c_uint32(utils.decode_kbytes(loc[1:], 2)).value
elif loc[0] == ENUM_DW_FORM_exprloc['DW_OP_const4s']:
return ctypes.c_int32(utils.decode_kbytes(loc[1:], 2)).value
elif loc[0] == ENUM_DW_FORM_exprloc['DW_OP_const8u']:
return ctypes.c_uint64(utils.decode_kbytes(loc[1:], 2)).value
elif loc[0] == ENUM_DW_FORM_exprloc['DW_OP_const8s']:
return ctypes.c_int64(utils.decode_kbytes(loc[1:], 2)).value
elif loc[0] == ENUM_DW_FORM_exprloc['DW_OP_constu']:
return utils.decode_uleb128(loc[1:])
elif loc[0] == ENUM_DW_FORM_exprloc['DW_OP_consts']:
return utils.decode_sleb128(loc[1:])
else:
return None
else:
return None
    def binary_train_info(self):
        """Transfer ground-truth names/types from the debug build to the
        stripped binary's elements, in three passes:

        1. DWARF pass keyed by address: subprograms matched via DW_AT_low_pc
           and global variables via DW_OP_addr locations.
        2. Symbol-table pass: fill in names still unknown after pass 1.
        3. DWARF pass keyed by name: match remaining subprograms/variables by
           (linkage) name against the names recovered in pass 2.
        """
        # Index every DIE by absolute offset so cross-references resolve.
        for cu in self.dwarf_info.iter_CUs():
            for die in cu.iter_DIEs():
                self.dies[die.offset] = die
        # NOTE(review): added_die is collected but never read afterwards.
        added_die = set()
        for cu in self.dwarf_info.iter_CUs():
            top_die = cu.get_top_DIE()
            low_pc_attr = top_die.attributes.get('DW_AT_low_pc', None)
            if low_pc_attr is not None:
                cu_low_pc = low_pc_attr.value
            else:
                cu_low_pc = 0
            for die in cu.iter_DIEs():
                if die.tag == 'DW_TAG_subprogram':
                    low_pc_attr = die.attributes.get('DW_AT_low_pc', None)
                    # high_pc_attr = die.attributes.get('DW_AT_high_pc', None)
                    # NOTE(review): `origin` is unused in this pass.
                    origin = self.get_name_origin(die)
                    if low_pc_attr is not None:
                        low_pc = low_pc_attr.value
                        if self.binary.functions.is_lowpc_function(low_pc):
                            function = self.binary.functions.get_function_by_lowpc(low_pc)
                            if function.is_run_init:
                                self.function_train_info(function, die, cu_low_pc, True)
                                added_die.add(die)
                            else:
                                pass
                        else:
                            pass
                if die.tag == 'DW_TAG_variable':
                    loc_attr = die.attributes.get('DW_AT_location', None)
                    if loc_attr is not None:
                        loc = loc_attr.value
                        form = loc_attr.form
                        if form == 'DW_FORM_block1' or form == 'DW_FORM_exprloc':
                            # Only fixed-address globals (DW_OP_addr + full-width address).
                            if loc[0] == ENUM_DW_FORM_exprloc['DW_OP_addr'] and len(loc) == self.binary.config.ADDRESS_BYTE_SIZE + 1:
                                offset = utils.decode_address(loc[1:], self.binary)
                                self.direct_offset_train_info(offset, die)
                            else:
                                pass
                        else:
                            pass
                    else:
                        pass
        # Pass 2: names from the (unstripped) symbol table.
        for sym in self.symtab.iter_symbols():
            ttype = sym.entry['st_info']['type']
            name = self.strtab.get_string(sym.entry['st_name'])
            if '@@' in name:
                # Drop symbol-versioning suffixes (e.g. "foo@@GLIBC_2.2.5").
                name = name[:name.find('@@')]
            value = sym.entry['st_value']
            if ttype == 'STT_FUNC' and self.binary.functions.is_lowpc_function(value):
                function = self.binary.functions.get_function_by_lowpc(value)
                if function.train_name == UNKNOWN_LABEL:
                    function.train_name = name
            if ttype == 'STT_OBJECT' and value in self.binary.direct_offsets:
                direct_offset = self.binary.direct_offsets[value]
                if direct_offset.train_name == UNKNOWN_LABEL:
                    direct_offset.train_name = name
        # Pass 3: match remaining DIEs by (linkage) name.
        for cu in self.dwarf_info.iter_CUs():
            top_die = cu.get_top_DIE()
            low_pc_attr = top_die.attributes.get('DW_AT_low_pc', None)
            if low_pc_attr is not None:
                cu_low_pc = low_pc_attr.value
            else:
                cu_low_pc = 0
            for die in cu.iter_DIEs():
                if die.tag == 'DW_TAG_subprogram':
                    origin = self.get_name_origin(die)
                    name_attr = origin.attributes.get('DW_AT_name', None)
                    if name_attr is not None:
                        name = name_attr.value.decode('ascii')
                        for function in self.binary.functions.functions:
                            if function.is_run_init \
                                    and (function.name == name or function.train_name == name):
                                self.function_train_info(function, die, cu_low_pc, True)
                                break
                    # Mangled linkage names, either on the DIE or its origin.
                    die_linkage_name_attr = die.attributes.get('DW_AT_linkage_name', None)
                    origin_linkage_name_attr = origin.attributes.get('DW_AT_linkage_name', None)
                    name = None
                    if die_linkage_name_attr is not None:
                        name = die_linkage_name_attr.value.decode('ascii')
                    elif origin_linkage_name_attr is not None:
                        name = origin_linkage_name_attr.value.decode('ascii')
                    if name is not None:
                        for function in self.binary.functions.functions:
                            if function.is_run_init \
                                    and (function.name == name or function.train_name == name):
                                self.function_train_info(function, die, cu_low_pc, True)
                                break
                if die.tag == 'DW_TAG_variable':
                    origin = self.get_name_origin(die)
                    name_attr = origin.attributes.get('DW_AT_name', None)
                    if name_attr is not None:
                        name = name_attr.value.decode('ascii')
                        for direct_offset in self.binary.direct_offsets.values():
                            if direct_offset.train_name == name \
                                    and direct_offset.ttype.train_name == UNKNOWN_LABEL:
                                ttype = self.get_ttype_name(die)
                                direct_offset.ttype.train_info(ttype)
        # for f in self.binary.functions.functions:
        #     if f.train_name != UNKNOWN_LABEL \
        #             and f.ttype.train_name == UNKNOWN_LABEL:
        #         f.ttype.train_info(VOID)
    def function_train_info(self, function, die, cu_low_pc, add_info):
        """Label one function from its DW_TAG_subprogram DIE: record frame
        bases, optionally set return-type/name labels (add_info), and walk all
        parameter/variable descendants to label their locations.

        :param function: the function element of the stripped binary.
        :param die: the matching DW_TAG_subprogram DIE.
        :param cu_low_pc: base PC of the compilation unit (for location lists).
        :param add_info: when True, also set the function's type/name labels.
        """
        frame_base_attr = die.attributes.get('DW_AT_frame_base', None)
        function.add_frame_bases(frame_base_attr, cu_low_pc)
        function.init_run = True
        if add_info:
            name = self.get_ttype_name(die)
            function.ttype.train_info(name)
            origin = self.get_name_origin(die)
            name_attr = origin.attributes.get('DW_AT_name', None)
            if name_attr is not None:
                function.train_name = name_attr.value.decode('ascii')
        descendants = []
        def get_die_descendants(d):
            # Skip inlined subroutines and call sites: their variables belong
            # to other functions.
            if d.tag in ('DW_TAG_inlined_subroutine', 'DW_TAG_GNU_call_site'):
                pass
            else:
                if d.tag in ('DW_TAG_formal_parameter', 'DW_TAG_variable'):
                    descendants.append(d)
                for child in d.iter_children():
                    get_die_descendants(child)
        get_die_descendants(die)
        for desc in descendants:
            if desc.tag in ('DW_TAG_formal_parameter', 'DW_TAG_variable'):
                loc_attr = desc.attributes.get('DW_AT_location', None)
                if loc_attr is not None:
                    loc = loc_attr.value
                    form = loc_attr.form
                    if form == 'DW_FORM_exprloc':
                        # Single location expression valid for the whole scope.
                        self.loc_train_info(function, loc, desc)
                    elif form in ('DW_FORM_data4', 'DW_FORM_sec_offset'):
                        # Offset into .debug_loc: PC-dependent location list.
                        self.location_list_train_info(function, loc, desc, cu_low_pc)
                    elif form == 'DW_FORM_block1':
                        if len(loc) == 1:
                            # Single-byte block: bare DW_OP_regN (value lives in a register).
                            if ENUM_DW_FORM_exprloc['DW_OP_reg0'] <= loc[0] <= ENUM_DW_FORM_exprloc['DW_OP_reg31'] \
                                    and (loc[0] - ENUM_DW_FORM_exprloc['DW_OP_reg0']) in self.binary.config.REG_MAPPING:
                                base_register = self.binary.config.REG_MAPPING[loc[0] - ENUM_DW_FORM_exprloc['DW_OP_reg0']]
                                self.reg_add_info(function, base_register, desc, None, None)
                        else:
                            self.loc_train_info(function, loc, desc)
                    else:
                        pass
    def fbreg_train_info(self, function, offset, die, low_pc=None, high_pc=None):
        """Translate a DW_OP_fbreg offset (frame-base relative) into concrete
        (register, offset) pairs using the function's frame bases, and label
        the matching indirect offsets.

        With several frame bases the label is restricted to the PC range where
        each base is valid, intersected with [low_pc, high_pc) when given.
        """
        if len(function.frame_bases) == 0:
            pass
        elif len(function.frame_bases) == 1:
            frame_base = function.frame_bases[0]
            base_pointer = frame_base.base_register
            frame_offset = frame_base.offset + offset
            self.indirect_offset_train_info(function, base_pointer, frame_offset, die, self.get_die_type(die))
        else:
            for frame_base in function.frame_bases:
                base_pointer = frame_base.base_register
                frame_offset = frame_base.offset + offset
                frame_low_pc = frame_base.low_pc
                frame_high_pc = frame_base.high_pc
                if low_pc is None and high_pc is None:
                    self.indirect_offset_train_info(function, base_pointer, frame_offset, die, self.get_die_type(die), frame_low_pc, frame_high_pc)
                elif high_pc > frame_low_pc and low_pc < frame_high_pc:
                    # Ranges overlap: label only the intersection.
                    self.indirect_offset_train_info(function, base_pointer, frame_offset, die, self.get_die_type(die), max(frame_low_pc, low_pc), min(frame_high_pc, high_pc))
    def indirect_offset_add_info(self, function, base_pointer, offset, die, low_pc, high_pc, ttype):
        """Attach (die, ttype) training info to the function's indirect-offset
        elements at (base_pointer, offset); when a PC range is given, only
        elements accessed at a PC in [low_pc, high_pc) are labeled."""
        key = (base_pointer, offset)
        # print(key)
        # traceback.print_stack(file=sys.stdout)
        if key in function.indirect_offsets:
            for indirect_offset in function.indirect_offsets[key].values():
                if low_pc is None and high_pc is None:
                    indirect_offset.train_info(die, ttype)
                else:
                    # One access inside the range is enough to label the element.
                    for pc in indirect_offset.pcs:
                        if pc >= low_pc and pc < high_pc:
                            indirect_offset.train_info(die, ttype)
                            break
    def reg_add_info(self, function, base_register, die, low_pc, high_pc):
        """Label all non-given registers matching `base_register` (restricted
        to [low_pc, high_pc) when given); for pointers, additionally label the
        dereference at offset 0 with the pointee's type."""
        ttype = self.get_ttype_name(die)
        for reg in function.regs.values():
            if not isinstance(reg, GivReg) and reg.base_register == base_register:
                for pc in reg.pcs:
                    if (low_pc is None and high_pc is None) or low_pc <= pc < high_pc:
                        reg.train_info(die, ttype)
                        break
        if ttype == POINTER:
            # A pointer in a register implies [reg + 0] holds the pointee type.
            pointer_ttype_die = self.get_pointer_ttype_die(die)
            pointer_ttype_name = self.get_ttype_name(pointer_ttype_die) if pointer_ttype_die is not None else VOID
            self.indirect_offset_train_info(function, base_register, 0, die, self.get_die_type(pointer_ttype_die), low_pc, high_pc, pointer_ttype_name)
def indirect_offset_train_info(self, function, base_pointer, offset, die, die_type, low_pc=None, high_pc=None, ttype=None):
if ttype is None:
ttype = self.get_ttype_name(die)
if die_type is None:
self.indirect_offset_add_info(function, base_pointer, offset, die, low_pc, high_pc, ttype)
elif die_type.tag == 'DW_TAG_array_type':
byte_size = self.get_byte_size(die_type)
upper_bound = self.get_array_upper_bound(die_type)
if byte_size is not None and upper_bound is not None:
if upper_bound * byte_size > MAX_UPPER_BOUND:
for key in function.indirect_offsets:
if key[0] == base_pointer and offset <= key[1] < upper_bound * byte_size + offset:
self.indirect_offset_add_info(function, key[0], key[1], die, low_pc, high_pc, ttype)
else:
for i in range(0, upper_bound * byte_size):
off = offset + i
self.indirect_offset_add_info(function, base_pointer, off, die, low_pc, high_pc, ttype)
else:
self.indirect_offset_add_info(function, base_pointer, offset, die, low_pc, high_pc, ttype)
elif die_type.tag == 'DW_TAG_union_type':
byte_size = self.get_byte_size(die_type)
if byte_size is not None:
if byte_size > MAX_UPPER_BOUND:
for key in function.indirect_offsets:
if key[0] == base_pointer and offset <= key[1] < byte_size + offset:
self.indirect_offset_add_info(function, key[0], key[1], die, low_pc, high_pc, ttype)
else:
for i in range(0, byte_size):
off = offset + i
self.indirect_offset_add_info(function, base_pointer, off, die, low_pc, high_pc, ttype)
else:
self.indirect_offset_add_info(function, base_pointer, offset, die, low_pc, high_pc, ttype)
elif die_type.tag in ('DW_TAG_structure_type', 'DW_TAG_class_type'):
byte_size = self.get_byte_size(die_type)
if byte_size is not None:
if byte_size > MAX_UPPER_BOUND:
for key in function.indirect_offsets:
if key[0] == base_pointer and offset <= key[1] < byte_size + offset:
self.indirect_offset_add_info(function, key[0], key[1], die, low_pc, high_pc, ttype)
else:
for i in range(0, byte_size):
off = offset + i
self.indirect_offset_add_info(function, base_pointer, off, die, low_pc, high_pc, ttype)
else:
self.indirect_offset_add_info(function, base_pointer, offset, die, low_pc, high_pc, ttype)
for child in die_type.iter_children():
child_offset_attr = die.attributes.get('DW_AT_data_member_location', None)
if child_offset_attr is not None:
if child_offset_attr.form == 'DW_FORM_block1':
if child_offset_attr.value[0] == 0x23:
child_offset = utils.decode_uleb128(child_offset_attr[1:])
off = offset + child_offset
self.indirect_offset_train_info(function, base_pointer, off, die, die_type, low_pc, high_pc)
else:
pass
elif child_offset_attr.form == 'DW_FORM_data1':
child_offset = child_offset_attr.value
off = offset + child_offset
self.indirect_offset_train_info(function, base_pointer, off, die, die_type, low_pc, high_pc)
else:
pass
else:
byte_size = self.get_byte_size(die_type)
if byte_size is not None:
if byte_size > MAX_UPPER_BOUND:
for key in function.indirect_offsets:
if key[0] == base_pointer and offset <= key[1] < byte_size + offset:
self.indirect_offset_add_info(function, key[0], key[1], die, low_pc, high_pc, ttype)
else:
for i in range(0, byte_size):
off = offset + i
self.indirect_offset_add_info(function, base_pointer, off, die, low_pc, high_pc, ttype)
else:
self.indirect_offset_add_info(function, base_pointer, offset, die, low_pc, high_pc, ttype)
def direct_offset_train_info(self, offset, die, ttype=None):
die_type = self.get_die_type(die)
if ttype is None:
ttype = self.get_ttype_name(die)
if die_type is None:
if offset in self.binary.direct_offsets:
self.binary.direct_offsets[offset].train_info(die, ttype)
else:
pass
elif die_type.tag == 'DW_TAG_array_type':
byte_size = self.get_byte_size(die_type)
upper_bound = self.get_array_upper_bound(die_type)
if byte_size is not None and upper_bound is not None:
if upper_bound * byte_size > MAX_UPPER_BOUND:
for off in self.binary.direct_offsets:
if offset <= off < upper_bound * byte_size:
self.binary.direct_offsets[off].train_info(die, ttype)
else:
for i in range(0, upper_bound * byte_size):
off = offset + i
if off in self.binary.direct_offsets:
self.binary.direct_offsets[off].train_info(die, ttype)
elif offset in self.binary.direct_offsets:
self.binary.direct_offsets[offset].train_info(die, ttype)
else:
pass
elif die_type.tag == 'DW_TAG_union_type':
byte_size = self.get_byte_size(die_type)
if byte_size is not None:
if byte_size > MAX_UPPER_BOUND:
for off in self.binary.direct_offsets:
if offset <= off < offset + byte_size:
self.binary.direct_offsets[off].train_info(die, ttype)
else:
for i in range(0, byte_size):
off = offset + i
if off in self.binary.direct_offsets:
self.binary.direct_offsets[off].train_info(die, ttype)
elif offset in self.binary.direct_offsets:
self.binary.direct_offsets[offset].train_info(die, ttype)
else:
pass
elif die_type.tag in ('DW_TAG_structure_type', 'DW_TAG_class_type'):
byte_size = self.get_byte_size(die_type)
if byte_size is not None:
if byte_size > MAX_UPPER_BOUND:
for off in self.binary.direct_offsets:
if offset <= off < offset + byte_size:
self.binary.direct_offsets[off].train_info(die, ttype)
else:
for i in range(0, byte_size):
off = offset + i
if off in self.binary.direct_offsets:
self.binary.direct_offsets[off].train_info(die, ttype)
elif offset in self.binary.direct_offsets:
self.binary.direct_offsets[offset].train_info(die, ttype)
else:
pass
for child in die_type.iter_children():
child_offset_attr = die.attributes.get('DW_AT_data_member_location', None)
if child_offset_attr is not None:
if child_offset_attr.form == 'DW_FORM_block1':
if child_offset_attr.value[0] == 0x23:
child_offset = utils.decode_uleb128(child_offset_attr[1:])
off = offset + child_offset
self.direct_offset_train_info(off, die, ttype)
else:
pass
elif child_offset_attr.form == 'DW_FORM_data1':
child_offset = child_offset_attr.value
off = offset + child_offset
self.direct_offset_train_info(off, die, ttype)
else:
pass
elif offset in self.binary.direct_offsets:
byte_size = self.get_byte_size(die_type)
if byte_size is not None:
if byte_size > MAX_UPPER_BOUND:
for off in self.binary.direct_offsets:
if offset <= off < byte_size + offset:
self.binary.direct_offsets[off].train_info(die, ttype)
else:
for i in range(0, byte_size):
off = offset + i
if off in self.binary.direct_offsets:
self.binary.direct_offsets[off].train_info(die, ttype)
else:
self.binary.direct_offsets[offset].train_info(die, ttype)
else:
pass
    def location_list_train_info(self, function, loc_offset, die, cu_low_pc):
        """Process a .debug_loc location list: apply each concrete entry's
        location expression over its PC range (made absolute via cu_low_pc).

        Base-address-selection entries (non-LocationEntry) are ignored.
        """
        location_list = self.location_lists.get_location_list_at_offset(loc_offset)
        for entry in location_list:
            if isinstance(entry, LocationEntry):
                # Entry offsets are relative to the CU's base PC.
                low_pc = entry.begin_offset + cu_low_pc
                high_pc = entry.end_offset + cu_low_pc
                loc = entry.loc_expr
                if len(loc) > 0:
                    # print(entry)
                    self.loc_train_info(function, loc, die, low_pc, high_pc)
                else:
                    pass
            else:
                pass
    def loc_train_info(self, function, loc, die, low_pc=None, high_pc=None):
        """Dispatch a DWARF location expression to the matching labeler:
        DW_OP_fbreg (frame-base relative), DW_OP_bregN (register + offset),
        DW_OP_addr (absolute address), or bare DW_OP_regN (in a register).
        Unsupported opcodes are ignored."""
        if loc[0] == ENUM_DW_FORM_exprloc['DW_OP_fbreg']:
            # Signed LEB128 offset from the frame base.
            self.fbreg_train_info(function, decode_sleb128(loc[1:]), die, low_pc, high_pc)
        elif ENUM_DW_FORM_exprloc['DW_OP_breg0'] <= loc[0] <= ENUM_DW_FORM_exprloc['DW_OP_breg31'] \
                and (loc[0] - ENUM_DW_FORM_exprloc['DW_OP_breg0']) in self.binary.config.REG_MAPPING:
            base_pointer = self.binary.config.REG_MAPPING[loc[0] - ENUM_DW_FORM_exprloc['DW_OP_breg0']]
            offset = decode_sleb128(loc[1:])
            self.indirect_offset_train_info(function, base_pointer, offset, die, self.get_die_type(die))
        elif loc[0] == ENUM_DW_FORM_exprloc['DW_OP_addr']:
            offset = decode_address(loc[1:], self.binary)
            self.direct_offset_train_info(offset, die)
        elif ENUM_DW_FORM_exprloc['DW_OP_reg0'] <= loc[0] <= ENUM_DW_FORM_exprloc['DW_OP_reg31'] \
                and (loc[0] - ENUM_DW_FORM_exprloc['DW_OP_reg0']) in self.binary.config.REG_MAPPING:
            base_register = self.binary.config.REG_MAPPING[loc[0] - ENUM_DW_FORM_exprloc['DW_OP_reg0']]
            self.reg_add_info(function, base_register, die, low_pc, high_pc)
        else:
            pass
| [
"he4444mingtian@gmail.com"
] | he4444mingtian@gmail.com |
bf7d221c249a3241ed1caec79c3c80e33dfe5221 | 35fb414cc9f5c408dc5d2c8316a5b6e4de3ccf22 | /test/templates/analyze_2l_2tau_cfg.py | 569b94fbe3d5ab083963e3c54bb48fe7dbaef4c9 | [] | no_license | kartikmaurya/tth-htt | abf1abafc9335da9687938f8588550a86631f751 | 8486aa6f33085a7b2d665e9215b828970f6ee8a7 | refs/heads/master | 2020-05-05T02:09:31.876729 | 2019-04-05T06:54:50 | 2019-04-05T06:54:50 | 177,517,377 | 0 | 0 | null | 2019-03-25T05:01:21 | 2019-03-25T05:01:21 | null | UTF-8 | Python | false | false | 4,412 | py | import FWCore.ParameterSet.Config as cms
import os
from tthAnalysis.HiggsToTauTau.configs.recommendedMEtFilters_cfi import *
from tthAnalysis.HiggsToTauTau.configs.EvtYieldHistManager_cfi import *
process = cms.PSet()
process.fwliteInput = cms.PSet(
fileNames = cms.vstring(),
maxEvents = cms.int32(-1),
outputEvery = cms.uint32(100000)
)
process.fwliteOutput = cms.PSet(
fileName = cms.string('')
)
# Parameters of the 2-lepton + 2-tau analysis; empty strings/vectors are
# placeholders filled in by the job-configuration scripts.
process.analyze_2l_2tau = cms.PSet(
    treeName = cms.string('Events'),
    process = cms.string(''),
    histogramDir = cms.string(''),
    era = cms.string(''),
    # HLT paths per channel and per-channel on/off switches.
    triggers_1e = cms.vstring(),
    use_triggers_1e = cms.bool(True),
    triggers_2e = cms.vstring(),
    use_triggers_2e = cms.bool(True),
    triggers_1mu = cms.vstring(),
    use_triggers_1mu = cms.bool(True),
    triggers_2mu = cms.vstring(),
    use_triggers_2mu = cms.bool(True),
    triggers_1e1mu = cms.vstring(),
    use_triggers_1e1mu = cms.bool(True),
    apply_offline_e_trigger_cuts_1e = cms.bool(True),
    apply_offline_e_trigger_cuts_2e = cms.bool(True),
    apply_offline_e_trigger_cuts_1mu = cms.bool(True),
    apply_offline_e_trigger_cuts_2mu = cms.bool(True),
    apply_offline_e_trigger_cuts_1e1mu = cms.bool(True),
    # Object selection.
    electronSelection = cms.string(''),
    muonSelection = cms.string(''),
    lep_mva_cut = cms.double(1.),
    apply_leptonGenMatching = cms.bool(True),
    leptonChargeSelection = cms.string(''),
    hadTauChargeSelection = cms.string(''),
    hadTauGenMatch = cms.string('all'),
    hadTauSelection = cms.string(''),
    apply_hadTauGenMatching = cms.bool(False),
    chargeSumSelection = cms.string(''),
    # Fake-rate weights for leptons and hadronic taus.
    applyFakeRateWeights = cms.string(""),
    leptonFakeRateWeight = cms.PSet(
        inputFileName = cms.string(""),
        histogramName_e = cms.string(""),
        histogramName_mu = cms.string("")
    ),
    hadTauFakeRateWeight = cms.PSet(
        inputFileName = cms.string(""),
        lead = cms.PSet(
            absEtaBins = cms.vdouble(-1., 1.479, 9.9),
            graphName = cms.string("jetToTauFakeRate/$hadTauSelection/$etaBin/jetToTauFakeRate_mc_hadTaus_pt"),
            applyGraph = cms.bool(True),
            fitFunctionName = cms.string("jetToTauFakeRate/$hadTauSelection/$etaBin/fitFunction_data_div_mc_hadTaus_pt"),
            applyFitFunction = cms.bool(True)
        ),
        sublead = cms.PSet(
            absEtaBins = cms.vdouble(-1., 1.479, 9.9),
            graphName = cms.string("jetToTauFakeRate/$hadTauSelection/$etaBin/jetToTauFakeRate_mc_hadTaus_pt"),
            applyGraph = cms.bool(True),
            fitFunctionName = cms.string("jetToTauFakeRate/$hadTauSelection/$etaBin/fitFunction_data_div_mc_hadTaus_pt"),
            applyFitFunction = cms.bool(True)
        )
    ),
    minNumJets = cms.int32(2),
    # Event-level weights and filters.
    isMC = cms.bool(True),
    central_or_shift = cms.string(''),
    lumiScale = cms.double(1.),
    apply_genWeight = cms.bool(True),
    apply_DYMCReweighting = cms.bool(False),
    apply_hlt_filter = cms.bool(False),
    apply_met_filters = cms.bool(True),
    cfgMEtFilter = cms.PSet(),
    apply_hadTauFakeRateSF = cms.bool(False),
    fillGenEvtHistograms = cms.bool(False),
    cfgEvtYieldHistManager = cms.PSet(),
    # Input branch names (nanoAOD collections).
    branchName_electrons = cms.string('Electron'),
    branchName_muons = cms.string('Muon'),
    branchName_hadTaus = cms.string('Tau'),
    branchName_jets = cms.string('Jet'),
    branchName_met = cms.string('MET'),
    branchName_memOutput = cms.string(''),
    branchName_genLeptons = cms.string('GenLep'),
    branchName_genHadTaus = cms.string('GenVisTau'),
    branchName_genPhotons = cms.string('GenPhoton'),
    branchName_genJets = cms.string('GenJet'),
    redoGenMatching = cms.bool(True),
    selEventsFileName_input = cms.string(''),
    selEventsFileName_output = cms.string(''),
    selectBDT = cms.bool(False),
    # Synchronization-ntuple output (empty = disabled).
    syncNtuple = cms.PSet(
        tree = cms.string(''),
        output = cms.string(''),
        requireGenMatching = cms.bool(False),
    ),
    useNonNominal = cms.bool(False),
    isDEBUG = cms.bool(False),
    hasLHE = cms.bool(True),
    # Optional 2D event-weight histogram lookup.
    evtWeight = cms.PSet(
        apply = cms.bool(False),
        histogramFile = cms.string(''),
        histogramName = cms.string(''),
        branchNameXaxis = cms.string(''),
        branchNameYaxis = cms.string(''),
        branchTypeXaxis = cms.string(''),
        branchTypeYaxis = cms.string(''),
    ),
)
| [
"karlehataht@gmail.com"
] | karlehataht@gmail.com |
9742c90e0453936c31dfa9a52658cbe850b93beb | 0e1eec1b43b0eea7af05dec1c377046a91ab7616 | /setup.py | df4613e535249989b8f0d81ffc2637861b9fd499 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | zgababa/puke | b54ebcafd0f7c9f13f3e22dacb6eab2b0ef374e8 | 9428f08332035dac61fefe4866e8d50421c04bfd | refs/heads/master | 2021-01-17T21:29:21.926824 | 2013-05-21T00:11:51 | 2013-05-21T00:11:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,101 | py | #!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
import sys, os
import pkg_resources
major, minor = sys.version_info[:2]
if major < 2 and minor < 6:
raise Exception("Puke requires Python 2.6")
import logging
setup(
name = "puke",
version = "1.5.20",
packages = ['puke'],
scripts = [
'bin/puke',
'bin/puke.js.compress',
'bin/puke.css.compress'
],
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
install_requires = ['pyscss', 'closure_linter', 'colorama', 'pyyaml', 'paramiko', 'requests==1.2.1'],
dependency_links = ['http://closure-linter.googlecode.com/files/closure_linter-latest.tar.gz'],
# metadata for upload to PyPI
author = "Emmanuel Tabard",
author_email = "manu@webitup.fr",
description = "Puke is a straightforward build system",
license = "http://www.gnu.org/copyleft/gpl.html",
keywords = "build system python",
url = 'http://github.com/webitup/puke',
include_package_data = True
)
| [
"e.tabard@gmail.com"
] | e.tabard@gmail.com |
0c2fddd11b78d0ae7d34b0e19aadb724ad55b1a1 | 9d652cc94bf07c149cd6c7c6060b0f97875a78d4 | /apps/my_app/views.py | 7b94dacae39a7d56bf02bd6f0ab841340dae1466 | [] | no_license | herimiguel/cdExam | 2c84a46f526518b691de0f6bfe215d2713664f76 | a119b9b6f336b035ad7f003ac4e44a9ce4d67ee1 | refs/heads/master | 2020-03-19T08:25:56.222809 | 2018-06-05T19:13:29 | 2018-06-05T19:13:29 | 136,203,782 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,136 | py | from __future__ import unicode_literals
from django.shortcuts import render, redirect
from django.contrib import messages
from models import *
from django.db import IntegrityError
from django.core.exceptions import ObjectDoesNotExist
# Create your views here.
def index(request):
return render(request, 'my_app/index.html')
def register(request):
if request.method=='POST':
firstName= request.POST['firstName']
lastName= request.POST['lastName']
email= request.POST['email']
password= request.POST['password']
conPassword= request.POST['conPassword']
isValid=True
minVal= 3
maxVP= 8
if len(request.POST['firstName']) < minVal:
messages.error(request, 'Name needs to be at least 3 characters!')
isValid = False
if len(request.POST['lastName']) < minVal:
messages.error(request, 'Last Name needs to be at least 3 characters!')
isValid = False
if len(request.POST['email']) < minVal:
messages.error(request, 'Email is required!')
isValid = False
if request.POST['email'] != email:
messages.error(request, 'Email is already registered!')
isValid = False
if len(request.POST['password']) < minVal:
messages.error(request, 'Password is required!')
isValid = False
if request.POST['conPassword'] != password:
messages.error(request, 'Password confirmation failed!')
isValid = False
if not isValid:
return redirect('/')
if request.POST['conPassword'] == password:
try:
user=User.objects.create(firstName=firstName, lastName=lastName, email=email, password=password )
except IntegrityError:
messages.error(request, 'This Email is already registered!')
return redirect('/')
request.session['user.id']= user.id
return redirect('my_app:viewItems')
# return render(request,'myApp/success.html')
def login(request):
if request.method=='POST':
email = request.POST['email']
password= request.POST['password']
isValid= True
minVal= 3
if len(request.POST['email']) < minVal:
messages.error(request, 'Email is required!')
isValid = False
if len(request.POST['password']) < minVal:
messages.error(request, 'Password is required!')
isValid = False
try:
User.objects.get(email=request.POST['email'], password= request.POST['password'])
except ObjectDoesNotExist:
messages.error(request, "Email and Password don't match!")
isValid = False
else:
messages.error(request, " ")
if not isValid:
return redirect('/')
else:
request.session['user.id'] = (User.objects.get(email=request.POST['email'])).id
return redirect('my_app:viewItems')
# return render(request, 'my_app/success.html')
# def success(request):
# if 'user.id' in request.session.keys():
# user= User.objects.get(id=request.session['user.id'])
# context={
# 'user': user
# }
# return render(request, 'my_app/success.html', context)
def viewItems(request):
user= request.session['user.id']
context={
'items': Item.objects.all().exclude(additions__user_id=user),
'myItems': Addition.objects.filter(user_id=user),
'additions': Addition.objects.all(),
'user': User.objects.get(id=request.session['user.id'])
}
return render(request, 'my_app/success.html', context)
# return render(request, 'my_app/success.html', context)
def logOut(request):
request.session.clear()
messages.success(request, 'Successfully logged out')
return redirect('/')
def addItem(request):
if request.method == 'POST':
user = User.objects.get(id=request.session['user.id'])
itemName = request.POST['itemName']
isValid=True
minVal= 3
if len(request.POST['itemName']) < minVal:
messages.error(request, 'COVFEFE! Your Wishlist Item must contian at least 3 characters!')
isValid = False
if not isValid:
return redirect('my_app:viewItems')
else:
Item.objects.create(itemName=itemName, creator = user)
messages.error(request, "HOPE YOUR WISH COMES TRUE")
return redirect('my_app:viewItems')
def toItems(request, id):
user= request.session['user.id']
context={
'item': Item.objects.get(id=id),
# 'myItems': Addition.objects.filter(user_id=user),
'additions': Addition.objects.filter(item_id=id)
}
return render(request, 'my_app/show.html', context)
def addToMyItem(request, item_id):
Addition.objects.create(item_id=item_id, user_id=request.session['user.id'])
return redirect('my_app:viewItems')
def deleteItem(request, item_id):
item= Addition.objects.get(item_id=item_id, user_id=request.session['user.id'])
item.delete()
return redirect('my_app:viewItems')
def deleteFromD(request, id):
item= Item.objects.get(id=id)
item.delete()
return redirect('my_app:viewItems')
| [
"herimiguel84@hotmail.com"
] | herimiguel84@hotmail.com |
904f11ece1f3a1f0e9f815aa7965f064e2510a83 | dbe770c12a3186e439ffe7bd1f3853a1b3ec6e4f | /test1.py | dab87f2a97cd837ab8954612da96924a871cd88a | [] | no_license | ankurmishra727/JenkinsWithJenkinsFile2 | d5d2f659b514c334e22736a1809946b6165dbc4e | 80632d059612583a9d8e1991415ecd603657146b | refs/heads/master | 2020-03-19T06:07:45.947120 | 2018-06-04T10:54:06 | 2018-06-04T10:54:06 | 135,992,780 | 0 | 0 | null | 2018-06-04T09:51:34 | 2018-06-04T08:15:14 | Python | UTF-8 | Python | false | false | 44 | py | print("merging into master from branch 1")
| [
"ankurgargmishra@gmail.com"
] | ankurgargmishra@gmail.com |
73b01d6e83f15e3b8998e48fde1d8e9a8e9c8657 | 5b7a0d2c364e40581eeff6c592067c954b96aa5b | /test_circle_ellipse.py | d03fd6ea80484a28a8acc42dbf20a692f6fa80ae | [] | no_license | skconan/dice_detection | a0f5afbfd1d5e38cf6f5d72872103280690e5ffc | da5b065398c0976b90833a10e6dfcde162ce1540 | refs/heads/master | 2020-03-18T16:42:32.272709 | 2018-07-05T04:26:47 | 2018-07-05T04:28:03 | 134,981,877 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,445 | py | import cv2 as cv
from lib import *
import numpy as np
from dice_detection import *
if __name__=='__main__':
cap = cv.VideoCapture(CONST.VDO_PATH + 'dice_01.mp4')
while True:
ret, image = cap.read()
if image is None:
continue
# image = cv.resize(image,(0,0),fx=0.5,fy=0.5)
image = pre_processing(image)
mask_th = find_mask_threshold(image)
img = mask_th.copy()
img.fill(0)
_,cnts,hierachy = cv.findContours(mask_th,cv.RETR_CCOMP,cv.CHAIN_APPROX_NONE)
ct = 0
x_min = 100000
x_max = -1
y_min = 100000
y_max = -1
for (cnt,hh) in zip(cnts,hierachy[0]):
if len(cnt) < 5:
continue
(x,y),(w,h),angle = ellipse = cv.fitEllipse(cnt)
x,y,_,_ = cv.boundingRect(cnt)
area = cv.contourArea(cnt)
area_ellipse = math.pi * (w/2.0) * (h/2.0)
hull = cv.convexHull(cnt)
hull_area = cv.contourArea(hull)
solidity = float(area)/hull_area
print(ct,w,h,w/h, solidity, hh)
ct += 1
# print()
if not (list(hh[2:]) == [-1,-1]):
continue
if not (w >= 8 and h>=8):
continue
if not 0.35 <= float(w)/h < 1.2:
continue
if not solidity >= 0.925 or not area/area_ellipse >= 0.8:
continue
if area > 10000:
continue
box = cv.boxPoints(ellipse)
box = np.int0(box)
cv.ellipse(img,ellipse,(255),-1)
x,y,w,h = cv.boundingRect(cnt)
dice_size = max(h/2.0,w/2.0) * 9
# cv.rectangle(img,(int(x-(w*0.5)),int(y-(h*0.5))),(int(x+(w*4.5)),int(y+(h*4.5))),(155),1)
cv.rectangle(img,(int(x-(w*2)),int(y-(h*2))),(int(x+(w*2.75)),int(y+(h*2.75))),(155),1)
# cv.rectangle(img,(int(x+(w*0.5)),int(y+(h*0.5))),(int(x-(w*4.5)),int(y-(h*4.5))),(155),1)
cv.rectangle(img,(int(x),int(y)),(int(x+w),int(y+h)),(155),1)
# img = cv.drawContours(img,[box],0,(0,0,255),1)
# img = cv.drawContours(img,cnt,-1,(0,0,255),1)
cv.imshow('img',img)
cv.imshow('image',image)
k = cv.waitKey(-1) & 0xff
if k == ord('q'):
break
cap.release()
cv.destroyAllWindows() | [
"supakit.kr@gmail.com"
] | supakit.kr@gmail.com |
fc9eada358e8a8bab6e2d5cabb8ef8dc7c58307a | 7ff9410466d608d5fc1df2a0d3c6f4ddfc3b713c | /xml_to_csv.py | 8eea90b9897b514f6a7356f7affd001615bc52a9 | [] | no_license | wilson-boca/identify-objects | 03d0b539d9ad1358cf3e95922e3003bd874b7127 | 07626727c31b1ae65e40ff99ff5c68ae8ed54d1b | refs/heads/master | 2023-04-05T05:43:53.356305 | 2020-03-30T14:59:22 | 2020-03-30T14:59:22 | 250,657,993 | 0 | 0 | null | 2023-03-24T22:34:29 | 2020-03-27T22:02:32 | Python | UTF-8 | Python | false | false | 1,189 | py | import os
import glob
import pandas as pd
import xml.etree.ElementTree as ET
def xml_to_csv(path):
xml_list = []
for xml_file in glob.glob(path + '/*.xml'):
tree = ET.parse(xml_file)
root = tree.getroot()
for member in root.findall('object'):
value = (root.find('filename').text,
int(root.find('size')[0].text),
int(root.find('size')[1].text),
member[0].text,
int(member[4][0].text),
int(member[4][1].text),
int(member[4][2].text),
int(member[4][3].text)
)
xml_list.append(value)
column_name = ['filename', 'width', 'height', 'class', 'xmin', 'ymin', 'xmax', 'ymax']
xml_df = pd.DataFrame(xml_list, columns=column_name)
return xml_df
def main():
for folder in ['train', 'test']:
image_path = os.path.join(os.getcwd(), ('images/' + folder))
xml_df = xml_to_csv(image_path)
xml_df.to_csv(('images/'+folder+'_labels.csv'), index=None)
print('Successfully converted xml to csv.')
if __name__ == '__main__':
main()
| [
"wilson.boca@gmail.com"
] | wilson.boca@gmail.com |
2a1a2b5c39644226f3151bea35c55800e3d74fde | eafd177a43d08eb4b09c94af5b8073916598013b | /Conjugate Gradient.py | e83170f7ca716f7b9c4475acb4213d0632f4f345 | [] | no_license | lechuandafo/Simple-optimization-problem | acb16772a3e0f9c0036ada1a13288a6159ca6f9a | c93906a5c792a75880f80f6f44cbab81af30d0a6 | refs/heads/master | 2020-06-13T22:36:03.605662 | 2019-07-02T07:22:30 | 2019-07-02T07:22:30 | 194,810,275 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,037 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Nov 17 13:39:12 2018
@author: YLC
"""
import numpy as np
x = np.array([0,0,0,0]).T #.T表示转置,下同
H = np.array([[158,20,90,101],[20,36,46,61],[90,46,306,156],[101,61,156,245]])
g = np.array([8,-5,1,6]).T
def grad(H,x,g): #梯度计算公式,由原方程求导得到
return np.dot(H,x)-g
eta = grad(H,x,g) #梯度
d = -eta #梯度方向
i = 1 #迭代次数
while(np.linalg.norm(eta,ord=2) > 1e-10):
alpha = -np.dot(eta.T,d)/np.dot(np.dot(d.T,H),d)
x = x + np.dot(alpha,d)
eta = grad(H,x,g)
d = -eta + np.dot(np.dot(np.dot(eta.T,H),d)/np.dot(np.dot(d.T,H),d),d)
#print("========================================")
#print("迭代第"+str(i)+"次||eta||的值为:",np.linalg.norm(eta,ord=2))
#print("迭代第"+str(i)+"次alpha的值为:\n",alpha)
#print("迭代第"+str(i)+"次eta的值为:\n",eta)
#print("迭代第"+str(i)+"次d的值为:\n",d)
print("迭代第"+str(i)+"次x的值为:\n",x)
i = i + 1 | [
"noreply@github.com"
] | noreply@github.com |
78299487affb1d72f08ac00fb8585935f8fa1a0c | 5af19625143ee8732b09541f4f84169cfa58bf0f | /10-23-19/forloop_nested.py | 9f8e2f8f234fd89328da8fdfcc958c2cd7038a75 | [] | no_license | markymauro13/CSIT104_05FA19 | 1ffef1643d0e5908128b75783ffbd7dc735cd060 | f65f5205730fc7890edb5fa7e174a4ab897f9f7f | refs/heads/master | 2020-07-28T07:05:55.293496 | 2019-12-11T15:37:19 | 2019-12-11T15:37:19 | 209,346,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 95 | py | for i in range(1,5):
j = 0
while j < i:
print(j, end = '')
j += 1
| [
"noreply@github.com"
] | noreply@github.com |
17e16a08041f1fc5702bff45cbade47ad9622093 | eceeef628f926a51797f6bbe1bfd409c566d3d3b | /Res18_T2_transfer.py | cb27dac94ca41ab0b66f921c24551c18adcb1558 | [] | no_license | wangshuai-bit/T2_classification | 2ca33cb6b52be4f12846e245ca2bbd6a87d3ec7f | 94488e168d618abe8228c75f07f58209c1cbccbc | refs/heads/main | 2023-02-18T20:14:50.913843 | 2021-01-22T01:30:44 | 2021-01-22T01:30:44 | 331,675,892 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 34,511 | py | # try to print the error image 20190520
import tensorflow as tf
import pickle
import time
from tflearn.layers.conv import global_avg_pool
from tensorflow.contrib.layers import batch_norm, flatten
from tensorflow.contrib.layers import xavier_initializer
from tensorflow.contrib.framework import arg_scope
from PIL import Image
from load_data import *
import matplotlib.pyplot as plt
from tensorflow.python import pywrap_tensorflow
import math
from itertools import cycle
from sklearn.metrics import roc_curve,auc
from scipy import interp
from numpy.random import seed
seed(1)
from tensorflow import set_random_seed
set_random_seed(2)
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
# Hyperparameter
growth_k = 24
nb_block = 2 # how many (dense block + Transition Layer) ?
#init_learning_rate = 2.0e-4
init_learning_rate = 0.01
#tmp_learning_rate = 0.1
init_lamda_1 = 0.00
init_lamda_2 = 1.00
epsilon = 1e-4 # AdamOptimizer epsilon
dropout_rate = 0.30
keep_prob = 1.0
# Momentum Optimizer will use
nesterov_momentum = 0.9
weight_decay = 8e-4
weight_decay_l1 = 0
# Label & batch_size
batch_size = 32
dataset_size = 4800
iteration = 150
# batch_size * iteration = data_set_number
test_iteration = 10
# total_epochs = 300
total_epochs = 300
# regularzer
reg_scale = 0.4
# train
isTrain =False
#datasets
datasets = {}
root_path = '/home/wangshuai/ckpts_for_zhengyao/pt_5_to_2_lr_transfer_test_5_5to2'
#os.mkdir(root_path)
txt_path = os.path.join(root_path, 'logs.txt')
print(txt_path)
ckpt_path = root_path
summary_path = root_path
save_path = os.path.join(root_path, 'train_64_pt_5_to_2')
write_title = "train_64_pt_5_to_2, init_learning_rate:%.6f, dropout_rate:%.2f, " \
"weight_decay%.4f,total_epochs%.4f, batch_size%.1f\n" \
% (init_learning_rate,dropout_rate,weight_decay,total_epochs,batch_size)
start_time = time.time()
print("start time is", start_time)
def variable_summaries(var,name):
with tf.name_scope(name):
mean = tf.reduce_mean(var)
tf.summary.scalar('mean', mean)
tf.summary.histogram('histogram', var)
def conv2d(x, W):
return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')
def bias_variable(shape):
initial = tf.constant(0.1, shape=shape, dtype=tf.float32)
return tf.Variable(initial)
def Global_Average_Pooling(x, stride=1):
"""
width = np.shape(x)[1]
height = np.shape(x)[2]
pool_size = [width, height]
return tf.layers.average_pooling2d(inputs=x, pool_size=pool_size, strides=stride) # The stride value does not matter
It is global average pooling without tflearn
"""
return global_avg_pool(x, name='Global_avg_pooling')
# But maybe you need to install h5py and curses or not
def Batch_Normalization(x, training, scope):
with arg_scope([batch_norm],
scope=scope,
updates_collections=None,
decay=0.9,
center=True,
scale=True,
zero_debias_moving_mean=False):
return tf.cond(training,
lambda: batch_norm(inputs=x, is_training=training, reuse=None),
lambda: batch_norm(inputs=x, is_training=training, reuse=True))
def Drop_out(x, rate, training):
return tf.layers.dropout(inputs=x, rate=rate, training=training)
def Relu(x):
return tf.nn.relu(x)
def Average_pooling(x, pool_size=[2, 2], stride=2, padding='VALID'):
return tf.layers.average_pooling2d(inputs=x, pool_size=pool_size, strides=stride, padding=padding)
def max_pool(input, k_size=1, stride=1, name=None):
return tf.nn.max_pool(input, ksize=[1, k_size, k_size, 1], strides=[1, stride, stride, 1],
padding='SAME', name=name)
def Concatenation(layers):
return tf.concat(layers, axis=3)
def Linear(x):
#dropout = tf.layers.dropout(inputs=x, rate=0.2, training=training_flag) # add dropout here
#relu_1 = tf.nn.relu(x)
dense_1 = tf.layers.dense(inputs=x, units=10, name='linear_1', use_bias=True,
kernel_regularizer=tf.contrib.layers.l2_regularizer(reg_scale))
dense = tf.layers.dense(inputs=dense_1, units=class_num, name='linear_5', use_bias=True,
kernel_regularizer=tf.contrib.layers.l2_regularizer(reg_scale))
return dense
def Evaluate(sess, epoch):
test_acc = 0.0
test_acc_norm = 0.0
test_acc_arc = 0.0
test_loss = 0.0
test_pre_index = 0
train_pre_index = 0
add = 67
#add = 930
y_amount_0 = 0
y_amount_1 = 0
y_amount_2 = 0
y_amount_3 = 0
equal = 0
y_equal_0 = 0
y_equal_1 = 0
y_equal_2 = 0
y_equal_3 = 0
y_0to1 = 0
y_0to2 = 0
y_0to3 = 0
y_1to0 = 0
y_1to2 = 0
y_1to3 = 0
y_2to0 = 0
y_2to1 = 0
y_2to3 = 0
y_3to0 = 0
y_3to1 = 0
y_3to2 = 0
y_equal_0_pro_sigmoid = 0
y_equal_1_pro_sigmoid = 0
y_equal_0_pro_softmax = 0
y_equal_1_pro_softmax = 0
y_equal_2_pro_softmax = 0
y_equal_3_pro_softmax = 0
y_all_1_pro_sigmoid = 0
y_all_0_pro_sigmoid = 0
y_all_0_pro_softmax = 0
y_all_1_pro_softmax = 0
y_all_2_pro_softmax = 0
y_all_3_pro_softmax = 0
y_equal_0_pro_sigmoid_wrong = 0
mid = 0
mid_1 = 0
y_score = np.empty(shape=[0, 4])
y_onehot = np.empty(shape=[0, 4])
for it in range(test_iteration):
test_batch_x = test_x[test_pre_index: test_pre_index + add]
test_batch_y = test_y[test_pre_index: test_pre_index + add]
test_batch_p = test_p[test_pre_index: test_pre_index + add]
test_pre_index = test_pre_index + add
test_feed_dict = {
x: test_batch_x,
label: test_batch_y,
path: test_batch_p,
learning_rate: epoch_learning_rate,
training_flag: False
}
loss_, acc_ = sess.run([cost, accuracy], feed_dict=test_feed_dict)
'''
logits_watch = sess.run(logits, feed_dict=test_feed_dict)
print("logit is", logits_watch)
print("label is ", test_batch_y)
'''
if epoch >= total_epochs-1:
result_one = sess.run(logits, feed_dict=test_feed_dict)
loss_, acc_= sess.run([cost, accuracy], feed_dict=test_feed_dict)
y_score = np.append(y_score, result_one, axis=0)
y_onehot = np.append(y_onehot, test_batch_y, axis=0)
test_loss += loss_ / 10.0
test_acc += acc_ / 10.0
if epoch >= total_epochs-1:
# print("the acc of this time is ", acc_)
# print("the all acc is ", test_acc)
result_one_sigmoid = sess.run(tf.nn.sigmoid(result_one))
result_one_softmax = sess.run(tf.nn.softmax(result_one))
result_one_argmax = sess.run(tf.argmax(result_one, 1))
test_batch_y_argmax = sess.run(tf.argmax(test_batch_y, 1))
path_one = test_batch_p
for i in range(len(test_batch_y_argmax)):
if test_batch_y_argmax[i] == 0:
y_amount_0 = y_amount_0 + 1
y_all_0_pro_softmax = y_all_0_pro_softmax + result_one_softmax[i]
if result_one_argmax[i] == 1:
y_0to1 = y_0to1 + 1
#print("y_0to1 is ", path_one[i])
elif result_one_argmax[i] == 2:
y_0to2 = y_0to2 + 1
#print("y_0to2 is ", path_one[i])
elif result_one_argmax[i] == 3:
y_0to3 = y_0to3 + 1
#print("y_0to3 is ", path_one[i])
elif result_one_argmax[i] == test_batch_y_argmax[i]:
y_equal_0 = y_equal_0 + 1
y_equal_0_pro_sigmoid = y_equal_0_pro_sigmoid + result_one_sigmoid[i]
y_equal_0_pro_softmax = y_equal_0_pro_softmax + result_one_softmax[i]
#print("0 is", path_one[i])
elif test_batch_y_argmax[i] == 1:
y_amount_1 = y_amount_1 + 1
y_all_1_pro_softmax = y_all_1_pro_softmax + result_one_softmax[i]
if result_one_argmax[i] == 0:
y_1to0 = y_1to0 + 1
#print("y_1to0 is", path_one[i])
elif result_one_argmax[i] == 2:
y_1to2 = y_1to2 + 1
#print("y_1to2 is", path_one[i])
elif result_one_argmax[i] == 3:
y_1to3 = y_1to3 + 1
#print("y_1to3 is", path_one[i])
elif result_one_argmax[i] == test_batch_y_argmax[i]:
y_equal_1 = y_equal_1 + 1
y_equal_1_pro_sigmoid = y_equal_1_pro_sigmoid + result_one_sigmoid[i]
y_equal_1_pro_softmax = y_equal_1_pro_softmax + result_one_softmax[i]
#print("1 is", path_one[i])
elif test_batch_y_argmax[i] == 2:
y_amount_2 = y_amount_2 + 1
y_all_2_pro_softmax = y_all_2_pro_softmax + result_one_softmax[i]
if result_one_argmax[i] == 0:
y_2to0 = y_2to0 + 1
#print("y_2to0 is", path_one[i])
elif result_one_argmax[i] == 1:
y_2to1 = y_2to1 + 1
#print("y_2to1 is", path_one[i])
elif result_one_argmax[i] == 3:
y_2to3 = y_2to3 + 1
#print("y_2to3 is", path_one[i])
if result_one_argmax[i] == test_batch_y_argmax[i]:
y_equal_2 = y_equal_2 + 1
y_equal_2_pro_softmax = y_equal_2_pro_softmax + result_one_softmax[i]
#print("2 is" , path_one[i])
elif test_batch_y_argmax[i] == 3:
y_amount_3 = y_amount_3 + 1
y_all_3_pro_softmax = y_all_3_pro_softmax + result_one_softmax[i]
if result_one_argmax[i] == 0:
y_3to0 = y_3to0 + 1
#print("y_3to0 is", path_one[i])
elif result_one_argmax[i] == 1:
y_3to1 = y_3to1 + 1
#print("y_3to1 is", path_one[i])
elif result_one_argmax[i] == 2:
y_3to2 = y_3to2 + 1
#print("y_3to2 is", path_one[i])
elif result_one_argmax[i] == test_batch_y_argmax[i]:
y_equal_3 = y_equal_3 + 1
y_equal_3_pro_softmax = y_equal_3_pro_softmax + result_one_softmax[i]
#print("3 is", path_one[i])
# print("the result_one_argmax is ", result_one_argmax)
# print("the test_batch_y_argmax is ", test_batch_y_argmax)
# print("result_one_softmax is ", result_one_softmax)
# print("test_batch_y is ", test_batch_y)
if epoch >=total_epochs-1:
print("y_score and y_onehot shape is ", y_score.shape, y_onehot.shape)
fpr = dict()
tpr = dict()
roc_auc = dict()
for i in range(class_num):
fpr[i], tpr[i], _ = roc_curve(y_onehot[:, i], y_score[:, i])
roc_auc[i] = auc(fpr[i], tpr[i])
# first aggregate all the false positive rates
all_fpr = np.unique(np.concatenate([fpr[i] for i in range(class_num)]))
# then interpolate all ROC curves at this point
mean_tpr = np.zeros_like(all_fpr)
for i in range(class_num):
mean_tpr += interp(all_fpr, fpr[i], tpr[i])
# finally average it and compute AUC
mean_tpr /= class_num
fpr["macro"] = all_fpr
tpr["macro"] = mean_tpr
roc_auc["macro"] = auc(fpr["macro"], tpr["macro"])
fpr_macro = fpr["macro"]
tpr_macro = tpr["macro"]
roc_auc_macro = roc_auc["macro"]
# plot all ROC curves
subtype = ["ccRCC","CRCC","AML","PRCC"]
plt.plot(fpr["macro"], tpr["macro"], label="macro-average ROC curve(area = {0:0.2f})".format(roc_auc["macro"]),
color="navy", linestyle=":", linewidth=4)
colors = cycle(['aqua', 'darkorange', 'cornflowerblue'])
for i, color in zip(range(class_num), colors):
plt.plot(fpr[i], tpr[i], color=color, lw=2,
label="ROC curve of {0}(area = {1:0.2f})".format(subtype[i], roc_auc[i]))
print(fpr[0].shape)
plt.plot([0, 1], [0, 1], "k--", lw=2)
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel("false positive rate")
plt.ylabel("true positive rate")
plt.title("ROC to multi-classification")
plt.legend(loc="lower right")
plt.savefig("ROC of 5_to_2.jpg")
plt.show()
if epoch >= total_epochs-1:
print("the amount of 0 is and the equal is ", y_amount_0, y_equal_0)
print("the amount of 1 is and the equal is ", y_amount_1, y_equal_1)
print("the amount of 2 is and the equal is ", y_amount_2, y_equal_2)
print("the amount of 3 is and the equal is ", y_amount_3, y_equal_3)
print("the equal pro of 0 is sigmoid, softmax", y_equal_0_pro_sigmoid / y_amount_0,
y_equal_0_pro_softmax / y_amount_0)
print("the equal pro of 1 is sigmoid, softmax", y_equal_1_pro_sigmoid / y_amount_1,
y_equal_1_pro_softmax / y_amount_1)
print("the equal pro of 2 is sigmoid, softmax", y_equal_2_pro_softmax / y_amount_2)
print("the equal pro of 3 is sigmoid, softmax", y_equal_3_pro_softmax / y_amount_3)
print("the all pro of 0 is sigmoid ", y_all_0_pro_softmax / y_amount_0)
print("the all pro of 1 is sigmoid ", y_all_1_pro_softmax / y_amount_1)
print("the all pro of 0 is sigmoid ", y_all_2_pro_softmax / y_amount_2)
print("the all pro of 1 is sigmoid ", y_all_3_pro_softmax / y_amount_3)
# print("the pro of 0 wrong is , and the mid0, mid1 is ", y_equal_0_pro_sigmoid_wrong/(y_amount_0-y_equal_0), mid, mid_1)
y_0_acc = y_equal_0 / y_amount_0
y_1_acc = y_equal_1 / y_amount_1
y_2_acc = y_equal_2 / y_amount_2
y_3_acc = y_equal_3 / y_amount_3
print("the acc of 0 is ", y_0_acc)
print("the acc of 1 is ", y_1_acc)
print("the acc of 2 is ", y_2_acc)
print("the acc of 3 is ", y_3_acc)
print("the 0 class is", y_equal_0, y_0to1, y_0to2, y_0to3, y_amount_0)
print("the 1 class is", y_1to0, y_equal_1, y_1to2, y_1to3, y_amount_1)
print("the 2 class is", y_2to0, y_2to1, y_equal_2, y_2to3, y_amount_2)
print("the 3 class is", y_3to0, y_3to1, y_3to2, y_equal_3, y_amount_3)
print("the precision of 0,1,2,3", "0", (y_equal_0 + y_1to0 + y_2to0 + y_3to0),
y_equal_0 / (y_equal_0 + y_1to0 + y_2to0 + y_3to0),
"1", (y_0to1 + y_equal_1 + y_2to1 + y_3to1), y_equal_1 / (y_0to1 + y_equal_1 + y_2to1 + y_3to1),
"2", (y_0to2 + y_1to2 + y_equal_2 + y_3to2), y_equal_2 / (y_0to2 + y_1to2 + y_equal_2 + y_3to2),
"3", (y_0to3 + y_1to3 + y_2to3 + y_equal_3), y_equal_3 / (y_0to3 + y_1to3 + y_2to3 + y_equal_3)
)
summary = tf.Summary(value=[tf.Summary.Value(tag='test_loss', simple_value=test_loss),
tf.Summary.Value(tag='test_accuracy', simple_value=test_acc)])
return test_acc, test_loss, summary
class RESNet():
def __init__(self, x, training, labels):
self.training = training
self.model = self.ResNet18(x, is_training=training, pooling_and_fc=True,
reuse=False, kernel_initializer = tf.contrib.layers.variance_scaling_initializer())
def identity_block2d(self,input_tensor, kernel_size, filters, stage, block, is_training, reuse,
kernel_initializer=tf.contrib.layers.variance_scaling_initializer()):
filters1, filters2, filters3 = filters
conv_name_2 = 'conv' + str(stage) + '_' + str(block) + '_3x3'
bn_name_2 = 'bn' + str(stage) + '_' + str(block) + '_3x3'
x = tf.layers.conv2d(input_tensor, filters2, kernel_size, use_bias=False, padding='SAME',
kernel_initializer=kernel_initializer, name=conv_name_2, reuse=reuse)
x = Batch_Normalization(x, training=is_training, scope=bn_name_2)
x = tf.nn.relu(x)
conv_name_3 = 'conv' + str(stage) + '_' + str(block) + '_1x1_increase'
bn_name_3 = 'bn' + str(stage) + '_' + str(block) + '_1x1_increase'
x = tf.layers.conv2d(x, filters3, (kernel_size, kernel_size), use_bias=False, padding='SAME',
kernel_initializer=kernel_initializer, name=conv_name_3, reuse=reuse)
x = Batch_Normalization(x, training=is_training, scope=bn_name_3)
x = tf.add(input_tensor, x)
x = tf.nn.relu(x)
return x
def conv_block_2d(self,input_tensor, kernel_size, filters, stage, block, is_training, reuse, strides=(2, 2),
kernel_initializer=tf.contrib.layers.variance_scaling_initializer()):
filters1, filters2, filters3 = filters
conv_name_2 = 'conv' + str(stage) + '_' + str(block) + '_3x3'
bn_name_2 = 'bn' + str(stage) + '_' + str(block) + '_3x3'
x = tf.layers.conv2d(input_tensor, filters2, (kernel_size, kernel_size), use_bias=False, strides=strides,
padding='SAME', kernel_initializer=kernel_initializer, name=conv_name_2, reuse=reuse)
x = Batch_Normalization(x, training=is_training, scope=bn_name_2)
x = tf.nn.relu(x)
conv_name_3 = 'conv' + str(stage) + '_' + str(block) + '_1x1_increase'
bn_name_3 = 'bn' + str(stage) + '_' + str(block) + '_1x1_increase'
x = tf.layers.conv2d(x, filters3, (kernel_size, kernel_size), use_bias=False, padding='SAME',
kernel_initializer=kernel_initializer, name=conv_name_3, reuse=reuse)
x = Batch_Normalization(x, training=is_training, scope=bn_name_3)
conv_name_4 = 'conv' + str(stage) + '_' + str(block) + '_1x1_shortcut'
bn_name_4 = 'bn' + str(stage) + '_' + str(block) + '_1x1_shortcut'
shortcut = tf.layers.conv2d(input_tensor, filters3, (kernel_size, kernel_size), use_bias=False, strides=strides,
padding='SAME', kernel_initializer=kernel_initializer, name=conv_name_4,
reuse=reuse)
shortcut = Batch_Normalization(shortcut, training=is_training, scope=bn_name_4)
x = tf.add(shortcut, x)
x = tf.nn.relu(x)
return x
def ResNet18(self,input_tensor, is_training=True, pooling_and_fc=True, reuse=False,
kernel_initializer=tf.contrib.layers.variance_scaling_initializer()):
print("the input_tensor is ", input_tensor)
input_tensor_tile = tf.tile(input_tensor, [1,1,1,3])
print("after tf.tile, the input tensor is", input_tensor_tile)
x = tf.layers.conv2d(input_tensor_tile, 32, (3, 3), strides=(1, 1), kernel_initializer=kernel_initializer,
use_bias=False, padding='SAME', name='conv1_1/3x3_s1', reuse=reuse)
x = Batch_Normalization(x, training=is_training,scope ='bn1_1/3x3_s1')
x = tf.nn.relu(x)
x1 = self.identity_block2d(x, 3, [48, 32, 32], stage=2, block='1b', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x1 = self.identity_block2d(x1, 3, [48, 32, 32], stage=3, block='1c', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x2 = self.conv_block_2d(x1, 3, [96, 64, 64], stage=3, block='2a', strides=(2, 2), is_training=is_training,
reuse=reuse, kernel_initializer=kernel_initializer)
x2 = self.identity_block2d(x2, 3, [96, 64, 64], stage=3, block='2b', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x3 = self.conv_block_2d(x2, 3, [128, 128, 128], stage=4, block='3a', strides=(2, 2), is_training=is_training,
reuse=reuse, kernel_initializer=kernel_initializer)
x3 = self.identity_block2d(x3, 3, [128, 128, 128], stage=4, block='3b', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x4 = self.conv_block_2d(x3, 3, [256, 256, 256], stage=5, block='4a', strides=(2, 2), is_training=is_training,
reuse=reuse, kernel_initializer=kernel_initializer)
x4 = self.identity_block2d(x4, 3, [256, 256, 256], stage=5, block='4b', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
# print('before gap: ', x4)
x4 = tf.reduce_mean(x4, [1, 2])
x4 = Drop_out(x4, dropout_rate, is_training)
# print('after gap: ', x4)
# flatten = tf.contrib.layers.flatten(x4)
prob = tf.layers.dense(x4, 4, reuse=reuse, kernel_initializer=tf.contrib.layers.xavier_initializer(),
use_bias=True, name="fully_connected")
return prob
def ResNet34(self, input_tensor, is_training, pooling_and_fc=True,
reuse=False, kernel_initializer = tf.contrib.layers.variance_scaling_initializer()):
x = tf.layers.conv2d(input_tensor, 32, (5, 5), strides=(1, 1), kernel_initializer=kernel_initializer,
use_bias=False, padding='SAME', name='conv1_1/3x3_s1', reuse=reuse)
x = Batch_Normalization(x, training=is_training,scope ='bn1_1/3x3_s1')
x = tf.nn.relu(x)
variable_summaries(x, name='x_0')
x1 = self.identity_block2d(x, 3, [48, 32, 32], stage=1, block='1a', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x1 = self.identity_block2d(x1, 3, [48, 32, 32], stage=1, block='1b', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x1 = self.identity_block2d(x1, 3, [48, 32, 32], stage=1, block='1c', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
variable_summaries(x1, name='x_1')
x2 = self.conv_block_2d(x1, 3, [96, 64, 64], stage=2, block='2a', strides=(2, 2), is_training=is_training,
reuse=reuse, kernel_initializer=kernel_initializer)
x2 = self.identity_block2d(x2, 3, [96, 64, 64], stage=2, block='2b', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x2 = self.identity_block2d(x2, 3, [96, 64, 64], stage=2, block='2c', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x2 = self.identity_block2d(x2, 3, [96, 64, 64], stage=2, block='2d', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
variable_summaries(x2, name='x_2')
x3 = self.conv_block_2d(x2, 3, [128, 128, 128], stage=3, block='3a', strides=(2, 2), is_training=is_training,
reuse=reuse, kernel_initializer=kernel_initializer)
x3 = self.identity_block2d(x3, 3, [128, 128, 128], stage=3, block='3b', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x3 = self.identity_block2d(x3, 3, [128, 128, 128], stage=3, block='3c', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x3 = self.identity_block2d(x3, 3, [128, 128, 128], stage=3, block='3d', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x3 = self.identity_block2d(x3, 3, [128, 128, 128], stage=3, block='3e', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x3 = self.identity_block2d(x3, 3, [128, 128, 128], stage=3, block='3f', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
variable_summaries(x3, name='x_3')
x4 = self.conv_block_2d(x3, 3, [256, 256, 256], stage=4, block='4a', strides=(2, 2), is_training=is_training,
reuse=reuse, kernel_initializer=kernel_initializer)
x4 = self.identity_block2d(x4, 3, [256, 256, 256], stage=4, block='4b', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
x4 = self.identity_block2d(x4, 3, [256, 256, 256], stage=4, block='4c', is_training=is_training, reuse=reuse,
kernel_initializer=kernel_initializer)
# print('before gap: ', x4)
x4 = tf.reduce_mean(x4, [1, 2])
x4 = Drop_out(x4, dropout_rate, is_training)
# print('after gap: ', x4)
# flatten = tf.contrib.layers.flatten(x4)
prob = tf.layers.dense(x4, 4, reuse=reuse, kernel_initializer=tf.contrib.layers.xavier_initializer(seed=1),
name="fully_connected")
return prob
# ---------------------------------------------------------------------------
# Graph construction: load/preprocess data, build placeholders, the ResNet
# model, the loss, and the Momentum optimizer.  Relies on helpers defined
# earlier in this file (prepare_data, color_preprocessing, RESNet) and on
# hyper-parameter globals (image_size, img_channels, class_num, weight_decay,
# L_metric_l2_regularizer, ...).
# ---------------------------------------------------------------------------
#train_x_pre, train_y_pre, test_x_pre, test_y_pre = prepare_data(train_files = '/training_64_4class_pk.pickle', test_files = '/test_64_4class_pk.pickle')
train_x, train_y, train_p, test_x, test_y, test_p = prepare_data(train_files='/train_64_pt_all_sel_5_to_2', test_files='/test_64_pt_all_sel_5_to_2')
train_x, test_x = color_preprocessing(train_x, test_x)
print("after select,the shape of train data and label is ", train_x.shape, train_y.shape)
# (fixed typo: "aftre" -> "after")
print("after select, the shape of test data and label is ", test_x.shape, test_y.shape)

# image_size = 32, img_channels = 3, class_num = 10 in cifar10
x = tf.placeholder(tf.float32, shape=[None, image_size, image_size, img_channels])
label = tf.placeholder(tf.float32, shape=[None, class_num])
path = tf.placeholder(tf.string)
training_flag = tf.placeholder(tf.bool)
learning_rate = tf.placeholder(tf.float32, name='learning_rate')

logits = RESNet(x=x, training=training_flag, labels=label).model
#logits, cos_t, s_train, logits_2, logits_3, logits_4, logits_5, logits_6, center_loss= DenseNet(x=x, nb_blocks=nb_block, filters=growth_k, training=training_flag, labels = label).model
# reg_ws = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,"DenseNet")
# weights_regularizer = tf.contrib.layers.l1_regularizer(0.4)
reg_ws = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES, 'DenseNet')
print("label", label, "logits", logits)

# Softmax cross-entropy plus explicit L2/L1 terms over every trainable
# variable.  NOTE(review): l1 is computed but never used below -- kept in
# case later (unseen) code references it; confirm before deleting.
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=label, logits=logits))
l2 = tf.add_n([tf.nn.l2_loss(var) for var in tf.trainable_variables()])
l1 = tf.add_n([tf.contrib.layers.l1_regularizer(0.5)(var) for var in tf.trainable_variables()])
"""
l2_loss = tf.add_n([tf.nn.l2_loss(var) for var in tf.trainable_variables()])
optimizer = tf.train.MomentumOptimizer(learning_rate=learning_rate, momentum=nesterov_momentum, use_nesterov=True)
train = optimizer.minimize(cost + l2_loss * weight_decay)
In paper, use MomentumOptimizer
init_learning_rate = 0.1
but, I'll use AdamOptimizer
"""
cost = cross_entropy + L_metric_l2_regularizer
#optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate, epsilon=epsilon)

# Dump the trainable variables (debug aid) -- enumerate replaces the manual
# index counter of the original.
tr_vars = tf.trainable_variables()
var_list = list(tr_vars)
print("type of var is ", type(var_list), var_list)
for i, ttt in enumerate(var_list):
    print("t is ", i, ttt)
new_var_list = var_list[30:]

optimizer = tf.train.MomentumOptimizer(learning_rate, 0.9)
train = optimizer.minimize(cost + l2 * weight_decay)

correct_prediction = tf.equal(tf.argmax(logits, 1), tf.argmax(label, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

#merge all the summary
# Two savers: `saver` restores everything except the final FC layer (for
# fine-tuning from a checkpoint with a different head); `saver_2` saves and
# restores the full model.
restore_variable_list = tf.contrib.framework.get_variables_to_restore(exclude=["fully_connected", "is_training"])
saver = tf.train.Saver(restore_variable_list)
saver_2 = tf.train.Saver()

with open(txt_path, 'a') as f:
    f.write(write_title)
print("write finished,\n")
# Training/evaluation driver.  Restores the newest checkpoint when present,
# then either runs the full training loop (isTrain) or a single evaluation.
# NOTE(review): indentation was reconstructed from the control flow; the
# placement of the per-batch per-class counting block and of test_acc_old's
# update should be confirmed against the original file.
with tf.Session() as sess:
    ''' '''
    ckpt = tf.train.get_checkpoint_state(ckpt_path)
    if ckpt and tf.train.checkpoint_exists(ckpt.model_checkpoint_path):
        # Dump the tensor names stored in the checkpoint (debug aid).
        reader = pywrap_tensorflow.NewCheckpointReader(save_path)
        var_to_shape_map = reader.get_variable_to_shape_map()
        for key in var_to_shape_map:
            print("tensor name", key)
        sess.run(tf.global_variables_initializer())
        saver_2.restore(sess, ckpt.model_checkpoint_path)
        print("load the model")
    else:
        sess.run(tf.global_variables_initializer())
        print("new initial")
    #sess.run(tf.global_variables_initializer())
    #print("new initial")
    summary_writer = tf.summary.FileWriter(summary_path, sess.graph)
    epoch_learning_rate = init_learning_rate
    #epoch_learning_rate = tmp_learning_rate
    if isTrain:
        test_acc_old = 0
        for epoch in range(1, total_epochs + 1):
            #for epoch in range(1, 2):
            # Step schedule: divide the LR by 10 at 50% and 75% of training.
            # NOTE(review): int==float comparison only fires when
            # total_epochs is divisible by 4 -- confirm intended.
            if epoch == (total_epochs * 0.5) or epoch == (total_epochs * 0.75):
                epoch_learning_rate = epoch_learning_rate / 10
            pre_index = 0
            train_acc = 0.0
            train_acc_norm = 0.0
            train_acc_arcface = 0.0
            train_loss = 0.0
            train_center_loss = 0.0
            # Per-class correct-prediction counters (classes 0..3); only
            # accumulated during the final epoch.
            train_y_equal_0 = 0
            train_y_equal_1 = 0
            train_y_equal_2 = 0
            train_y_equal_3 = 0
            for step in range(1, iteration + 1):
                # Slice the next mini-batch (the last batch may be short).
                if pre_index + batch_size < dataset_size:
                    batch_x = train_x[pre_index: pre_index + batch_size]
                    batch_y = train_y[pre_index: pre_index + batch_size]
                    batch_p = train_p[pre_index: pre_index + batch_size]
                else:
                    batch_x = train_x[pre_index:]
                    batch_y = train_y[pre_index:]
                    batch_p = train_p[pre_index:]
                batch_x = data_augmentation(batch_x)
                train_feed_dict = {
                    x: batch_x,
                    label: batch_y,
                    path: batch_p,
                    learning_rate: epoch_learning_rate,
                    training_flag: True
                }
                _, batch_loss = sess.run([train, cost], feed_dict=train_feed_dict)
                # NOTE(review): this re-runs the forward pass just to read
                # the accuracy -- could be fetched in the sess.run above.
                batch_acc = accuracy.eval(feed_dict=train_feed_dict)
                '''
                logits_watch = sess.run(logits,feed_dict=train_feed_dict)
                print("logit is",logits_watch )
                print("label is ", batch_y)
                '''
                train_loss += batch_loss
                #train_center_loss += batch_center_loss
                train_acc += batch_acc
                pre_index += batch_size
                if step == iteration:
                    # End of epoch: average the accumulators, log summaries,
                    # evaluate on the test set, and append a line to the
                    # results text file.
                    train_loss /= iteration  # average loss
                    train_acc /= iteration  # average accuracy
                    train_center_loss /= iteration
                    if epoch >= total_epochs - 1:
                        train_acc_norm /= iteration
                        train_acc_arcface /= iteration
                    train_summary = tf.Summary(value=[tf.Summary.Value(tag='train_loss', simple_value=train_loss),
                                                      tf.Summary.Value(tag='train_accuracy', simple_value=train_acc)])
                    test_acc, test_loss, test_summary = Evaluate(sess, epoch)
                    summary_writer.add_summary(summary=train_summary, global_step=epoch)
                    summary_writer.add_summary(summary=test_summary, global_step=epoch)
                    summary_writer.flush()
                    line = "epoch: %d/%d, train_loss: %.4f, train_acc: %.4f, test_loss: %.4f, test_acc: %.4f\n" % (
                        epoch, total_epochs, train_loss, train_acc, test_loss, test_acc)
                    print(line)
                    with open(txt_path, 'a') as f:
                        f.write(line)
                    # During the last 10 epochs keep only the best-so-far
                    # checkpoint by test accuracy.
                    if epoch >= total_epochs - 10:
                        test_acc_new = test_acc
                        if test_acc_new >= test_acc_old:
                            saver_2.save(sess=sess, save_path=save_path)
                            print("model saved ,acc is", test_acc_new)
                            test_acc_old = test_acc
                # On the final epoch, count correct predictions per class on
                # each training batch.
                if epoch >= total_epochs - 1:
                    train_result = sess.run(tf.argmax(logits, 1), feed_dict=train_feed_dict)
                    label_argmax = sess.run(tf.argmax(label, 1), feed_dict=train_feed_dict)
                    for itrain in range(len(batch_y)):
                        if label_argmax[itrain] == 0:
                            if train_result[itrain] == label_argmax[itrain]:
                                train_y_equal_0 = train_y_equal_0 + 1
                        elif label_argmax[itrain] == 1:
                            if train_result[itrain] == label_argmax[itrain]:
                                train_y_equal_1 = train_y_equal_1 + 1
                        elif label_argmax[itrain] == 2:
                            if train_result[itrain] == label_argmax[itrain]:
                                train_y_equal_2 = train_y_equal_2 + 1
                        elif label_argmax[itrain] == 3:
                            if train_result[itrain] == label_argmax[itrain]:
                                train_y_equal_3 = train_y_equal_3 + 1
                    #s_train_val = sess.run(s_train, feed_dict=train_feed_dict)
            if epoch >= total_epochs - 1:
                #print("s_train_val is ", s_train_val)
                print("the right amount of train of 0 and 1 and 2 and 3 is ", train_y_equal_0, train_y_equal_1, train_y_equal_2, train_y_equal_3)
    else:
        # Evaluation-only mode.
        epoch = total_epochs - 1
        test_acc, test_loss, test_summary = Evaluate(sess, epoch)
        print("test_loss:", test_loss, "test_acc", test_acc)
# Wall-clock timing (start_time is set earlier in the file).
end_time = time.time()
print("end time is", end_time)
time_dur = end_time - start_time
print("time_dur is ", time_dur)
| [
"noreply@github.com"
] | noreply@github.com |
e8271a5bf72bda3ddf07e62fa50173e847af9541 | abf857dfc50a3a0a109d00cc24ce88cf0df79a97 | /daphnia/main.py | 5d294e41baa6b73f3125a64a03dd4ef62f8720e1 | [] | no_license | awedwards/daphnia-bergland | 7a4542e5f48dbf2bb442632738626cbf602dd31f | 9d29edb7a1df84062e0368d3f918a1cace09815b | refs/heads/master | 2021-09-13T09:27:50.329160 | 2018-04-27T19:56:16 | 2018-04-27T19:56:16 | 95,588,676 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,195 | py | from __future__ import division
import utils
import pandas as pd
from clone import Clone
import os
import cv2
DATADIR = "/mnt/spicy_4/daphnia/data"
ANALYSISDIR = "/mnt/spicy_4/daphnia/analysis/"
INDUCTIONMETADATADIR = "/mnt/spicy_4/daphnia/analysis/MetadataFiles/induction"
PONDSEASONFILEPATH = "/mnt/spicy_4/daphnia/analysis/MetadataFiles/season_metadata.csv"
ext = '.bmp'
current = "analysis_results_current.txt"
out = "tail_spine.txt"
pedestal = "pedestal_current.txt"
analysis = True
build_clonedata = False
flags = []
if analysis == True:
#flags.append("getPxtomm")
#flags.append("doEyeAreaCalc")
#flags.append("doAntennaMasking")
#flags.append("doAnimalAreaCalc")
#flags.append("getOrientationVectors")
flags.append("doLength")
#flags.append("fitPedestal")
#flags.append("doPedestalScore")
#flags.append("doQualityCheck")
print "Loading clone data\n"
try:
clones = utils.build_clonelist(DATADIR, ANALYSISDIR, INDUCTIONMETADATADIR, PONDSEASONFILEPATH)
df = utils.csv_to_df(os.path.join(ANALYSISDIR, current))
loaded = utils.df_to_clonelist(df, datadir=DATADIR)
#dfout = utils.csv_to_df(os.path.join(ANALYSISDIR, out))
#out_loaded = utils.df_to_clonelist(dfout, datadir=DATADIR)
#clones = utils.update_clone_list(clones, out_loaded)
clones = utils.update_clone_list(clones, loaded)
print "Successfully updated clone list"
except (AttributeError, IOError):
clones = utils.build_clonelist(DATADIR, ANALYSISDIR, INDUCTIONMETADATADIR, PONDSEASONFILEPATH)
# Column order for the tab-separated results file written by
# utils.write_clone below.
# NOTE(review): "pedestal_max_height" and "pedestal_area" appear TWICE
# (after the *_pixels variants and again after poly_coeff/res).  The
# duplicate header columns look unintentional, but downstream parsers may
# rely on the current layout -- confirm before removing either copy.
cols = ["filebase",
        "barcode",
        "cloneid",
        "pond",
        "id",
        "season",
        "treatment",
        "replicate",
        "rig",
        "datetime",
        "inductiondate",
        "total_animal_pixels",
        "animal_area",
        "total_eye_pixels",
        "eye_area",
        "animal_length_pixels",
        "animal_length",
        "pixel_to_mm",
        "animal_x_center",
        "animal_y_center",
        "animal_major",
        "animal_minor",
        "animal_theta",
        "eye_x_center",
        "eye_y_center",
        "anterior",
        "posterior",
        "dorsal",
        "ventral",
        "ant_vec",
        "pos_vec",
        "dor_vec",
        "ven_vec",
        "eye_dorsal",
        "head",
        "tail",
        "tail_tip",
        "tail_spine_length_pixels",
        "tail_spine_length",
        "ventral_mask_endpoints",
        "dorsal_mask_endpoints",
        "anterior_mask_endpoints",
        "posterior_mask_endpoints",
        "pedestal_max_height_pixels",
        "pedestal_area_pixels",
        "pedestal_max_height",
        "pedestal_area",
        "poly_coeff",
        "res",
        "pedestal_max_height",
        "pedestal_area",
        "peak",
        "deyecenter_pedestalmax_pixels",
        "deyecenter_pedestalmax",
        "automated_PF",
        "automated_PF_reason",
        "manual_PF",
        "manual_PF_reason",
        "manual_PF_curator"]
# Create the output file with a header row unless a non-empty one already
# exists (os.stat raises OSError when the file is missing; an empty file is
# forced down the same path by raising IOError).
try:
    if os.stat(os.path.join(ANALYSISDIR, out)).st_size == 0:
        raise IOError
except (IOError, OSError):
    print "Starting new output file"
    with open(os.path.join(ANALYSISDIR, out), "wb+") as f:
        f.write("\t".join(cols) + "\n")

# Load cached pedestal fits; start empty when the cache file is absent.
# NOTE(review): the bare string below is a no-op statement -- presumably a
# status print was intended.
try:
    "Loading pedestal data"
    pedestal_data = utils.load_pedestal_data(os.path.join(ANALYSISDIR, pedestal))
except IOError:
    pedestal_data = {}

utils.load_male_list(clones, os.path.join(ANALYSISDIR, "male_list.csv"))
utils.load_manual_curation(clones, os.path.join(ANALYSISDIR, "manual_curation.csv"))

# Main loop: analyze each clone image, optionally fit the pedestal, and
# append one result row per clone.
# NOTE(review): nesting reconstructed from control flow -- confirm whether
# the "fitPedestal" block and write_clone sit at the per-clone level as
# shown here.
if analysis:
    for barcode in clones.keys():
        for dt in clones[barcode].keys():
            clone = clones[barcode][dt]["full"]
            if not clone.analyzed:
                if clone.filebase in pedestal_data.keys(): clone.pedestal_analyzed = True
                else: clone.pedestal_analyzed = False
                print "Analyzing " + clone.filebase
                utils.analyze_clone(clone, flags, pedestal_data=pedestal_data)
            if "fitPedestal" in flags:
                if not clone.pedestal_analyzed:
                    try:
                        im = cv2.imread(os.path.join(DATADIR, clone.filepath), cv2.IMREAD_GRAYSCALE)
                        clone.initialize_pedestal(im)
                        print "Fitting pedestal for " + clone.filebase
                        clone.fit_pedestal(im)
                        pedestal_data[clone.filebase] = [clone.pedestal, clone.ipedestal]
                        utils.append_pedestal_line(clone.filebase, pedestal_data[clone.filebase], os.path.join(ANALYSISDIR, pedestal))
                        #utils.analyze_clone(clone, ["doPedestalScore"], pedestal_data=pedestal_data)
                    except Exception as e:
                        print "Failed to fit pedestal for " + clone.filebase + " because of " + str(e)
            #utils.save_clonelist(clones, ANALYSISDIR, "analysis_results_test.txt", cols)
            utils.write_clone(clone, cols, ANALYSISDIR, out)
| [
"edwardsa@janelia.hhmi.org"
] | edwardsa@janelia.hhmi.org |
1acc3b8f6a2e1c850b698629893c7c179aceb189 | c23e10f2a67ac37d2aa39d193b518251a2a2e03a | /boardproject/boardapp/migrations/0003_auto_20210524_0642.py | 0fe47a03f9ace8075fb09f409ac0262cd54d386e | [] | no_license | pecop/udemy-django-3apps | cd2bf976ae3db28bcae0b8a1b23f4bd55881cabd | fc214bcc04ca439873d55265e4d2aade4b5701c0 | refs/heads/master | 2023-05-02T01:31:50.951907 | 2021-05-24T08:06:04 | 2021-05-24T08:06:04 | 370,271,858 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 746 | py | # Generated by Django 3.2.3 on 2021-05-24 06:42
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make BoardModel's good/read counters and readtext optional.

    All three fields become nullable in the database (null=True) and
    optional in forms/admin (blank=True).
    """

    dependencies = [
        ('boardapp', '0002_rename_auther_boardmodel_author'),
    ]

    operations = [
        migrations.AlterField(
            model_name='boardmodel',
            name='good',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='boardmodel',
            name='read',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='boardmodel',
            name='readtext',
            field=models.TextField(blank=True, null=True),
        ),
    ]
| [
"back.to.the.future52@gmail.com"
] | back.to.the.future52@gmail.com |
6f463313a068c75251f01e1d44480afd5b84827e | aa2533eb375d06f6b73aaff0fac6bacbdcaab458 | /src/conf.py | e3adda167afe1c0f3ab5359391d8db9b05b89d2b | [] | no_license | Rain0193/automonkey | 1f08afd6b353ec4307ed34909fd45de3debc6819 | 32168429cf771964dbcaae735611893c134a5a95 | refs/heads/master | 2021-08-11T08:38:59.214392 | 2017-11-13T12:26:19 | 2017-11-13T12:26:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,543 | py | #!/usr/bin/evn python
# -*- coding:utf-8 -*-
# @author: zhangzhao_lenovo@126.com
# @date: 20161005
# @version: 1.0.0.1009
import yaml
import os,platform
def dictinsertdict(dicta, dictb):
    """Recursively merge ``dicta`` into ``dictb`` in place.

    Keys missing from ``dictb`` are copied over; when BOTH sides hold a
    dict for the same key the merge recurses, otherwise ``dicta``'s value
    wins.  Used to overlay a loaded YAML config onto the built-in defaults.

    Fixes over the original:
    - The old code decided "key present" via truthiness of ``dictb[k]`` and
      recursed whenever ``dicta``'s value was a dict, crashing with
      AttributeError when the existing value was a truthy non-dict (e.g. a
      string default overridden by a YAML mapping).
    - When the existing value was falsy, the YAML dict object itself was
      aliased into ``dictb``; recursing instead keeps ``dictb``'s own dict.
    """
    for k, v in dicta.items():
        if k in dictb and isinstance(v, dict) and isinstance(dictb[k], dict):
            dictinsertdict(v, dictb[k])
        else:
            dictb[k] = v
class Conf():
    """Default configuration for the UI-monkey test runner.

    ``__init__`` populates ``self.conf`` with every supported setting;
    ``load`` overlays a YAML file on top of those defaults.
    """

    def __init__(self):
        # Build the full default config dict.
        self.conf = {}
        self.conf['pluginlist'] = []
        self.conf['saveScreen'] = True
        self.conf['pageobject'] = False
        self.conf['reportTitle'] = ''
        self.conf['screenshotTimeout'] = 20
        self.conf['currentDriver'] = 'Android'
        self.conf['tagLimitMax'] = 6
        self.conf['tagLimit'] = []
        self.conf['showCancel'] = False
        self.conf['maxTime'] = 3600*3  # hard cap on a run, seconds
        # Result directory: next to this file on Windows, fixed Jenkins
        # workspace path on Linux.
        if 'Windows' in platform.system():
            self.conf['resultDir'] = '%s%sresult' % (os.path.split(os.path.realpath(__file__))[0], os.path.sep)
        else:
            self.conf['resultDir'] = '/home/zhangzhao/work/test/job/workspace/Pandatv_uimonkeytest_android'
        self.conf['gt'] = False
        # Appium capabilities shared by both platforms.
        capability = {}
        capability['app'] = ''
        capability['udid'] = ''
        capability['noRest'] = False
        capability['autoWebview'] = False
        capability['autoLaunch'] = True
        capability['unicodeKeyboard'] = True
        capability['resetKeyboard'] = True
        self.conf['capability'] = capability
        # Android-specific capabilities.
        androidcapability = {}
        androidcapability['platformName'] = 'android'
        androidcapability['deviceName'] = 'android'
        androidcapability['appPackage'] = ''
        androidcapability['appActivity'] = ''
        androidcapability['appWaitActivity'] = ''
        androidcapability['mainActivity'] = 'com.panda.videoliveplatform.MainFragmentActivity'
        self.conf['androidCapability'] = androidcapability
        # iOS-specific capabilities.
        ioscapability = {}
        ioscapability['automationName'] = 'XCUITest'
        ioscapability['bundleID'] = ''
        ioscapability['autoAcceptAlerts'] = True
        ioscapability['platformVersion'] = '10.2.1'
        ioscapability['platformName'] = 'iOS'
        ioscapability['deviceName'] = 'iPhone 6'
        self.conf['iosCapability'] = ioscapability
        # Element attributes used when building xpath locators.
        self.conf['xpathAttributes'] = ['name','label','value','resource-id','content-desc','index','text']
        # Crawl scope / traversal settings.
        self.conf['defineUrl'] = []
        self.conf['baseUrl'] = []
        self.conf['appWhiteList'] = []
        self.conf['maxDepth'] = 6
        self.conf['headFirst'] = True
        self.conf['enterWebView'] = True
        self.conf['urlBlackList'] = []
        self.conf['urlWhiteList'] = []
        self.conf['defaultBackAction'] = []
        self.conf['backButton'] = []
        # Element selection: which elements to tap, in priority order.
        self.conf['firstList'] = []
        self.conf['selectedList'] = ["//*[contains(name(), 'Text')]",
                                     "//*[contains(name(), 'Image')]",
                                     "//*[contains(name(), 'Button')]",
                                     "//*[contains(name(), 'CheckBox')]"]
        self.conf['lastList'] = []
        self.conf['blackList'] = []
        self.conf['extrablackList'] = []
        # Scripted actions run at various hook points.
        self.conf['elementActions'] = []
        self.conf['startupActions'] = ["time.sleep(3)",
                                       "swipeto(driver,\"left\")",
                                       "swipeto(driver,\"left\")",
                                       "swipeto(driver,\"left\")",
                                       "swipeto(driver,\"left\")",
                                       "swipeto(driver,\"left\")"]
        self.conf['beforeElementAction'] = []
        self.conf['afterElementAction'] = []
        self.conf['afterUrlFinished'] = []
        # Monkey-mode settings.
        self.conf['monkeyEvents'] = []
        self.conf['monkeyRunTimeSeconds'] = 30
        self.conf['schemaBlackList'] = []
        self.conf['beforeRefreshpageAction'] = []
        self.conf['randomselect'] = 1
        self.conf['startupClosePopenSysmenu'] = []
        self.conf['elementActionsInanyURLwilldo'] = []

    def load(self, path):
        """Overlay the YAML file at ``path`` onto the defaults.

        Returns the merged config dict (also kept on ``self.conf``).
        """
        # 'with' guarantees the handle is closed; the original leaked it
        # (and shadowed the builtin name `file`).
        with open(path, encoding='gbk') as fh:
            # NOTE(review): yaml.load without an explicit Loader can
            # construct arbitrary objects; prefer yaml.safe_load if the
            # config file is ever untrusted.
            yamlconf = yaml.load(fh)
        dictinsertdict(yamlconf, self.conf)
        return self.conf
def test():
    """Smoke-test: load the bundled panda.yml and dump the merged config."""
    base_dir = os.path.split(os.path.realpath(__file__))[0]
    sep = os.path.sep
    ymlpath = sep.join([base_dir, 'conf', 'panda.yml'])
    cfg = Conf()
    cfg.load(ymlpath)
    print(cfg.conf)


if __name__ == "__main__":
    test()
| [
"zhangzhao_lenovo@126.com"
] | zhangzhao_lenovo@126.com |
04c328687a8499e092c28512e12f5cd8237575e3 | 4a971163f1b3ed376913825a8e85bfd7122a16e2 | /forum/migrations/0019_auto_20210416_1137.py | 97d6cf54e0941d53aa7a417a0dbb1944349210a5 | [] | no_license | kifahnaim/djangoAimarena | a314b77e95b86290274721f95af8a036025447d3 | 0961afa7f8df1a15a9af22b04f908dbde4f880c3 | refs/heads/main | 2023-04-28T04:52:33.144912 | 2021-05-15T06:48:11 | 2021-05-15T06:48:11 | 367,449,680 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,119 | py | # Generated by Django 3.0.5 on 2021-04-16 11:37
from django.db import migrations, models
class Migration(migrations.Migration):
    """Move the per-status visibility flags from Topic onto SubTopic.

    Removes the six ``*_visible`` booleans from ``topic`` and re-adds the
    same fields on ``subtopic``.  Defaults mirror the removed fields
    (accepted/accepted-appeal start hidden, the rest start visible);
    ``preserve_default=False`` marks each default as a one-off backfill
    value for existing rows rather than a model default.
    """

    dependencies = [
        ('forum', '0018_auto_20210416_1136'),
    ]

    operations = [
        # Drop the flags from Topic...
        migrations.RemoveField(
            model_name='topic',
            name='accepted_visible',
        ),
        migrations.RemoveField(
            model_name='topic',
            name='acceptedappeal_visible',
        ),
        migrations.RemoveField(
            model_name='topic',
            name='is_visible',
        ),
        migrations.RemoveField(
            model_name='topic',
            name='pinned_visible',
        ),
        migrations.RemoveField(
            model_name='topic',
            name='rejected_visible',
        ),
        migrations.RemoveField(
            model_name='topic',
            name='rejectedappeal_visible',
        ),
        # ...and recreate them on SubTopic.
        migrations.AddField(
            model_name='subtopic',
            name='accepted_visible',
            field=models.BooleanField(default=False),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='subtopic',
            name='acceptedappeal_visible',
            field=models.BooleanField(default=False),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='subtopic',
            name='is_visible',
            field=models.BooleanField(default=True),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='subtopic',
            name='pinned_visible',
            field=models.BooleanField(default=True),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='subtopic',
            name='rejected_visible',
            field=models.BooleanField(default=True),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='subtopic',
            name='rejectedappeal_visible',
            field=models.BooleanField(default=True),
            preserve_default=False,
        ),
    ]
| [
"naimkifah@gmail.com"
] | naimkifah@gmail.com |
37a2620996f5b4f1543105bffdc6fb58220c624c | 6a4ebebbe0d7f81efc4f1749054a2ed7242c0e58 | /granary/test/test_googleplus.py | e12902c656d570b1ffc904713e8a4b875bb87829 | [
"LicenseRef-scancode-public-domain"
] | permissive | skylarker/granary | 6e192ecd2475febb3585728d5ba7afe34742107d | 2fd8ef017588b955e78606242ce582849cfd57ac | refs/heads/master | 2020-12-26T21:35:04.155528 | 2016-04-18T18:15:30 | 2016-04-18T18:15:30 | 56,891,160 | 1 | 0 | null | 2016-04-22T23:43:09 | 2016-04-22T23:43:09 | null | UTF-8 | Python | false | false | 21,560 | py | # coding=utf-8
"""Unit tests for googleplus.py.
See googleapiclient/http.py for details on using RequestMockBuilder to mock out
Google API calls. (This is the current doc on apiclient mocks, but it doesn't
mention RequestMockBuilder:
https://developers.google.com/api-client-library/python/guide/mocks )
TODO: figure out how to check the query parameters. Right now they're ignored. :/
"""
__author__ = ['Ryan Barrett <granary@ryanb.org>']
import copy
from email.message import Message
from email.mime.multipart import MIMEMultipart
import json
import os
from apiclient import discovery
from apiclient import http
import httplib2
from oauth_dropins import googleplus as oauth_googleplus
from oauth_dropins.webutil import util
from oauth_dropins.webutil import testutil
from granary import appengine_config
appengine_config.GOOGLE_CLIENT_ID = 'my client id'
appengine_config.GOOGLE_CLIENT_SECRET = 'my client secret'
from granary import googleplus
DISCOVERY_DOC = appengine_config.read(
os.path.join(os.path.dirname(__file__), '../../googleplus_api_discovery.json'))
def tag_uri(name):
  """Return an ActivityStreams tag: URI for *name* under plus.google.com."""
  return util.tag_uri('plus.google.com', name)
ACTIVITY_GP = { # Google+
'kind': 'plus#activity',
'verb': 'post',
'id': '001',
'actor': {'id': '444', 'displayName': 'Charles'},
'object': {
'content': 'my post',
'url': 'http://plus.google.com/001',
},
}
ACTIVITY_AS = { # ActivityStreams
'kind': 'plus#activity',
'verb': 'post',
'id': tag_uri('001'),
'actor': {'id': tag_uri('444'), 'displayName': 'Charles'},
'object': {
'content': 'my post',
'url': 'http://plus.google.com/001',
'author': {'id': tag_uri('444'), 'displayName': 'Charles'},
'to': [{'objectType':'group', 'alias':'@public'}],
},
}
COMMENT_GP = { # Google+
'kind': 'plus#comment',
'verb': 'post',
'id': 'zyx.888',
'actor': {'id': '777', 'displayName': 'Eve'},
'object': {'content': 'my content'},
'inReplyTo': [{'url': 'http://post/url'}],
}
COMMENT_AS = { # ActivityStreams
'kind': 'plus#comment',
'verb': 'post',
'id': tag_uri('zyx.888'),
'url': 'http://post/url#zyx%23888',
'author': {'id': tag_uri('777'), 'displayName': 'Eve'},
'content': 'my content',
'object': {'content': 'my content'},
'inReplyTo': [{'url': 'http://post/url'}],
'to': [{'objectType':'group', 'alias':'@public'}],
}
PLUSONER = { # Google+
'kind': 'plus#person',
'id': '222',
'displayName': 'Alice',
'url': 'https://profiles.google.com/alice',
'image': {'url': 'https://alice/picture'},
}
LIKE = { # ActivityStreams
'id': tag_uri('001_liked_by_222'),
'url': 'http://plus.google.com/001#liked-by-222',
'objectType': 'activity',
'verb': 'like',
'object': {'url': 'http://plus.google.com/001'},
'author': {
'kind': 'plus#person',
'id': tag_uri('222'),
'displayName': 'Alice',
'url': 'https://profiles.google.com/alice',
'image': {'url': 'https://alice/picture'},
},
}
RESHARER = { # Google+
'kind': 'plus#person',
'id': '444',
'displayName': 'Bob',
'url': 'https://plus.google.com/bob',
'image': {'url': 'https://bob/picture'},
}
SHARE = { # ActivityStreams
'id': tag_uri('001_shared_by_444'),
'url': 'http://plus.google.com/001#shared-by-444',
'objectType': 'activity',
'verb': 'share',
'object': {'url': 'http://plus.google.com/001'},
'author': {
'kind': 'plus#person',
'id': tag_uri('444'),
'displayName': 'Bob',
'url': 'https://plus.google.com/bob',
'image': {'url': 'https://bob/picture'},
},
}
ACTIVITY_GP_EXTRAS = copy.deepcopy(ACTIVITY_GP) # Google+
ACTIVITY_GP_EXTRAS['object'].update({
'replies': {'totalItems': 1},
'plusoners': {'totalItems': 1},
'resharers': {'totalItems': 1},
})
ACTIVITY_AS_EXTRAS = copy.deepcopy(ACTIVITY_AS) # ActivityStreams
ACTIVITY_AS_EXTRAS['object'].update({
'replies': {'totalItems': 1, 'items': [COMMENT_AS]},
'plusoners': {'totalItems': 1},
'resharers': {'totalItems': 1},
'tags': [LIKE, SHARE],
})
# HTML from http://plus.google.com/
HTML_ACTIVITY_GP = [
["..."],
[1002, None, None, None, None, [1001, "z13gjrz4ymeldtd5f04chnrixnvpjjqy42o"],
{"33558957" : [
"",
"",
"",
"David Barrett",
"",
1440425513401,
None,
[], # first comment (if any) would be here
"z13gjrz4ymeldtd5f04chnrixnvpjjqy42o",
"",
"a:ext:client.sharebox.108380595987.apps.googleusercontent.com",
[None],
[None],
"",
None,
[None],
"105815303293125791402",
[None],
"https://lh4.googleusercontent.com/-OvNQMFbbks0/AAAAAAAAAAI/AAAAAAAAOuo/YXnsx5bfWxo/photo.jpg",
None,
u"Hi! It’s been a while since I’ve written because we’ve been hard at work, but I’m very happy to take the wraps off our latest feature (or really, series of features): Realtime Expense Reports. I know I’ve been hyping this up for a long time, and you’re…",
"+DavidBarrettQuinthar/posts/VefFHLMoCqV",
0,
0,
"./105815303293125791402",
[None], None,
[ # location
41.230564,
9.172682,
"(41.2305630, 9.1726818)",
"",
None,
"/maps/api/staticmap?center=41.230564,9.172682&zoom=14&size=300x220&sensor=false&markers=41.230564,9.172682&client=google-buzz&signature=GDLZ49Fe0-uc4BoVt-e7p-OmZ50%3D",
["1152921504606846977", "-7273273746059208260"],
"",
"https://maps.google.com?ll=41.230564,9.172682&q=41.230564,9.172682",
None,
"https://maps-api-ssl.google.com/maps/api/staticmap?center=41.230564,9.172682&zoom=15&size=100x100&sensor=false&client=google-buzz&signature=Doqggt3WB5BQzKieZRSA2VwHRXM%3D",
0, None, 412305629, 91726818, None, None, [None]
],
"", 0, 0, 0, 1, None, 0, 1, None, 0,
1440425513401,
] + [None] * 58 + [ # collapsed for brevity
[
[335, 0],
"http://blog.expensify.com/2015/08/24/realtime-expense-reports-are-here-and-so-much-more/",
None, None, None, None,
[
1440425513266,
"http://blog.expensify.com/2015/08/24/realtime-expense-reports-are-here-and-so-much-more/",
"http://blog.expensify.com/2015/08/24/realtime-expense-reports-are-here-and-so-much-more/",
"http://blog.expensify.com/2015/08/24/realtime-expense-reports-are-here-and-so-much-more/",
[None], [None], [None]
],
"http://blog.expensify.com/2015/08/24/realtime-expense-reports-are-here-and-so-much-more/",
{
"39748951" : [
"http://blog.expensify.com/2015/08/24/realtime-expense-reports-are-here-and-so-much-more/",
"http://0.gravatar.com/blavatar/ee4c59993abdb971416349dee59ca9d1?s=200&ts=1440425508",
"Realtime Expense Reports are Here! (And so much more...)",
"Hi! It's been a while since I've written because we've been hard at work, but I'm very happy to take the wraps off our latest feature (or really, series of features): Realtime Expense Reports. I kn...",
None,
["//lh6.googleusercontent.com/proxy/IvWQIbjjvIWCUhTACtHDQRysGY2NYqf-A6XWPOGMLdr4W5BHFjIeQw4ZOTDrkDA2oc1kKfCgkV7gT-iQIFvOaeUhtfEf_3BPBTNsmesTGSawvh5kednyc-Oi8MPmpdRZ_SE2=w120-h120",
120, 120, None, None, None, None, 120,
[2,
"https://lh6.googleusercontent.com/proxy/IvWQIbjjvIWCUhTACtHDQRysGY2NYqf-A6XWPOGMLdr4W5BHFjIeQw4ZOTDrkDA2oc1kKfCgkV7gT-iQIFvOaeUhtfEf_3BPBTNsmesTGSawvh5kednyc-Oi8MPmpdRZ_SE2=w800-h800"]],
"//s2.googleusercontent.com/s2/favicons?domain=blog.expensify.com",
[[[350, 335, 0], "http://quinthar.com/",
{"41007156" : ["http://quinthar.com/", None, None, None, None, None,
None, [None], None, None, [None]]}]],
None, None, [None], "blog.expensify.com",] + [None] * 172 + [# collapsed for brevity
[[339, 338, 336, 335, 0],
"http://0.gravatar.com/blavatar/ee4c59993abdb971416349dee59ca9d1?s=200&ts=1440425508",
{"40265033" : [
"http://0.gravatar.com/blavatar/ee4c59993abdb971416349dee59ca9d1?s=200&ts=1440425508",
"http://0.gravatar.com/blavatar/ee4c59993abdb971416349dee59ca9d1?s=200&ts=1440425508",
None, None, None,
["//lh6.googleusercontent.com/proxy/IvWQIbjjvIWCUhTACtHDQRysGY2NYqf-A6XWPOGMLdr4W5BHFjIeQw4ZOTDrkDA2oc1kKfCgkV7gT-iQIFvOaeUhtfEf_3BPBTNsmesTGSawvh5kednyc-Oi8MPmpdRZ_SE2=w120-h120",
120, 120, None, None, None, None, 120,
[2,
"https://lh6.googleusercontent.com/proxy/IvWQIbjjvIWCUhTACtHDQRysGY2NYqf-A6XWPOGMLdr4W5BHFjIeQw4ZOTDrkDA2oc1kKfCgkV7gT-iQIFvOaeUhtfEf_3BPBTNsmesTGSawvh5kednyc-Oi8MPmpdRZ_SE2=w800-h800"]],
# ...
]}]]}], # ...
]}],
# second element is non-post, under 7 items long
[1002, None, None],
# third element is non-post, item 6 is empty
[1002, None, None, None, None, None, {}],
] # ...
HTML_ACTIVITIES_GP_HEADER = """
<!DOCTYPE html><html lang="en" dir="ltr" ><head><meta name="referrer" content="origin"><base href="https://plus.google.com/"><style>
...
</style></head><body class="Td lj"><input type="text" name="hist_state" id="hist_state" style="display:none;"><iframe id="hist_frame" name="hist_frame1623222153" class="ss" tabindex="-1"></iframe><script>window['OZ_wizstart'] && window['OZ_wizstart']()</script>
<script>AF_initDataCallback({key: '199', isError: false , hash: '13', data:[2,0]
});</script><script>AF_initDataCallback({key: '161', isError: false , hash: '14', data:["os.con",[[]
,"these few lines test the code that collapses commas",
[,1,1,,,,20,,"social.google.com",[,]
,,,2,,,0,,15,,[[1002,2],"..."]],,[,],,,"""
HTML_ACTIVITIES_GP_FOOTER = """
]
]
});</script></body></html>"""
HTML_ACTIVITY_AS = { # Google+
'id': tag_uri('z13gjrz4ymeldtd5f04chnrixnvpjjqy42o'),
'url': 'https://plus.google.com/+DavidBarrettQuinthar/posts/VefFHLMoCqV',
'actor': {
'id': tag_uri('105815303293125791402'),
'url': 'https://plus.google.com/105815303293125791402',
'objectType': 'person',
'displayName': 'David Barrett',
'image': {
'url': 'https://lh4.googleusercontent.com/-OvNQMFbbks0/AAAAAAAAAAI/AAAAAAAAOuo/YXnsx5bfWxo/photo.jpg',
},
},
'verb': 'post',
'object': {
'id': tag_uri('z13gjrz4ymeldtd5f04chnrixnvpjjqy42o'),
'url': 'https://plus.google.com/+DavidBarrettQuinthar/posts/VefFHLMoCqV',
'objectType': 'note',
'published': '2015-08-24T14:11:53Z',
'updated': '2015-08-24T14:11:53Z',
'content': u'Hi! It’s been a while since I’ve written because we’ve been hard at work, but I’m very happy to take the wraps off our latest feature (or really, series of features): Realtime Expense Reports. I know I’ve been hyping this up for a long time, and you’re…',
'attachments': [
{
'objectType': 'article',
'displayName': 'Realtime Expense Reports are Here! (And so much more...)',
'content': "Hi! It's been a while since I've written because we've been hard at work, but I'm very happy to take the wraps off our latest feature (or really, series of features): Realtime Expense Reports. I kn...",
'url': 'http://blog.expensify.com/2015/08/24/realtime-expense-reports-are-here-and-so-much-more/',
'image': {
'url': 'http://0.gravatar.com/blavatar/ee4c59993abdb971416349dee59ca9d1?s=200&ts=1440425508',
}
}
]
},
'location': {
'displayName': '(41.2305630, 9.1726818)',
'url': 'https://maps.google.com?ll=41.230564,9.172682&q=41.230564,9.172682',
'latitude': 41.230564,
'longitude': 9.172682,
},
# 'access': {
# 'kind': 'plus#acl',
# 'description': 'Public',
# 'items': [
# {
# 'type': 'public'
# }
# ]
# }
}
CREDS_JSON = json.dumps({
'access_token': 'my token',
'client_id': appengine_config.GOOGLE_CLIENT_ID,
'client_secret': appengine_config.GOOGLE_CLIENT_SECRET,
'refresh_token': 'my refresh token',
'token_expiry': '',
'token_uri': '',
'user_agent': '',
'invalid': '',
})
class GooglePlusTest(testutil.HandlerTest):
  def setUp(self):
    """Create a GooglePlus source backed by a fake OAuth entity.

    The entity carries canned user JSON and CREDS_JSON so no real OAuth
    handshake happens.
    """
    super(GooglePlusTest, self).setUp()
    self.auth_entity = oauth_googleplus.GooglePlusAuth(
        id='my_string_id',
        user_json=json.dumps({
            'displayName': 'Bob',
        }),
        creds_json=CREDS_JSON)
    self.googleplus = googleplus.GooglePlus(auth_entity=self.auth_entity)
  def tearDown(self):
    # Reset the module-level mocked API service so state doesn't leak
    # between tests (init() below sets it per-test).
    oauth_googleplus.json_service = None
  def init(self, **kwargs):
    """Sets up the API service from test_googleplus_discovery.

    Pass a requestBuilder or http kwarg to inject expected HTTP requests and
    responses.
    """
    # Builds the mocked Google API client from the bundled discovery doc;
    # kwargs are forwarded straight to apiclient's build_from_document.
    oauth_googleplus.json_service = discovery.build_from_document(
      DISCOVERY_DOC, **kwargs)
  def test_get_comment(self):
    # Mock the plus.comments.get endpoint and check the Google+ comment is
    # converted to its ActivityStreams form.
    self.init(requestBuilder=http.RequestMockBuilder({
      'plus.comments.get': (None, json.dumps(COMMENT_GP))  # None means 200 OK
    }))
    self.assert_equals(COMMENT_AS, self.googleplus.get_comment('234'))
  def test_get_activity(self):
    # Fetching a single activity by id goes through plus.activities.get and
    # returns a one-element ActivityStreams list.
    self.init(requestBuilder=http.RequestMockBuilder({
      'plus.activities.get': (None, json.dumps(ACTIVITY_GP))
    }))
    self.assert_equals([ACTIVITY_AS],
                       self.googleplus.get_activities(activity_id='234'))
  def test_get_activities_no_extras_to_fetch(self):
    self.init(requestBuilder=http.RequestMockBuilder({
      'plus.activities.list': (None, json.dumps({
        'items': [ACTIVITY_GP, ACTIVITY_GP],
      })),
    },
      # ACTIVITY_GP doesn't say there are any comments, +1s, or shares (via
      # totalItems), so we shouldn't ask for them.
      check_unexpected=True))
    got = self.googleplus.get_activities(fetch_replies=True, fetch_likes=True,
                                         fetch_shares=True)
    self.assert_equals([ACTIVITY_AS, ACTIVITY_AS], got)
def test_get_activities_fetch_extras(self):
self.init()
# Generate minimal fake responses for each request in the batch.
#
# Test with multiple activities to cover the bug described in
# https://github.com/snarfed/bridgy/issues/22#issuecomment-56329848 :
# util.CacheDict.get_multi() didn't originally handle generator args.
batch = MIMEMultipart()
for i, item in enumerate((COMMENT_GP, PLUSONER, RESHARER) * 2):
msg = Message()
msg.set_payload('HTTP/1.1 200 OK\n\r\n\r\n' + json.dumps({'items': [item]}))
msg['Content-ID'] = '<response-abc+%d>' % (i + 1)
batch.attach(msg)
# as_string() must be called before get_boundary() to generate the
# boundaries between parts, but can't be called again, so we capture the
# result.
batch_str = batch.as_string()
gpe_1 = ACTIVITY_GP_EXTRAS
gpe_2 = copy.deepcopy(gpe_1)
gpe_2['id'] = '002'
http_seq = http.HttpMockSequence(
[({'status': '200'}, json.dumps({'items': [gpe_1, gpe_2]})),
({'status': '200',
'content-type': 'multipart/mixed; boundary="%s"' % batch.get_boundary()},
batch_str),
({'status': '200'}, json.dumps({'items': [gpe_1, gpe_2]})),
])
self.auth_entity.http = lambda: http_seq
ase_1 = ACTIVITY_AS_EXTRAS
ase_2 = copy.deepcopy(ase_1)
ase_2['id'] = tag_uri('002')
ase_2['object']['tags'][0]['id'] = tag_uri('002_liked_by_222')
ase_2['object']['tags'][1]['id'] = tag_uri('002_shared_by_444')
cache = util.CacheDict()
self.assert_equals([ase_1, ase_2], self.googleplus.get_activities(
fetch_replies=True, fetch_likes=True, fetch_shares=True, cache=cache))
for id in '001', '002':
for prefix in 'AGL ', 'AGS ':
self.assertEquals(1, cache[prefix + id])
# no new extras, so another request won't fill them in
as_1 = copy.deepcopy(ACTIVITY_AS)
for field in 'replies', 'plusoners', 'resharers':
as_1['object'][field] = {'totalItems': 1}
as_2 = copy.deepcopy(as_1)
as_2['id'] = tag_uri('002')
self.assert_equals([as_1, as_2], self.googleplus.get_activities(
fetch_replies=True, fetch_likes=True, fetch_shares=True, cache=cache))
def test_get_activities_search(self):
self.init(requestBuilder=http.RequestMockBuilder({
'plus.activities.search': (None, json.dumps({'items': [ACTIVITY_GP]})),
}))
self.assert_equals([ACTIVITY_AS],
self.googleplus.get_activities(search_query='qwert'))
# TODO: resurrect?
# def test_get_activities_request_etag(self):
# self.init()
# http_seq = http.HttpMockSequence(
# [({'status': '200'}, json.dumps({'items': [item]}))])
# self.auth_entity.http = lambda: http_seq
# resp = self.googleplus.get_activities_response(
# fetch_replies=True, fetch_likes=True, fetch_shares=True)
# self.assertEquals('"my etag"', resp['etag'])
def test_get_activities_response_etag(self):
self.init(requestBuilder=http.RequestMockBuilder({
'plus.activities.list': (httplib2.Response({'status': 200}),
json.dumps({'etag': '"my etag"'})),
}))
resp = self.googleplus.get_activities_response(
fetch_replies=True, fetch_likes=True, fetch_shares=True)
self.assertEquals('"my etag"', resp['etag'])
def test_get_activities_304_not_modified(self):
"""Requests with matching ETags return 304 Not Modified."""
self.init(requestBuilder=http.RequestMockBuilder({
'plus.activities.list': (httplib2.Response({'status': 304}), '{}'),
}))
self.assert_equals([], self.googleplus.get_activities(
fetch_replies=True, fetch_likes=True, fetch_shares=True))
def test_postprocess_actor_url_field(self):
pa = self.googleplus.postprocess_actor
self.assertEqual({'foo': 'bar'}, pa({'foo': 'bar'}))
self.assertEqual({'url': 'x',
'urls': [{'value': 'x'}]},
pa({'urls': [{'value': 'x'}]}))
self.assertEqual({'url': 'x',
'urls': [{'value': 'x'}, {'value': 'y'}]},
pa({'urls': [{'value': 'x'}, {'value': 'y'}]}))
# check alias
self.assertEquals(self.googleplus.postprocess_actor,
self.googleplus.user_to_actor)
def test_get_actor_minimal(self):
self.assert_equals({'displayName': 'Bob'}, self.googleplus.get_actor())
def test_get_actor(self):
user = {
'id': '222',
'displayName': 'Alice',
'urls': [{'value': 'https://profiles.google.com/alice'}],
}
self.auth_entity.user_json = json.dumps(user)
user.update({
'id': tag_uri('222'),
'url': 'https://profiles.google.com/alice',
})
self.assert_equals(user, self.googleplus.get_actor())
def test_get_actor_other_user(self):
with self.assertRaises(NotImplementedError):
self.googleplus.get_actor('other')
def test_get_activities_extra_fetches_fail(self):
"""Sometimes the extras fetches return errors. Ignore that."""
self.init()
batch = MIMEMultipart()
for i in range(3):
msg = Message()
msg.set_payload('HTTP/1.1 500 Foo Bar\n\r\n\r\n')
msg['Content-ID'] = '<response-abc+%d>' % (i + 1)
batch.attach(msg)
# as_string() must be called before get_boundary() to generate the
# boundaries between parts, but can't be called again, so we capture the
# result.
batch_str = batch.as_string()
self.auth_entity.http = lambda: http.HttpMockSequence(
[({'status': '200'}, json.dumps({'items': [ACTIVITY_GP_EXTRAS]})),
({'status': '200',
'content-type': 'multipart/mixed; boundary="%s"' % batch.get_boundary()},
batch_str),
])
cache = util.CacheDict()
self.assert_equals([ACTIVITY_AS], self.googleplus.get_activities(
fetch_replies=True, fetch_likes=True, fetch_shares=True, cache=cache))
for prefix in 'AGC ', 'AGL ', 'AGS ':
self.assertNotIn(prefix + '001', cache)
def test_html_to_activities(self):
html = (HTML_ACTIVITIES_GP_HEADER + json.dumps(HTML_ACTIVITY_GP) +
HTML_ACTIVITIES_GP_FOOTER)
self.assert_equals([HTML_ACTIVITY_AS], self.googleplus.html_to_activities(html))
def test_html_to_activities_plusoned(self):
html_gp = copy.deepcopy(HTML_ACTIVITY_GP)
html_gp[1][6].values()[0][69] = [
202,
[['Billy Bob',
'1056789',
1,
1,
'https://lh3.googleusercontent.com/billybob.jpg',
'https://plus.google.com/+BillyBob',
'male',
]],
# ...
]
expected = copy.deepcopy(HTML_ACTIVITY_AS)
expected.update({
'verb': 'like',
'actor': {
'id': tag_uri('1056789'),
'url': 'https://plus.google.com/+BillyBob',
'objectType': 'person',
'displayName': 'Billy Bob',
'image': {'url': 'https://lh3.googleusercontent.com/billybob.jpg'},
},
})
html = (HTML_ACTIVITIES_GP_HEADER + json.dumps(html_gp) +
HTML_ACTIVITIES_GP_FOOTER)
self.assert_equals([expected], self.googleplus.html_to_activities(html))
def test_html_to_activities_similar_to_plusoned(self):
html_gp = copy.deepcopy(HTML_ACTIVITY_GP)
for data_at_69 in None, [], [None], [None, None], [None, [None]]:
html_gp[1][6].values()[0][69] = data_at_69
html = (HTML_ACTIVITIES_GP_HEADER + json.dumps(html_gp) +
HTML_ACTIVITIES_GP_FOOTER)
self.assert_equals([HTML_ACTIVITY_AS],
self.googleplus.html_to_activities(html))
def test_html_to_activities_missing_data(self):
self.assert_equals([], self.googleplus.html_to_activities(''))
| [
"git@ryanb.org"
] | git@ryanb.org |
99a04e93d0efdf9a383874eee21eb74616cb8109 | 30232fb6d21e037d73181bf89b1b938bd1bde8f5 | /BIKOD_01/line.py | 860676e8e6c383b8425ed75122c63d644dc24b86 | [] | no_license | flashgeomatics/Software_project | 3d1b58ce71fd3395a558eb593484cae51a683f5d | 7d8844845e8198d2b83452dd64dcc1ecd1f3c6bd | refs/heads/master | 2020-05-06T12:12:49.118441 | 2019-06-14T12:40:32 | 2019-06-14T12:40:32 | 180,108,923 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,961 | py | #Importing the required packages
import geopandas as gpd
import pandas as pd
from bokeh.models import ColumnDataSource, LabelSet, Select
from bokeh.plotting import figure, show, output_file
from bokeh.tile_providers import get_provider, Vendors #bokeh version 1.1
#from bokeh.tile_providers import CARTODBPOSITRON #bokeh version 1.0
from bokeh.io import curdoc
from bokeh.layouts import column, row
import math
from sqlalchemy import create_engine
engine = create_engine('postgresql://postgres:ruking29@localhost:5432/se4g')
bike = pd.read_sql_table('bike',engine)
# #FIRST GRAPH
d = pd.to_datetime(bike['time']).dt.date
bike['time'] = d
bike.rename(columns={'time':'date'}, inplace=True)
stat_names = list(bike)
del stat_names[1]
options=[]
for i in stat_names:
string = 'Station %s' %i
options.append(string)
days = []
for i in range(1,32):
days.append(str(i))
months = []
for i in range(1,13):
months.append(str(i))
curr_date = pd.to_datetime('1-1-2010')
hours = list(range(0,24))
data = ColumnDataSource({'x' : hours, 'y': list(bike[bike["date"] == curr_date.date()]['1'])})
#Create the Line plot
p = figure(title='Daily # of bikes in the station ', title_location='above', x_axis_label = 'Time(hours)', y_axis_label = '# of bikes', x_range=(1, 24))
p.vbar(x='x', top='y', source=data, width=0.6, color='red')
#p.circle(x = 'x', y = 'y', source=data, color = 'black', size = 10, alpha = 0.8)
p.title.text_color = 'black'
p.title.text_font_size = '15pt'
#Create Select Widget
select_widget_1 = Select(options = options, value = options[1],
title = 'Select a station')
select_widget_2 = Select(options =["January", "February", "March", "April", "May", "June", "July", "August","September", "October", "November", "December"], value = months[0], title = 'Select a month')
select_widget_3 = Select(options = days, value = days[0], title = 'Select a day')
def callback(attr, old, new):
column2plot = select_widget_1.value
day2plot = select_widget_3.value
month2plot = select_widget_2.value
date2plot = pd.to_datetime('2010-'+str(month2plot)+'-'+str(day2plot))
if len(column2plot) == 9:
data.data = {'x' : hours, 'y': list(bike[bike["date"] == date2plot.date()][str(column2plot[-1])])}
elif len(column2plot) == 10:
data.data = {'x' : hours, 'y': list(bike[bike["date"] == date2plot.date()][str(column2plot[-2]+column2plot[-1])])}
p.vbar(x='x', top='y', source = data, width=0.6, color='red')
#Update Select Widget to each interaction
select_widget_1.on_change('value', callback)
select_widget_2.on_change('value', callback)
select_widget_3.on_change('value', callback)
layout = column(row(column(select_widget_1, select_widget_2, select_widget_3), p))
#Output the plot
output_file("graph.html")
show(layout)
curdoc().add_root(layout)#Importing the required packages
| [
"noreply@github.com"
] | noreply@github.com |
0a5e3662691892d6bd7a535f5db74ef43571e670 | e9a9f795ae460dca5837facd73a854945d85e2e2 | /src/lib/count.py | edf2ec217dcabf55fab918cfae097b5204af18d0 | [
"Apache-2.0"
] | permissive | spongeb0b724/superl-url | c0dcc42857b3e82129c93987defe8cc92c591c05 | 42934e941f05eab3c7c31514157b94d70b437c8c | refs/heads/master | 2020-03-12T20:36:18.729683 | 2018-04-03T09:20:56 | 2018-04-03T09:20:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 309 | py | # -*- coding: utf-8 -*-
# Project = https://github.com/super-l/search-url.git
# Author = superl
# Blog = www.superl.org QQ:86717375
# Team = Code Security Team(C.S.T) | 铭剑创鼎
class SupCount():
all_totals = 0
all_checked_totals = 0
all_filter_totals = 0
all_delete_totals = 0
| [
"superl@0xcode.org"
] | superl@0xcode.org |
c345b0531b13c7d148425261eb95e192498d8921 | 21488828191cbd9aa42791d1ab557b826f9bdb29 | /node_modules/uws/build/config.gypi | a50453f820eb5557210f87a54b3f018fca8b7f1d | [
"Zlib",
"MIT"
] | permissive | EricSong8612/menu | 26aa97234a5f6a282e487a8c2ead708134f646a8 | 8bce0babcd007ef424fef33fc59fc44c163a19ce | refs/heads/master | 2021-01-16T07:13:14.811401 | 2017-08-11T15:29:10 | 2017-08-11T15:29:10 | 99,977,279 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,882 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"asan": 0,
"coverage": "false",
"debug_devtools": "node",
"force_dynamic_crt": 0,
"host_arch": "x64",
"icu_data_file": "icudt59l.dat",
"icu_data_in": "../../deps/icu-small/source/data/in/icudt59l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_locales": "en,root",
"icu_path": "deps/icu-small",
"icu_small": "true",
"icu_ver_major": "59",
"llvm_version": 0,
"node_byteorder": "little",
"node_enable_d8": "false",
"node_enable_v8_vtunejit": "false",
"node_install_npm": "true",
"node_module_version": 57,
"node_no_browser_globals": "false",
"node_prefix": "/usr/local",
"node_release_urlbase": "https://nodejs.org/download/release/",
"node_shared": "false",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_use_bundled_v8": "true",
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_lttng": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"node_use_v8_platform": "true",
"node_without_node_options": "false",
"openssl_fips": "",
"openssl_no_asm": 0,
"shlib_suffix": "57.dylib",
"target_arch": "x64",
"uv_parent_path": "/deps/uv/",
"uv_use_dtrace": "true",
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_enable_inspector": 1,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 0,
"v8_promise_internal_field_count": 1,
"v8_random_seed": 0,
"v8_use_snapshot": "false",
"want_separate_host_toolset": 0,
"want_separate_host_toolset_mkpeephole": 0,
"xcode_version": "7.0",
"nodedir": "/Users/JianSong/.node-gyp/8.1.4",
"standalone_static_library": 1,
"dry_run": "",
"legacy_bundling": "",
"save_dev": "",
"browser": "",
"only": "",
"viewer": "man",
"also": "",
"rollback": "true",
"usage": "",
"globalignorefile": "/usr/local/etc/npmignore",
"init_author_url": "",
"maxsockets": "50",
"shell": "/bin/bash",
"metrics_registry": "https://registry.npmjs.org/",
"parseable": "",
"shrinkwrap": "true",
"init_license": "ISC",
"timing": "",
"if_present": "",
"cache_max": "Infinity",
"init_author_email": "",
"sign_git_tag": "",
"cert": "",
"git_tag_version": "true",
"local_address": "",
"long": "",
"fetch_retries": "2",
"registry": "https://registry.npmjs.org/",
"key": "",
"message": "%s",
"versions": "",
"globalconfig": "/usr/local/etc/npmrc",
"always_auth": "",
"logs_max": "10",
"prefer_online": "",
"cache_lock_retries": "10",
"global_style": "",
"heading": "npm",
"fetch_retry_mintimeout": "10000",
"offline": "",
"proprietary_attribs": "true",
"searchlimit": "20",
"access": "",
"json": "",
"allow_same_version": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"init_module": "/Users/JianSong/.npm-init.js",
"userconfig": "/Users/JianSong/.npmrc",
"node_version": "8.1.4",
"user": "",
"auth_type": "legacy",
"editor": "vi",
"ignore_prepublish": "",
"save": "true",
"tag": "latest",
"global": "",
"progress": "true",
"ham_it_up": "",
"optional": "true",
"searchstaleness": "900",
"bin_links": "true",
"force": "",
"save_prod": "",
"searchopts": "",
"depth": "Infinity",
"rebuild_bundle": "true",
"sso_poll_frequency": "500",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"ca": "",
"save_prefix": "^",
"scripts_prepend_node_path": "warn-only",
"sso_type": "oauth",
"strict_ssl": "true",
"tag_version_prefix": "v",
"dev": "",
"fetch_retry_factor": "10",
"group": "20",
"save_exact": "",
"cache_lock_stale": "60000",
"prefer_offline": "",
"version": "",
"cache_min": "10",
"cache": "/Users/JianSong/.npm",
"searchexclude": "",
"color": "true",
"package_lock": "true",
"save_optional": "",
"user_agent": "npm/5.0.3 node/v8.1.4 darwin x64",
"ignore_scripts": "",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"send_metrics": "",
"init_version": "1.0.0",
"umask": "0022",
"scope": "",
"git": "git",
"init_author_name": "",
"onload_script": "",
"tmp": "/var/folders/xq/m3xlyvjs0zzckylsl97tbynr0000gn/T",
"unsafe_perm": "true",
"link": "",
"prefix": "/usr/local"
}
}
| [
"js8612@nyu.edu"
] | js8612@nyu.edu |
907f0883fb7e553f80b705bb6e6439ed7eea2d00 | 729a6ad8e10d70ae9c291304e6bcb291ff5ba93e | /toytree/utils.py | 4a18e59bcd86169a0ab3d6496797d913e0b698e1 | [
"BSD-3-Clause"
] | permissive | PhilippineDubertrand/toytree | 3774e2cfdd96f3bd8e6be328232fc61f2d711a98 | cdb57fae164f0035dc5f451e08289780deae927a | refs/heads/master | 2022-11-20T01:41:45.088289 | 2020-06-29T16:21:47 | 2020-06-29T16:21:47 | 274,166,890 | 0 | 0 | BSD-3-Clause | 2020-06-22T14:56:25 | 2020-06-22T14:56:25 | null | UTF-8 | Python | false | false | 10,195 | py | #!/usr/bin/env python
from __future__ import print_function, division, absolute_import
import re
from copy import deepcopy
import numpy as np
#######################################################
# Exception Classes
#######################################################
class ToytreeError(Exception):
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args, **kwargs)
class TreeError(Exception):
"A problem occurred during a TreeNode operation"
def __init__(self, value=''):
self.value = value
def __str__(self):
return repr(self.value)
# TREE FORMATS
NW_FORMAT = {
# flexible with support
# Format 0 = (A:0.35,(B:0.72,(D:0.60,G:0.12)1.00:0.64)1.00:0.56);
0: [
('name', str, True),
('dist', float, True),
('support', float, True),
('dist', float, True),
],
# flexible with internal node names
# Format 1 = (A:0.35,(B:0.72,(D:0.60,G:0.12)E:0.64)C:0.56);
1: [
('name', str, True),
('dist', float, True),
('name', str, True),
('dist', float, True),
],
# strict with support values
# Format 2 = (A:0.35,(B:0.72,(D:0.60,G:0.12)1.00:0.64)1.00:0.56);
2: [
('name', str, False),
('dist', float, False),
('support', str, False),
('dist', float, False),
],
# strict with internal node names
# Format 3 = (A:0.35,(B:0.72,(D:0.60,G:0.12)E:0.64)C:0.56);
3: [
('name', str, False),
('dist', float, False),
('name', str, False),
('dist', float, False),
],
# strict with internal node names
# Format 4 = (A:0.35,(B:0.72,(D:0.60,G:0.12)));
4: [
('name', str, False),
('dist', float, False),
(None, None, False),
(None, None, False),
],
# Format 5 = (A:0.35,(B:0.72,(D:0.60,G:0.12):0.64):0.56);
5: [
('name', str, False),
('dist', float, False),
(None, None, False),
('dist', float, False),
],
# Format 6 = (A:0.35,(B:0.72,(D:0.60,G:0.12)E)C);
6: [
('name', str, False),
(None, None, False),
(None, None, False),
('dist', float, False),
],
# Format 7 = (A,(B,(D,G)E)C);
7: [
('name', str, False),
('dist', float, False),
('name', str, False),
(None, None, False),
],
# Format 8 = (A,(B,(D,G)));
8: [
('name', str, False),
(None, None, False),
('name', str, False),
(None, None, False),
],
# Format 9 = (,(,(,)));
9: [
('name', str, False),
(None, None, False),
(None, None, False),
(None, None, False),
],
# Format 10 = ((a[&Z=1,Y=2]:1.0[&X=3], b[&Z=1,Y=2]:3.0[&X=2]):1.0[&L=1,W=0], ...
# NHX Like mrbayes NEXUS common
10: [
('name', str, True),
('dist', str, True),
('name', str, True),
('dist', str, True),
]
}
# class TreeInference:
# - get distance matrix (from an input data set... phy, nex)
# - ----- create a class to store DNA matrix (pandas colored)
# - NJ tree infer
# ------ uses distance matrix
# - UPGMA tree infer
# ------ uses distance matrix
#class TreeMoves:
# def move_spr(self):
# """
# Sub-tree pruning and Regrafting.
# Select one edge randomly from the tree and split on that edge to create
# two subtrees. Attach one of the subtrees (e.g., the smaller one)
# randomly to the larger tree to create a new node.
# ... does SPR break edges connected to root when tree is real rooted?
# """
# pass
# # On rooted trees we can work with nodes easier than edges. Start by
# # selected a node at random that is not root.
# # nodes = [i for i in self.ttree.tree.traverse() if not i.is_root()]
# # rnode = nodes[random.randint(0, len(nodes) - 1)]
# # # get all edges on the tree, skip last one which is non-real root edge
# # edges = self.ttree.tree.get_edges()[:-1]
# # # select a random edge
# # redge = edges[random.randint(0, len(edges))]
# # # break into subtrees
# # tre1 = self.tree.prune(self.tree.get_common_ancestor(redge[0]).idx)
# # tre2 = self.tree.prune(self.tree.get_common_ancestor(redge[1]).idx)
# def move_tbr(self):
# pass
# def move_nni(self):
# pass
# def non_parametric_rate_smoothing(self):
# """
# Non-parametric rate smooting.
# A method for estimating divergence times when evolutionary rates are
# variable across lineages by minimizing ancestor-descendant local rate
# changes. According to Sanderson this method is motivated by the
# likelihood that evolutionary rates are autocorrelated in time.
# returns Toytree
# """
# # p is a fixed exponent
# p = 2
# W = []
# for node in self.ttree.traverse():
# if not node.is_leaf():
# children = node.children
# ks = []
# for child in children:
# dist = abs(node.dist - child.dist)
# ks.append(dist ** p)
# W.append(sum(ks))
# # root rate is mean of all descendant rates --
# # n is the number of edges (rates) (nnodes - 1 for root)
# r_root = np.mean(W)
# rootw = []
# for child in self.ttree.tree.children:
# rootw.append((r_rroot - child.dist) ** p)
# w_root = sum(rootw)
# W.append(w_root)
# k = []
# for
# k = sum( np.exp(abs(ri - rj), p) )
# W = sum(k)
# def penalized_likelihood(...):
# pass
#
# def wfunc(ttree, p):
# ws = []
# for node in ttree.tree.traverse():
# if not node.is_leaf():
# w = sum([(node.dist - child.dist) ** p for child in node.children])
# ws.append(w)
# return sum(ws)
#######################################################
# Other
#######################################################
def bpp2newick(bppnewick):
"converts bpp newick format to normal newick. ugh."
regex1 = re.compile(r" #[-+]?[0-9]*\.?[0-9]*[:]")
regex2 = re.compile(r" #[-+]?[0-9]*\.?[0-9]*[;]")
regex3 = re.compile(r": ")
new = regex1.sub(":", bppnewick)
new = regex2.sub(";", new)
new = regex3.sub(":", new)
return new.strip()
# TODO: would be useful for (eg., root) to have option to return not mrca,
# and fuzzy match just tips, or nodes, etc...
def normalize_values(vals, nbins=10, minsize=2, maxsize=12):
"""
Distributes values into bins spaced at reasonable sizes for plotting.
Example, this can be used automatically scale Ne values to plot as
edge widths.
"""
# make copy of original
ovals = deepcopy(vals)
# if 6X min value is higher than max then add this
# as a fake value to scale more nicely
vals = list(vals)
if min(vals) * 6 > max(vals):
vals.append(min(vals) * 6)
# sorted vals list
svals = sorted(vals)
# put vals into bins
bins = np.histogram(vals, bins=nbins)[0]
# convert binned vals to widths in 2-12
newvals = {}
sizes = np.linspace(minsize, maxsize, nbins)
for idx, inbin in enumerate(bins):
for num in range(inbin):
newvals[svals.pop(0)] = sizes[idx]
return np.array([newvals[i] for i in ovals])
# def fuzzy_match_tipnames(ttree, names, wildcard, regex, mono=True, retnode=True):
def fuzzy_match_tipnames(ttree, names, wildcard, regex, mrca=True, mono=True):
"""
Used in multiple internal functions (e.g., .root()) and .drop_tips())
to select an internal mrca node, or multiple tipnames, using fuzzy matching
so that every name does not need to be written out by hand.
name: verbose list
wildcard: matching unique string
regex: regex expression
mrca: return mrca node of selected tipnames.
mono: raise error if selected tipnames are not monophyletic
"""
# require arguments
if not any([names, wildcard, regex]):
raise ToytreeError(
"must enter an outgroup, wildcard selector, or regex pattern")
# get list of **nodes** from {list, wildcard, or regex}
tips = []
if names:
if isinstance(names, (str, int)):
names = [names]
notfound = [i for i in names if i not in ttree.get_tip_labels()]
if any(notfound):
raise ToytreeError(
"Sample {} is not in the tree".format(notfound))
tips = [i for i in ttree.treenode.get_leaves() if i.name in names]
# use regex to match tipnames
elif regex:
tips = [
i for i in ttree.treenode.get_leaves() if re.match(regex, i.name)
]
if not any(tips):
raise ToytreeError("No Samples matched the regular expression")
# use wildcard substring matching
elif wildcard:
tips = [i for i in ttree.treenode.get_leaves() if wildcard in i.name]
if not any(tips):
raise ToytreeError("No Samples matched the wildcard")
# build list of **tipnames** from matched nodes
if not tips:
raise ToytreeError("no matching tipnames")
tipnames = [i.name for i in tips]
# if a single tipname matched no need to check for monophyly
if len(tips) == 1:
if mrca:
return tips[0]
else:
return tipnames
# if multiple nodes matched, check if they're monophyletic
mbool, mtype, mnames = (
ttree.treenode.check_monophyly(
tipnames, "name", ignore_missing=True)
)
# get mrca node
node = ttree.treenode.get_common_ancestor(tips)
# raise an error if required to be monophyletic but not
if mono:
if not mbool:
raise ToytreeError(
"Taxon list cannot be paraphyletic")
# return tips or nodes
if not mrca:
return tipnames
else:
return node
| [
"de2356@columbia.edu"
] | de2356@columbia.edu |
bd0209b90473b52098a42dc9de3dc5869fe1ef18 | 7f57a990b390ddc05a9033a9414459a1a353e751 | /crawling/migrations/0007_auto_20201009_1313.py | 5c428fffacebee86c150832fe038eb4b29dd8c4c | [] | no_license | acardiav/telegram_chatbot | 18f360b3e3ccef44f2290e16fd0bab866d67f304 | 1f3e64381a0ac2803060b85ac41f9bb22bda3ca5 | refs/heads/master | 2023-01-01T20:56:55.640272 | 2020-10-22T01:35:32 | 2020-10-22T01:35:32 | 305,295,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | # Generated by Django 3.0.10 on 2020-10-09 04:13
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('crawling', '0006_merge_20201009_1259'),
]
operations = [
migrations.AlterModelTable(
name='post',
table='test',
),
]
| [
"acardiav@gmail.com"
] | acardiav@gmail.com |
3179b52f9eae9dc675a92b6b50fc961d0a7fb9f0 | 3881c6471d5017589f5203016b886549c2cdacc5 | /datasets/superb/superb.py | abbb7bf765b7ba3e3dd43245c66fd9a5174b6137 | [
"Apache-2.0"
] | permissive | SebastinSanty/datasets | 73f0dc9981d2ce68ce233ba732d48d6637d0d6ae | 99403fc639665497219ad1b74b7a93ff97e3f5aa | refs/heads/master | 2023-08-30T01:11:43.355033 | 2021-11-15T14:45:22 | 2021-11-15T14:45:22 | 428,611,292 | 0 | 0 | Apache-2.0 | 2021-11-16T10:31:07 | 2021-11-16T10:31:05 | null | UTF-8 | Python | false | false | 30,236 | py | # coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""SUPERB: Speech processing Universal PERformance Benchmark."""
import csv
import glob
import os
import textwrap
from dataclasses import dataclass
import datasets
from datasets.tasks import AutomaticSpeechRecognition
_CITATION = """\
@article{DBLP:journals/corr/abs-2105-01051,
author = {Shu{-}Wen Yang and
Po{-}Han Chi and
Yung{-}Sung Chuang and
Cheng{-}I Jeff Lai and
Kushal Lakhotia and
Yist Y. Lin and
Andy T. Liu and
Jiatong Shi and
Xuankai Chang and
Guan{-}Ting Lin and
Tzu{-}Hsien Huang and
Wei{-}Cheng Tseng and
Ko{-}tik Lee and
Da{-}Rong Liu and
Zili Huang and
Shuyan Dong and
Shang{-}Wen Li and
Shinji Watanabe and
Abdelrahman Mohamed and
Hung{-}yi Lee},
title = {{SUPERB:} Speech processing Universal PERformance Benchmark},
journal = {CoRR},
volume = {abs/2105.01051},
year = {2021},
url = {https://arxiv.org/abs/2105.01051},
archivePrefix = {arXiv},
eprint = {2105.01051},
timestamp = {Thu, 01 Jul 2021 13:30:22 +0200},
biburl = {https://dblp.org/rec/journals/corr/abs-2105-01051.bib},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
"""
_DESCRIPTION = """\
Self-supervised learning (SSL) has proven vital for advancing research in
natural language processing (NLP) and computer vision (CV). The paradigm
pretrains a shared model on large volumes of unlabeled data and achieves
state-of-the-art (SOTA) for various tasks with minimal adaptation. However, the
speech processing community lacks a similar setup to systematically explore the
paradigm. To bridge this gap, we introduce Speech processing Universal
PERformance Benchmark (SUPERB). SUPERB is a leaderboard to benchmark the
performance of a shared model across a wide range of speech processing tasks
with minimal architecture changes and labeled data. Among multiple usages of the
shared model, we especially focus on extracting the representation learned from
SSL due to its preferable re-usability. We present a simple framework to solve
SUPERB tasks by learning task-specialized lightweight prediction heads on top of
the frozen shared model. Our results demonstrate that the framework is promising
as SSL representations show competitive generalizability and accessibility
across SUPERB tasks. We release SUPERB as a challenge with a leaderboard and a
benchmark toolkit to fuel the research in representation learning and general
speech processing.
Note that in order to limit the required storage for preparing this dataset, the
audio is stored in the .wav format and is not converted to a float32 array. To
convert the audio file to a float32 array, please make use of the `.map()`
function as follows:
```python
import soundfile as sf
def map_to_array(batch):
speech_array, _ = sf.read(batch["file"])
batch["speech"] = speech_array
return batch
dataset = dataset.map(map_to_array, remove_columns=["file"])
```
"""
class SuperbConfig(datasets.BuilderConfig):
"""BuilderConfig for Superb."""
def __init__(
self,
features,
url,
data_url=None,
supervised_keys=None,
task_templates=None,
**kwargs,
):
super().__init__(version=datasets.Version("1.9.0", ""), **kwargs)
self.features = features
self.data_url = data_url
self.url = url
self.supervised_keys = supervised_keys
self.task_templates = task_templates
class Superb(datasets.GeneratorBasedBuilder):
    """Superb dataset builder.

    One :class:`SuperbConfig` per SUPERB task: asr, ks, ic, si, sd, er.
    Configs without a ``data_url`` (si, er) require manual download.
    """
    BUILDER_CONFIGS = [
        SuperbConfig(
            name="asr",
            description=textwrap.dedent(
                """\
            ASR transcribes utterances into words. While PR analyzes the
            improvement in modeling phonetics, ASR reflects the significance of
            the improvement in a real-world scenario. LibriSpeech
            train-clean-100/dev-clean/test-clean subsets are used for
            training/validation/testing. The evaluation metric is word error
            rate (WER)."""
            ),
            features=datasets.Features(
                {
                    "file": datasets.Value("string"),
                    "audio": datasets.features.Audio(sampling_rate=16_000),
                    "text": datasets.Value("string"),
                    "speaker_id": datasets.Value("int64"),
                    "chapter_id": datasets.Value("int64"),
                    "id": datasets.Value("string"),
                }
            ),
            supervised_keys=("file", "text"),
            url="http://www.openslr.org/12",
            # base URL; the archive names are appended in _split_generators
            data_url="http://www.openslr.org/resources/12/",
            task_templates=[AutomaticSpeechRecognition(audio_file_path_column="file", transcription_column="text")],
        ),
        SuperbConfig(
            name="ks",
            description=textwrap.dedent(
                """\
            Keyword Spotting (KS) detects preregistered keywords by classifying utterances into a predefined set of
            words. The task is usually performed on-device for the fast response time. Thus, accuracy, model size, and
            inference time are all crucial. SUPERB uses the widely used Speech Commands dataset v1.0 for the task.
            The dataset consists of ten classes of keywords, a class for silence, and an unknown class to include the
            false positive. The evaluation metric is accuracy (ACC)"""
            ),
            features=datasets.Features(
                {
                    "file": datasets.Value("string"),
                    "audio": datasets.features.Audio(sampling_rate=16_000),
                    "label": datasets.ClassLabel(
                        names=[
                            "yes",
                            "no",
                            "up",
                            "down",
                            "left",
                            "right",
                            "on",
                            "off",
                            "stop",
                            "go",
                            "_silence_",
                            "_unknown_",
                        ]
                    ),
                }
            ),
            supervised_keys=("file", "label"),
            url="https://www.tensorflow.org/datasets/catalog/speech_commands",
            # NOTE(review): "(unknown)" looks like a redacted "{filename}"
            # placeholder (the URL is .format(filename=...)-ed later) -- restore
            # before use.
            data_url="http://download.tensorflow.org/data/(unknown)",
        ),
        SuperbConfig(
            name="ic",
            description=textwrap.dedent(
                """\
            Intent Classification (IC) classifies utterances into predefined classes to determine the intent of
            speakers. SUPERB uses the Fluent Speech Commands dataset, where each utterance is tagged with three intent
            labels: action, object, and location. The evaluation metric is accuracy (ACC)."""
            ),
            features=datasets.Features(
                {
                    "file": datasets.Value("string"),
                    "audio": datasets.features.Audio(sampling_rate=16_000),
                    "speaker_id": datasets.Value("string"),
                    "text": datasets.Value("string"),
                    "action": datasets.ClassLabel(
                        names=["activate", "bring", "change language", "deactivate", "decrease", "increase"]
                    ),
                    "object": datasets.ClassLabel(
                        names=[
                            "Chinese",
                            "English",
                            "German",
                            "Korean",
                            "heat",
                            "juice",
                            "lamp",
                            "lights",
                            "music",
                            "newspaper",
                            "none",
                            "shoes",
                            "socks",
                            "volume",
                        ]
                    ),
                    "location": datasets.ClassLabel(names=["bedroom", "kitchen", "none", "washroom"]),
                }
            ),
            supervised_keys=None,
            url="https://fluent.ai/fluent-speech-commands-a-dataset-for-spoken-language-understanding-research/",
            data_url="http://fluent.ai:2052/jf8398hf30f0381738rucj3828chfdnchs.tar.gz",
        ),
        SuperbConfig(
            name="si",
            description=textwrap.dedent(
                """\
            Speaker Identification (SI) classifies each utterance for its speaker identity as a multi-class
            classification, where speakers are in the same predefined set for both training and testing. The widely
            used VoxCeleb1 dataset is adopted, and the evaluation metric is accuracy (ACC)."""
            ),
            features=datasets.Features(
                {
                    "file": datasets.Value("string"),
                    "audio": datasets.features.Audio(sampling_rate=16_000),
                    # VoxCeleb1 contains 1251 speaker IDs in range ["id10001",..."id11251"]
                    "label": datasets.ClassLabel(names=[f"id{i + 10001}" for i in range(1251)]),
                }
            ),
            supervised_keys=("file", "label"),
            url="https://www.robots.ox.ac.uk/~vgg/data/voxceleb/vox1.html",
            # no data_url: VoxCeleb1 must be downloaded manually
        ),
        SuperbConfig(
            name="sd",
            description=textwrap.dedent(
                """\
            Speaker Diarization (SD) predicts `who is speaking when` for each timestamp, and multiple speakers can
            speak simultaneously. The model has to encode rich speaker characteristics for each frame and should be
            able to represent mixtures of signals. [LibriMix] is adopted where LibriSpeech
            train-clean-100/dev-clean/test-clean are used to generate mixtures for training/validation/testing.
            We focus on the two-speaker scenario as the first step. The time-coded speaker labels were generated using
            alignments from Kaldi LibriSpeech ASR model. The evaluation metric is diarization error rate (DER)."""
            ),
            features=datasets.Features(
                {
                    "record_id": datasets.Value("string"),
                    "file": datasets.Value("string"),
                    "audio": datasets.features.Audio(sampling_rate=16_000),
                    "start": datasets.Value("int64"),
                    "end": datasets.Value("int64"),
                    "speakers": [
                        {
                            "speaker_id": datasets.Value("string"),
                            "start": datasets.Value("int64"),
                            "end": datasets.Value("int64"),
                        }
                    ],
                }
            ),  # TODO
            supervised_keys=None,  # TODO
            url="https://github.com/ftshijt/LibriMix",
            # NOTE(review): the trailing "(unknown)" appears to be a redacted
            # "{filename}" placeholder -- restore before use.
            data_url="https://huggingface.co/datasets/superb/superb-data/resolve/main/sd/{split}/(unknown)",
        ),
        SuperbConfig(
            name="er",
            description=textwrap.dedent(
                """\
            Emotion Recognition (ER) predicts an emotion class for each utterance. The most widely used ER dataset
            IEMOCAP is adopted, and we follow the conventional evaluation protocol: we drop the unbalanced emotion
            classes to leave the final four classes with a similar amount of data points and cross-validate on five
            folds of the standard splits. The evaluation metric is accuracy (ACC)."""
            ),
            features=datasets.Features(
                {
                    "file": datasets.Value("string"),
                    "audio": datasets.features.Audio(sampling_rate=16_000),
                    "label": datasets.ClassLabel(names=["neu", "hap", "ang", "sad"]),
                }
            ),
            supervised_keys=("file", "label"),
            url="https://sail.usc.edu/iemocap/",
            # no data_url: IEMOCAP must be requested and downloaded manually
        ),
    ]
    @property
    def manual_download_instructions(self):
        """Instructions for configs whose data cannot be downloaded automatically.

        Returns a message for the "si" (VoxCeleb1) and "er" (IEMOCAP) configs,
        None for every other config.
        """
        if self.config.name == "si":
            return textwrap.dedent(
                """
            Please download the VoxCeleb dataset using the following script,
            which should create `VoxCeleb1/wav/id*` directories for both train and test speakers`:
            ```
            mkdir VoxCeleb1
            cd VoxCeleb1
            wget https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a/vox1_dev_wav_partaa
            wget https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a/vox1_dev_wav_partab
            wget https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a/vox1_dev_wav_partac
            wget https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a/vox1_dev_wav_partad
            cat vox1_dev* > vox1_dev_wav.zip
            unzip vox1_dev_wav.zip
            wget https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a/vox1_test_wav.zip
            unzip vox1_test_wav.zip
            # download the official SUPERB train-dev-test split
            wget https://raw.githubusercontent.com/s3prl/s3prl/master/s3prl/downstream/voxceleb1/veri_test_class.txt
            ```"""
            )
        elif self.config.name == "er":
            return textwrap.dedent(
                """
            Please download the IEMOCAP dataset after submitting the request form here:
            https://sail.usc.edu/iemocap/iemocap_release.htm
            Having downloaded the dataset you can extract it with `tar -xvzf IEMOCAP_full_release.tar.gz`
            which should create a folder called `IEMOCAP_full_release`
            """
            )
        return None
    def _info(self):
        """Assemble the :class:`datasets.DatasetInfo` from the active config."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=self.config.features,
            supervised_keys=self.config.supervised_keys,
            homepage=self.config.url,
            citation=_CITATION,
            task_templates=self.config.task_templates,
        )
    def _split_generators(self, dl_manager):
        """Download (or locate) the data for the active config and declare splits.

        Each branch returns a list of `datasets.SplitGenerator`, whose
        gen_kwargs are consumed by `_generate_examples`.
        """
        if self.config.name == "asr":
            # LibriSpeech: one archive per split
            _DL_URLS = {
                "dev": self.config.data_url + "dev-clean.tar.gz",
                "test": self.config.data_url + "test-clean.tar.gz",
                "train": self.config.data_url + "train-clean-100.tar.gz",
            }
            archive_path = dl_manager.download_and_extract(_DL_URLS)
            return [
                datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"archive_path": archive_path["train"]}),
                datasets.SplitGenerator(
                    name=datasets.Split.VALIDATION, gen_kwargs={"archive_path": archive_path["dev"]}
                ),
                datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"archive_path": archive_path["test"]}),
            ]
        elif self.config.name == "ks":
            # Speech Commands: train/val share one archive, test has its own
            _DL_URLS = {
                "train_val_test": self.config.data_url.format(filename="speech_commands_v0.01.tar.gz"),
                "test": self.config.data_url.format(filename="speech_commands_test_set_v0.01.tar.gz"),
            }
            archive_path = dl_manager.download_and_extract(_DL_URLS)
            return [
                datasets.SplitGenerator(
                    name=datasets.Split.TRAIN,
                    gen_kwargs={"archive_path": archive_path["train_val_test"], "split": "train"},
                ),
                datasets.SplitGenerator(
                    name=datasets.Split.VALIDATION,
                    gen_kwargs={"archive_path": archive_path["train_val_test"], "split": "val"},
                ),
                datasets.SplitGenerator(
                    name=datasets.Split.TEST, gen_kwargs={"archive_path": archive_path["test"], "split": "test"}
                ),
            ]
        elif self.config.name == "ic":
            # Fluent Speech Commands: single archive, splits via CSV files
            archive_path = dl_manager.download_and_extract(self.config.data_url)
            return [
                datasets.SplitGenerator(
                    name=datasets.Split.TRAIN,
                    gen_kwargs={"archive_path": archive_path, "split": "train"},
                ),
                datasets.SplitGenerator(
                    name=datasets.Split.VALIDATION,
                    gen_kwargs={"archive_path": archive_path, "split": "valid"},
                ),
                datasets.SplitGenerator(
                    name=datasets.Split.TEST, gen_kwargs={"archive_path": archive_path, "split": "test"}
                ),
            ]
        elif self.config.name == "si":
            # manual download; split ids 1/2/3 = train/validation/test
            manual_dir = os.path.abspath(os.path.expanduser(dl_manager.manual_dir))
            return [
                datasets.SplitGenerator(
                    name=datasets.Split.TRAIN,
                    gen_kwargs={"archive_path": manual_dir, "split": 1},
                ),
                datasets.SplitGenerator(
                    name=datasets.Split.VALIDATION,
                    gen_kwargs={"archive_path": manual_dir, "split": 2},
                ),
                datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"archive_path": manual_dir, "split": 3}),
            ]
        elif self.config.name == "sd":
            # LibriMix diarization: four Kaldi-style files per split
            splits = ["train", "dev", "test"]
            _DL_URLS = {
                split: {
                    filename: self.config.data_url.format(split=split, filename=filename)
                    for filename in ["reco2dur", "segments", "utt2spk", "wav.zip"]
                }
                for split in splits
            }
            archive_path = dl_manager.download_and_extract(_DL_URLS)
            return [
                datasets.SplitGenerator(
                    name=datasets.NamedSplit(split), gen_kwargs={"archive_path": archive_path[split], "split": split}
                )
                for split in splits
            ]
        elif self.config.name == "er":
            # manual download; one split per IEMOCAP session (cross-validation folds)
            manual_dir = os.path.abspath(os.path.expanduser(dl_manager.manual_dir))
            return [
                datasets.SplitGenerator(
                    name=f"session{i}",
                    gen_kwargs={"archive_path": manual_dir, "split": i},
                )
                for i in range(1, 6)
            ]
    def _generate_examples(self, archive_path, split=None):
        """Generate examples.

        Yields (key, example) pairs; the example schema matches the active
        config's ``features`` declared in BUILDER_CONFIGS.
        """
        if self.config.name == "asr":
            # LibriSpeech layout: .../<speaker>/<chapter>/<chapter>.trans.txt
            transcripts_glob = os.path.join(archive_path, "LibriSpeech", "*", "*", "*", "*.txt")
            key = 0
            for transcript_path in sorted(glob.glob(transcripts_glob)):
                transcript_dir_path = os.path.dirname(transcript_path)
                with open(transcript_path, "r", encoding="utf-8") as f:
                    for line in f:
                        line = line.strip()
                        # each line is "<utt-id> <transcript>"
                        id_, transcript = line.split(" ", 1)
                        audio_file = f"{id_}.flac"
                        speaker_id, chapter_id = [int(el) for el in id_.split("-")[:2]]
                        audio_path = os.path.join(transcript_dir_path, audio_file)
                        yield key, {
                            "id": id_,
                            "speaker_id": speaker_id,
                            "chapter_id": chapter_id,
                            "file": audio_path,
                            "audio": audio_path,
                            "text": transcript,
                        }
                        key += 1
        elif self.config.name == "ks":
            # keyword classes; anything else maps to _silence_/_unknown_
            words = ["yes", "no", "up", "down", "left", "right", "on", "off", "stop", "go"]
            splits = _split_ks_files(archive_path, split)
            for key, audio_file in enumerate(sorted(splits[split])):
                base_dir, file_name = os.path.split(audio_file)
                # the parent directory name is the spoken word
                _, word = os.path.split(base_dir)
                if word in words:
                    label = word
                elif word == "_silence_" or word == "_background_noise_":
                    label = "_silence_"
                else:
                    label = "_unknown_"
                yield key, {"file": audio_file, "audio": audio_file, "label": label}
        elif self.config.name == "ic":
            root_path = os.path.join(archive_path, "fluent_speech_commands_dataset")
            csv_path = os.path.join(root_path, "data", f"{split}_data.csv")
            with open(csv_path, encoding="utf-8") as csv_file:
                csv_reader = csv.reader(csv_file, delimiter=",", skipinitialspace=True)
                next(csv_reader)  # skip header row
                for row in csv_reader:
                    key, file_path, speaker_id, text, action, object_, location = row
                    audio_path = os.path.join(root_path, file_path)
                    yield key, {
                        "file": audio_path,
                        "audio": audio_path,
                        "speaker_id": speaker_id,
                        "text": text,
                        "action": action,
                        "object": object_,
                        "location": location,
                    }
        elif self.config.name == "si":
            wav_path = os.path.join(archive_path, "wav")
            splits_path = os.path.join(archive_path, "veri_test_class.txt")
            with open(splits_path, "r", encoding="utf-8") as f:
                # each line is "<split-id> <speaker>/<session>/<file>.wav"
                for key, line in enumerate(f):
                    split_id, file_path = line.strip().split(" ")
                    if int(split_id) != split:
                        continue
                    speaker_id = file_path.split("/")[0]
                    audio_path = os.path.join(wav_path, file_path)
                    yield key, {
                        "file": audio_path,
                        "audio": audio_path,
                        "label": speaker_id,
                    }
        elif self.config.name == "sd":
            data = SdData(archive_path)
            args = SdArgs()
            # list of (rec, start, end) for train/dev; dict {rec: [...]} for test
            chunk_indices = _generate_chunk_indices(data, args, split=split)
            if split != "test":
                for key, (rec, st, ed) in enumerate(chunk_indices):
                    speakers = _get_speakers(rec, data, args)
                    yield key, {
                        "record_id": rec,
                        "file": data.wavs[rec],
                        "audio": data.wavs[rec],
                        "start": st,
                        "end": ed,
                        "speakers": speakers,
                    }
            else:
                key = 0
                for rec in chunk_indices:
                    # NOTE(review): the inner tuple unpacking shadows the outer
                    # ``rec``; harmless here because every tuple's first element
                    # equals the dict key, but worth renaming.
                    for rec, st, ed in chunk_indices[rec]:
                        speakers = _get_speakers(rec, data, args)
                        yield key, {
                            "record_id": rec,
                            "file": data.wavs[rec],
                            "audio": data.wavs[rec],
                            "start": st,
                            "end": ed,
                            "speakers": speakers,
                        }
                        key += 1
        elif self.config.name == "er":
            root_path = os.path.join(archive_path, f"Session{split}")
            wav_path = os.path.join(root_path, "sentences", "wav")
            labels_path = os.path.join(root_path, "dialog", "EmoEvaluation", "*.txt")
            # "exc" (excited) is later merged into "hap"
            emotions = ["neu", "hap", "ang", "sad", "exc"]
            key = 0
            for labels_file in sorted(glob.glob(labels_path)):
                with open(labels_file, "r", encoding="utf-8") as f:
                    for line in f:
                        # evaluation lines start with "[<timestamps>]"
                        if line[0] != "[":
                            continue
                        _, filename, emo, _ = line.split("\t")
                        if emo not in emotions:
                            continue
                        wav_subdir = filename.rsplit("_", 1)[0]
                        # NOTE(review): "(unknown)" looks like a redacted
                        # f-string placeholder (likely f"{filename}.wav") --
                        # restore before use.
                        filename = f"(unknown).wav"
                        audio_path = os.path.join(wav_path, wav_subdir, filename)
                        yield key, {
                            "file": audio_path,
                            "audio": audio_path,
                            "label": emo.replace("exc", "hap"),
                        }
                        key += 1
class SdData:
    """Kaldi-style speaker-diarization metadata.

    ``data_dir`` maps the keys "segments", "utt2spk", "wav.zip" and
    "reco2dur" to local paths produced by the download manager.
    """
    def __init__(self, data_dir):
        """Load sd data."""
        self.segments = self._load_segments_rechash(data_dir["segments"])
        self.utt2spk = self._load_utt2spk(data_dir["utt2spk"])
        self.wavs = self._load_wav_zip(data_dir["wav.zip"])
        self.reco2dur = self._load_reco2dur(data_dir["reco2dur"])
    def _load_segments_rechash(self, segments_file):
        """Parse the segments file into {rec_id: [segment, ...]}; None if missing."""
        if not os.path.exists(segments_file):
            return None
        by_rec = {}
        with open(segments_file, encoding="utf-8") as handle:
            for raw in handle:
                utt, rec, start, end = raw.strip().split()
                by_rec.setdefault(rec, []).append(
                    {"utt": utt, "st": float(start), "et": float(end)}
                )
        return by_rec
    def _load_wav_zip(self, wav_zip):
        """Return dictionary { rec: wav_rxfilename } from the extracted wav dir."""
        wav_dir = os.path.join(wav_zip, "wav")
        mapping = {}
        for entry in sorted(os.listdir(wav_dir)):
            rec_id, _ext = os.path.splitext(entry)
            mapping[rec_id] = os.path.join(wav_dir, entry)
        return mapping
    def _load_utt2spk(self, utt2spk_file):
        """Return dictionary { uttid: spkid }."""
        with open(utt2spk_file, encoding="utf-8") as handle:
            return dict(line.strip().split(None, 1) for line in handle)
    def _load_reco2dur(self, reco2dur_file):
        """Return dictionary { recid: duration_seconds }; None if missing."""
        if not os.path.exists(reco2dur_file):
            return None
        durations = {}
        with open(reco2dur_file, encoding="utf-8") as handle:
            for line in handle:
                rec, dur = line.strip().split(None, 1)
                durations[rec] = float(dur)
        return durations
@dataclass
class SdArgs:
    # Diarization chunking parameters (defaults match the s3prl downstream task)
    chunk_size: int = 2000       # frames per chunk
    frame_shift: int = 160       # samples per frame (10 ms at 16 kHz)
    subsampling: int = 1         # frame subsampling factor
    label_delay: int = 0         # label delay in (sub-sampled) frames
    num_speakers: int = 2        # two-speaker mixtures
    rate: int = 16000            # audio sampling rate in Hz
    use_last_samples: bool = True  # emit a final partial chunk
def _generate_chunk_indices(data, args, split=None):
    """Build (rec, start, end) chunk indices for every recording in *data*.

    Returns a flat list for train/dev splits and a ``{rec: [...]}`` dict for
    the test split. Start/end are expressed in original (non-sub-sampled)
    frames.
    """
    is_test = split == "test"
    chunk_indices = {} if is_test else []
    # make chunk indices: filepath, start_frame, end_frame
    for rec in data.wavs:
        num_frames = int(data.reco2dur[rec] * args.rate / args.frame_shift)
        num_frames = int(num_frames / args.subsampling)
        if is_test:
            chunk_indices[rec] = [
                (rec, st * args.subsampling, ed * args.subsampling)
                for st, ed in _gen_chunk_indices(num_frames, args.chunk_size)
            ]
        else:
            for st, ed in _gen_frame_indices(
                num_frames,
                args.chunk_size,
                args.chunk_size,
                args.use_last_samples,
                label_delay=args.label_delay,
                subsampling=args.subsampling,
            ):
                chunk_indices.append((rec, st * args.subsampling, ed * args.subsampling))
    return chunk_indices
def _count_frames(data_len, size, step):
# no padding at edges, last remaining samples are ignored
return int((data_len - size + step) / step)
def _gen_frame_indices(data_length, size=2000, step=2000, use_last_samples=False, label_delay=0, subsampling=1):
i = -1
for i in range(_count_frames(data_length, size, step)):
yield i * step, i * step + size
if use_last_samples and i * step + size < data_length:
if data_length - (i + 1) * step - subsampling * label_delay > 0:
yield (i + 1) * step, data_length
def _gen_chunk_indices(data_len, chunk_size):
step = chunk_size
start = 0
while start < data_len:
end = min(data_len, start + chunk_size)
yield start, end
start += step
def _get_speakers(rec, data, args):
return [
{
"speaker_id": data.utt2spk[segment["utt"]],
"start": round(segment["st"] * args.rate / args.frame_shift),
"end": round(segment["et"] * args.rate / args.frame_shift),
}
for segment in data.segments[rec]
]
def _split_ks_files(archive_path, split):
audio_path = os.path.join(archive_path, "**", "*.wav")
audio_paths = glob.glob(audio_path)
if split == "test":
# use all available files for the test archive
return {"test": audio_paths}
val_list_file = os.path.join(archive_path, "validation_list.txt")
test_list_file = os.path.join(archive_path, "testing_list.txt")
with open(val_list_file, encoding="utf-8") as f:
val_paths = f.read().strip().splitlines()
val_paths = [os.path.join(archive_path, p) for p in val_paths]
with open(test_list_file, encoding="utf-8") as f:
test_paths = f.read().strip().splitlines()
test_paths = [os.path.join(archive_path, p) for p in test_paths]
# the paths for the train set is just whichever paths that do not exist in
# either the test or validation splits
train_paths = list(set(audio_paths) - set(val_paths) - set(test_paths))
return {"train": train_paths, "val": val_paths}
| [
"noreply@github.com"
] | noreply@github.com |
93dc5c3a9db14864da78ac12366778f18d0c1263 | b289a5076e06a24064526569086644f6383587c4 | /projetofinanceiro/appfinanceiro/apps.py | 1fec721d51e98309f6b4f627541b2729ccc1f5a5 | [] | no_license | Rubensrvsc/Programacao-WEB | d2eb36d7364736fdb93981b549e139d79e048310 | e38f3a809a0aa244f32f053ed9aa45c7e8586b5e | refs/heads/master | 2020-03-29T12:59:25.098325 | 2019-01-02T19:49:42 | 2019-01-02T19:49:42 | 149,933,053 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 101 | py | from django.apps import AppConfig
class AppfinanceiroConfig(AppConfig):
    """Django application configuration for the ``appfinanceiro`` app."""
    name = 'appfinanceiro'
| [
"Rubensspfc100@gmail.com"
] | Rubensspfc100@gmail.com |
bb86bd392aeaae885574fab7e2cc24a1371fecd2 | b7dc9efcbc9a2bbec3020effb9236d66282d020c | /roboticarm/__init__.py | 188134e55956717996d540ae2e459a8150ff8ff3 | [] | no_license | skarkalas/roboticarm | 3abd157f36409a24311616ce92f70fbbe9203f4f | ce8884bf25541a005f582cf19da81c0494eb85ac | refs/heads/master | 2021-01-16T20:07:03.282072 | 2017-08-19T11:58:58 | 2017-08-19T11:58:58 | 100,196,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 56 | py | from roboarm import RoboArm
from wiimote import Wiimote
| [
"sokratis.karkalas@gmail.com"
] | sokratis.karkalas@gmail.com |
75a1c7bfd7129ce55f5eba80d259be9cc3f58c32 | d4cd2476f8fa8a7d94e183a68bd0678971310c5b | /checkio/05_Alice_in_Wonderland/01_Alice_05_DigitDoublets.py | 93be0ef309f0753e3758c5c296e1049c4e7b3414 | [] | no_license | gwqw/LessonsSolution | b495579f6d5b483c30d290bfa8ef0a2e29515985 | 0b841b1ae8867890fe06a5f0dcee63db9a3319a3 | refs/heads/master | 2020-07-05T19:15:53.758725 | 2019-10-01T11:34:44 | 2019-10-01T11:34:44 | 202,744,145 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,207 | py | # check if nums differs only by one digit
def isOneDiff(n1, n2):
    """True when the decimal digits of n1 and n2 differ in exactly one position."""
    s1, s2 = str(n1), str(n2)
    mismatches = 0
    # index over s1 (not zip) to keep the original behavior for unequal lengths
    for pos in range(len(s1)):
        if s1[pos] != s2[pos]:
            mismatches += 1
            if mismatches > 1:
                return False
    return mismatches == 1
# find next nums in list
def findnext(numbers):
    """All numbers after the head that differ from the head by exactly one digit."""
    head = numbers[0]
    return [candidate for candidate in numbers[1:] if isOneDiff(candidate, head)]
# move next number to second position
def regroupList(numbers, snum):
    """Return a copy of *numbers* with *snum* swapped into position 1.

    The input list is left untouched.
    """
    swapped = numbers[:]
    pos = swapped.index(snum)
    swapped[1], swapped[pos] = swapped[pos], swapped[1]
    return swapped
# construct all trees
def constrTree(numbers):
    """Recursively build every doublet chain from numbers[0] to numbers[-1].

    A chain is a list of numbers where consecutive entries differ by one
    digit. Returns a list of chains (possibly empty).

    Bug fixed: the original appended ``t`` *after* the ``for t in tmptree``
    loop, so only the last sub-chain of each recursion was kept and ``t``
    could be unbound (NameError) or stale when ``tmptree`` was empty. Each
    prefixed sub-chain is now collected inside the loop.
    """
    res_tree = []
    isFinal = len(numbers) == 2
    finalNum = numbers[-1]
    # candidates reachable from the head in one digit change
    next_nums = findnext(numbers)
    for n in next_nums:
        if n == finalNum:
            # direct edge head -> target: shortest possible continuation
            res_tree.append([numbers[0], n])
            break
        elif not isFinal:
            # move n next to the head and recurse on the remainder
            lst = regroupList(numbers, n)
            for subchain in constrTree(lst[1:]):
                subchain.insert(0, numbers[0])
                res_tree.append(subchain)
    return res_tree
# find the shortest tree
def findShortest(trees):
    """Return the first tree with the fewest nodes; [] when *trees* is empty."""
    best, best_len = [], 100000
    for candidate in trees:
        if len(candidate) < best_len:
            best_len = len(candidate)
            best = candidate
    return best
def checkio(numbers):
    """Return the shortest digit-doublet chain from numbers[0] to numbers[-1].

    The intermediate prints are debug output kept from the original solution.
    """
    print("input_tree= ", numbers)
    res_trees = constrTree(numbers)
    print("res_trees= ", res_trees)
    short_tree = findShortest(res_trees)
    print("short_tree= ", short_tree)
    return short_tree
# Self-check assertions (used for local verification only, not for auto-testing)
if __name__ == '__main__':
    assert checkio([123, 991, 323, 321, 329, 121, 921, 125, 999]) == [123, 121, 921, 991, 999], "First"
    assert checkio([111, 222, 333, 444, 555, 666, 121, 727, 127, 777]) == [111, 121, 127, 727, 777], "Second"
    assert checkio([456, 455, 454, 356, 656, 654]) == [456, 454, 654], "Third, [456, 656, 654] is correct too"
| [
"="
] | = |
0c97b72236200ab4983b904865a9cc78a9c4a3bd | 295b94e0e1be3ddf1d17d5c7c8fc899bf8385d63 | /Generator/models.py | 33ea404ba75168144ff5db7eabcdfd3dc6f8377f | [] | no_license | NavenAllen/Question-Banks-Generator | 4e4b235cd451798a4401e2010d14d95939f81961 | 97841c39a1fc5ecd4e8e573eb2b9cbd909ce5a5f | refs/heads/master | 2020-03-08T03:17:41.830076 | 2018-04-11T20:38:49 | 2018-04-11T20:38:49 | 127,886,562 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 322 | py | from django.db import models
from django.forms import ModelForm
class Upload(models.Model):
    """An uploaded file plus its upload timestamp."""
    # uploaded file, stored under MEDIA_ROOT/images/
    pic = models.FileField(upload_to="images/")
    # set automatically when the row is first created
    upload_date=models.DateTimeField(auto_now_add =True)
# ModelForm exposing only the file field of Upload.
class UploadForm(ModelForm):
    """Form used to upload a single file."""
    class Meta:
        model = Upload
        fields = ('pic',)
| [
"naven1999@gmail.com"
] | naven1999@gmail.com |
8768faa5431569743e0a31b1002db656d70a142c | 6fdb4eaf5b0e6dbd7db4bf947547541e9aebf110 | /shared-data/python/tests/errors/__init__.py | 8b858a24b392381b87b32f4c5db9f32be4fbee49 | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] | permissive | Opentrons/opentrons | 874321e01149184960eeaeaa31b1d21719a1ceda | 026b523c8c9e5d45910c490efb89194d72595be9 | refs/heads/edge | 2023-09-02T02:51:49.579906 | 2023-08-31T16:02:45 | 2023-08-31T16:02:45 | 38,644,841 | 326 | 174 | Apache-2.0 | 2023-09-14T21:47:20 | 2015-07-06T20:41:01 | Python | UTF-8 | Python | false | false | 43 | py | """Tests for shared-data global errors."""
| [
"noreply@github.com"
] | noreply@github.com |
11c43d634df186462fbdd367e52b5f01578ff910 | b3f7b53a6c0f9abb4b5947f490abc962855eedd8 | /member/migrations/0001_initial.py | 359549a906d1ec930c565b02715f9b4bff3a8519 | [] | no_license | 17611165193/shiqing | e43dfd9640451e83fa4fc0d0c056a04746720766 | e4f8949f9c8b8578d21106da647524d091827484 | refs/heads/master | 2022-12-12T18:12:26.312807 | 2018-09-18T06:44:20 | 2018-09-18T06:44:20 | 149,234,968 | 0 | 0 | null | 2022-12-08T02:48:14 | 2018-09-18T05:44:13 | Python | UTF-8 | Python | false | false | 933 | py | # Generated by Django 2.1 on 2018-09-07 06:05
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the Member table.

    NOTE(review): ``max_length`` is not a valid option for IntegerField
    (Django ignores it / flags it in system checks), and storing a phone
    number as an integer drops leading zeros -- consider CharField. Do not
    edit this applied migration; fix the model and add a new migration.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Member',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50, null=True, verbose_name='姓名')),
                ('password', models.CharField(max_length=50, null=True, verbose_name='用户密码')),
                ('mailbox', models.EmailField(max_length=20, null=True, verbose_name='邮箱')),
                ('phone', models.IntegerField(max_length=20, null=True, verbose_name='手机号码')),
                ('created_at', models.DateTimeField(auto_now_add=True, null=True, verbose_name='创建时间')),
            ],
        ),
    ]
| [
"liuwei19990123@163.com"
] | liuwei19990123@163.com |
b3e461cea550883ae63c8977bc70ae4e86235418 | 68f04ff1df8dc61636db7a015b752e313ca21dfa | /PythonBootCamp/selectionsort.py | 00ed457b6e9b914bf79412dade261b4d646b1fe8 | [] | no_license | himanshusoni30/PythonProjects | 5497352055aaf53b5ebda2c98651a6a5763ef496 | 239130a97d74596e3a4ca4c3566ee2b0156f7418 | refs/heads/master | 2022-12-20T00:13:17.585447 | 2020-09-18T19:11:38 | 2020-09-18T19:11:38 | 296,708,644 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 772 | py | '''Selection Sort in arrays (list)'''
def sortAscending(arr):
    """Sort *arr* in place in ascending order using selection sort.

    Fixes over the original: the arithmetic add/subtract swap only worked
    for numbers (and obscured intent); a tuple swap works for any mutually
    comparable elements. The loop is now a true selection sort (one swap
    per pass) instead of swapping every inversion found.

    Args:
        arr: mutable sequence of mutually comparable items; modified in place.
    Returns:
        None (the list is sorted in place, matching the original interface).
    """
    for i in range(len(arr) - 1):
        # find the index of the smallest remaining element
        min_idx = i
        for j in range(i + 1, len(arr)):
            if arr[j] < arr[min_idx]:
                min_idx = j
        if min_idx != i:
            arr[i], arr[min_idx] = arr[min_idx], arr[i]
def sortDescending(arr):
    """Sort *arr* in place in descending order using selection sort.

    Same fixes as sortAscending: tuple swap instead of the numeric
    add/subtract trick (which broke on non-numeric elements), and a
    classic selection sort with one swap per pass.

    Args:
        arr: mutable sequence of mutually comparable items; modified in place.
    Returns:
        None (the list is sorted in place, matching the original interface).
    """
    for i in range(len(arr) - 1):
        # find the index of the largest remaining element
        max_idx = i
        for j in range(i + 1, len(arr)):
            if arr[j] > arr[max_idx]:
                max_idx = j
        if max_idx != i:
            arr[i], arr[max_idx] = arr[max_idx], arr[i]
def printSortedArray(arr):
    """Print the (sorted) list using the default list repr."""
    print(arr)
# Demo driver: sort the same list both ways and show each stage.
arr = [17, 25, 31, 13, 2, 32, 65, 100, 2000]
print("Array before sorting: ")
print(arr)
sortAscending(arr)
print("Array after sorting in ascending order: ")
printSortedArray(arr)
sortDescending(arr)
print("Array after sorting in descending order: ")
printSortedArray(arr)
"eng.sonihimanshu@gmail.com"
] | eng.sonihimanshu@gmail.com |
64105f427369003eb4056a2e87bd1dab94884668 | 8fea1939599995000b87f3c192244b8a00b168c9 | /python/shangwubu/shangwubu/spiders/shangwubu_news.py | 5b3fb351dddb29e25b5794097b87a4893b8f96b6 | [] | no_license | syd359/nlpwidg | 3d177dbfd61b71cb897af7d9c3e3686c64885672 | d7e8647d35b800003c10c74ab72114613baaebd0 | refs/heads/master | 2020-03-17T14:32:08.492487 | 2018-05-19T10:37:24 | 2018-05-19T10:37:24 | 133,675,944 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,422 | py | import scrapy
import re
from shangwubu.items import ShangwubuItem
from robobrowser import RoboBrowser
import jieba
class ShangwubuSpider(scrapy.Spider):
    """Crawl news listings from the MOFCOM (Chinese Ministry of Commerce) site.

    Walks the paginated list at ``/article/ae/ai/`` and follows each entry
    to scrape its article body into a ShangwubuItem.
    """
    name = "shangwubu_news"
    start_urls = [
        'http://www.mofcom.gov.cn/article/ae/ai/?'
    ]
    allowed_domains = [
        'mofcom.gov.cn'
    ]
    def parse(self, response):
        '''
        Parse one listing page: yield a Request per news entry (carrying a
        partially-filled item in meta) and schedule the next listing page.
        Fields collected here: title, post_time, url; content is filled in
        parse_content; keywords/category are not extracted yet.
        '''
        for el in response.css('div.listBox li'):
            item = ShangwubuItem()
            item['title'] = el.css('a::text').extract_first()
            item['post_time'] = el.css('span::text').extract_first()
            url = el.css('a::attr(href)').extract_first()
            if url:
                # NOTE(review): href values starting with '/' produce a
                # double slash here -- response.urljoin would be safer;
                # confirm against the live markup before changing.
                item['url'] = 'http://www.mofcom.gov.cn/' + url
            else:
                item['url'] = url
            content_page = el.css('a::attr(href)').extract_first()
            content_page_url = response.urljoin(content_page)
            yield scrapy.Request(content_page_url, meta={'item': item}, callback=self.parse_content)
        # next page: the current page number is embedded in an inline script
        next_page_number = response.css('div.listBox script::text').extract_first()
        pattern = 'currentpage = "(.*?)";'
        next_page = int(re.findall(pattern, next_page_number)[0]) + 1
        # hard crawl limit: stop after page 200
        if next_page < 201:
            url = 'http://www.mofcom.gov.cn/article/ae/ai/?' + str(next_page)
            next_page_url = response.urljoin(url)
            yield scrapy.Request(next_page_url, callback=self.parse)
    def parse_content(self, response):
        '''
        Fill in the article body for the item passed via request meta and
        yield the completed item. A category field is planned but not
        extracted yet.
        '''
        item = response.meta['item']
        item['content'] = response.css('div.artCon P::text').extract()
        yield item
| [
"siyudong359@gmail.com"
] | siyudong359@gmail.com |
d7df6a4d66ed2fa92ca477942ec9176c1f23591a | f5f771cd8600c2aeb7fc9b192d9084ec5fdf3616 | /lux/extensions/odm/mapper.py | ef04cc0a9b43586b1fb4efb156df2f1e77bd748a | [
"BSD-3-Clause"
] | permissive | SirZazu/lux | 75fe9fde4ddaee1c9c17e55c6e6d07a289ea2f5b | d647c34d11d1172d40e16b6afaba4ee67950fb5a | refs/heads/master | 2021-01-21T19:40:46.536485 | 2015-06-02T16:30:18 | 2015-06-02T16:30:18 | 36,931,033 | 0 | 3 | null | 2015-10-09T14:08:26 | 2015-06-05T12:15:21 | Python | UTF-8 | Python | false | false | 11,508 | py | import re
import os
import logging
from copy import copy
from contextlib import contextmanager
from inspect import ismodule
from importlib import import_module
from itertools import chain
from sqlalchemy import MetaData, Table, inspect, event, exc
from sqlalchemy.engine import create_engine
from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy.ext.declarative import DeclarativeMeta
from sqlalchemy.orm.session import Session
from pulsar import ImproperlyConfigured
from pulsar.apps.data import Store, create_store
# NOTE(review): _camelcase_re appears unused within this chunk -- verify
# callers elsewhere in the module before removing.
_camelcase_re = re.compile(r'([A-Z]+)(?=[a-z0-9])')
logger = logging.getLogger('lux.odm')
class BaseModel(object):
    """Declarative mixin deriving ``__tablename__`` from the class name."""
    @declared_attr
    def __tablename__(self):
        # lower-cased class name, e.g. UserToken -> "usertoken"
        return self.__name__.lower()
# Declarative base class that application models inherit from
Model = declarative_base(cls=BaseModel)
class Mapper:
'''SQLAlchemy wrapper for lux applications
'''
    def __init__(self, app, binds):
        # binds: connection string or {name: connection-string} mapping;
        # parsed by _autodiscover into engines and the model registry
        self.app = app
        self._autodiscover(binds)
    def __getitem__(self, model):
        # mapper[name] -> declarative model class registered under ``name``
        return self._declarative_register[model]
def __getattr__(self, name):
if name in self._declarative_register:
return self._declarative_register[name]
raise AttributeError('No model named "%s"' % name)
    def database_create(self, database, **params):
        '''Create databases for each engine and return a new :class:`.Mapper`.

        ``database`` is either a name or a callable receiving the engine and
        returning the name to create on that engine.
        '''
        binds = {}
        dbname = database
        for key, engine in self.keys_engines():
            if hasattr(database, '__call__'):
                dbname = database(engine)
            assert dbname, "Cannot create a database, no db name given"
            # internally the default engine is keyed None; binds use 'default'
            key = key if key else 'default'
            binds[key] = self._database_create(engine, dbname)
        return self.__class__(self.app, binds)
def database_all(self):
'''Return a dictionary mapping engines with databases
'''
all = {}
for engine in self.engines():
all[engine] = self._database_all(engine)
return all
    def database_drop(self, database=None, **params):
        """Drop a database from every engine.

        ``database`` is a name or a callable receiving the engine and
        returning the name to drop on that engine.
        """
        dbname = database
        for engine in self.engines():
            if hasattr(database, '__call__'):
                dbname = database(engine)
            assert dbname, "Cannot drop database, no db name given"
            self._database_drop(engine, dbname)
def tables(self):
tables = []
for engine in self.engines():
tbs = engine.table_names()
if tbs:
tables.append((str(engine.url), tbs))
return tables
    def table_create(self, remove_existing=False):
        """Creates all tables.

        When ``remove_existing`` is True existing tables should be replaced,
        but that branch is not implemented yet and is currently a no-op.
        """
        for engine in self.engines():
            tables = self._get_tables(engine)
            if not remove_existing:
                self.metadata.create_all(engine, tables=tables)
            else:
                # TODO: drop-and-recreate path was never implemented
                pass
    def table_drop(self):
        """Drops all tables.
        """
        for engine in self.engines():
            # drop only the tables bound to each particular engine
            self.metadata.drop_all(engine, tables=self._get_tables(engine))
    def reflect(self, bind='__all__'):
        """Reflects tables from the database.
        """
        # NOTE(review): _execute_for_all_tables is defined outside this chunk
        self._execute_for_all_tables(bind, 'reflect', skip_tables=True)
    @contextmanager
    def begin(self, close=True, expire_on_commit=False, **options):
        """Provide a transactional scope around a series of operations.

        By default, ``expire_on_commit`` is set to False so that instances
        can be used outside the session. Commits on clean exit, rolls back
        and re-raises on error; closes the session unless ``close`` is False.
        """
        session = self.session(expire_on_commit=expire_on_commit, **options)
        try:
            yield session
            session.commit()
        except Exception:
            session.rollback()
            raise
        finally:
            if close:
                session.close()
    def session(self, **options):
        """Create a new session bound to this mapper's engines."""
        options['binds'] = self.binds
        return LuxSession(self, **options)
def get_engine(self, key=None):
'''Get an engine by key
'''
if key in self._engines:
return self._engines[key]
elif key in self._nosql_engines:
return self._nosql_engines[key]
    def engines(self):
        # iterate over all engines, sql first then nosql
        return chain(self._engines.values(), self._nosql_engines.values())
def keys_engines(self):
return chain(self._engines.items(), self._nosql_engines.items())
def close(self):
for engine in self.engines():
engine.dispose()
# INTERNALS
def _get_tables(self, engine):
tables = []
for table, eng in self.binds.items():
if eng == engine:
tables.append(table)
return tables
def _database_all(self, engine):
if isinstance(engine, Store):
return engine.database_all()
elif engine.name == 'sqlite':
database = engine.url.database
if os.path.isfile(database):
return [database]
else:
return []
else:
insp = inspect(engine)
return insp.get_schema_names()
    def _database_create(self, engine, dbname):
        """Create database ``dbname`` on ``engine``; return its URL string.

        For NoSQL stores the (asynchronous) creation result is returned
        directly; for sqlite no server-side creation is needed and only the
        URL is rewritten.
        """
        if isinstance(engine, Store):
            from pulsar.apps.greenio import wait
            return wait(engine.database_create(dbname))
        elif engine.name != 'sqlite':
            conn = engine.connect()
            # the connection will still be inside a transaction,
            # so we have to end the open transaction with a commit
            conn.execute("commit")
            # SECURITY NOTE(review): dbname is interpolated into raw SQL;
            # make sure it never comes from untrusted input.
            conn.execute('create database %s' % dbname)
            conn.close()
        url = copy(engine.url)
        url.database = dbname
        return str(url)
    def _database_drop(self, engine, database):
        """Drop ``database`` from ``engine`` (internal helper).

        sqlite databases are plain files, so they are simply removed from
        disk (a missing file is ignored).
        """
        logger.info('dropping database "%s" from %s', database, engine)
        if engine.name == 'sqlite':
            try:
                os.remove(database)
            except FileNotFoundError:
                pass
        elif isinstance(engine, Store):
            engine.database_drop(database)
        else:
            conn = engine.connect()
            # end the implicit transaction before issuing DDL
            conn.execute("commit")
            # SECURITY NOTE(review): database name is interpolated into raw
            # SQL; ensure it never comes from untrusted input.
            conn.execute('drop database %s' % database)
            conn.close()
def _autodiscover(self, binds):
# Setup mdoels and engines
if not binds:
binds = {}
elif isinstance(binds, str):
binds = {'default': binds}
if binds and 'default' not in binds:
raise ImproperlyConfigured('default datastore not specified')
self.metadata = MetaData()
self._engines = {}
self._nosql_engines = {}
self._declarative_register = {}
self.binds = {}
# Create all sql engines in the binds dictionary
# Quietly fails if the engine is not recognised,
# it my be a NoSQL store
for name, bind in tuple(binds.items()):
key = None if name == 'default' else name
try:
self._engines[key] = create_engine(bind)
except exc.NoSuchModuleError:
self._nosql_engines[key] = create_store(bind)
#
if self._nosql_engines and not self.app.green_pool:
raise ImproperlyConfigured('NoSql stores requires GREEN_POOL')
for label, mod in module_iterator(self.app.config['EXTENSIONS']):
# Loop through attributes in mod_models
for name in dir(mod):
value = getattr(mod, name)
if isinstance(value, (Table, DeclarativeMeta)):
for table in value.metadata.sorted_tables:
if table.key not in self.metadata.tables:
engine = None
label = table.info.get('bind_label')
keys = ('%s.%s' % (label, table.key),
label, None) if label else (None,)
for key in keys:
engine = self.get_engine(key)
if engine:
break
assert engine
table.tometadata(self.metadata)
self.binds[table] = engine
if (isinstance(value, DeclarativeMeta) and
hasattr(value, '__table__')):
table = value.__table__
self._declarative_register[table.key] = value
class LuxSession(Session):
"""The sql alchemy session that lux uses.
It extends the default session system with bind selection and
modification tracking.
"""
def __init__(self, mapper, **options):
#: The application that this session belongs to.
self.mapper = mapper
if self.app.config['DATABASE_SESSION_SIGNALS']:
self.register()
super().__init__(**options)
@property
def app(self):
return self.mapper.app
def register(self):
if not hasattr(self, '_model_changes'):
self._model_changes = {}
event.listen(self, 'before_flush', self.record_ops)
event.listen(self, 'before_commit', self.record_ops)
event.listen(self, 'before_commit', self.before_commit)
event.listen(self, 'after_commit', self.after_commit)
event.listen(self, 'after_rollback', self.after_rollback)
@staticmethod
def record_ops(session, flush_context=None, instances=None):
try:
d = session._model_changes
except AttributeError:
return
for targets, operation in ((session.new, 'insert'),
(session.dirty, 'update'),
(session.deleted, 'delete')):
for target in targets:
state = inspect(target)
key = state.identity_key if state.has_identity else id(target)
d[key] = (target, operation)
@staticmethod
def before_commit(session):
try:
d = session._model_changes
except AttributeError:
return
# if d:
# before_models_committed.send(session.app,
# changes=list(d.values()))
@staticmethod
def after_commit(session):
try:
d = session._model_changes
except AttributeError:
return
# if d:
# models_committed.send(session.app, changes=list(d.values()))
# d.clear()
@staticmethod
def after_rollback(session):
try:
d = session._model_changes
except AttributeError:
return
# d.clear()
def module_iterator(application):
    '''Yield ``(label, models-module)`` pairs for an application.

    ``application`` may be a module, a dotted module name, or any iterable
    of either; iterables are walked recursively.
    '''
    if not (ismodule(application) or isinstance(application, str)):
        # A collection of applications: recurse into each entry.
        for app in application:
            yield from module_iterator(app)
        return
    if ismodule(application):
        mod, application = application, application.__name__
    else:
        try:
            mod = import_module(application)
        except ImportError:
            mod = None  # module not importable: nothing to yield
    if not mod:
        return
    label = application.split('.')[-1]
    # Prefer a dedicated ``<app>.models`` module, falling back to the
    # application module itself.
    try:
        mod_models = import_module('.models', application)
    except ImportError:
        mod_models = mod
    label = getattr(mod_models, 'APP_LABEL', label)
    yield label, mod_models
| [
"luca.sbardella@gmail.com"
] | luca.sbardella@gmail.com |
# Temperatures per city, stored as "<number> <unit>" where unit is F or C.
city_temp = {
    "Boston": "0 C",
    "Boise": "48 F",
    "Phoenix": "85 F",
    "Miami": "40 C",
    "Riverside": "30 C",
    "Baltimore": "32 F"
}

for key, value in city_temp.items():
    val = int(value[:-2])  # numeric part; the last two characters are " F"/" C"
    unit = value[-1]
    # BUG FIX: the original tested value[-1] == ("F" or "f"), which always
    # evaluates to "F" and silently misses lowercase units; same for Celsius.
    # Also fixed the "degress" typo in the printed messages.
    if unit in ("F", "f"):
        print("In %s it is %s degrees Fahrenheit\n\twhich is equivalent to %d degrees Celsius" % (key, value[:-2], (val - 32) * 5 / 9))
    elif unit in ("C", "c"):
        print("In %s it is %s degrees Celsius\n\twhich is equivalent to %d degrees Fahrenheit" % (key, value[:-2], (val * (9 / 5)) + 32))
    else:
        print("-1")
"jonathankimballwood@gmail.com"
] | jonathankimballwood@gmail.com |
62cca5b8ca0a33c7f2733ab7f0ba980c10fd57d2 | 236d6f9896d6e39ee72015d957204cc7de0f2e44 | /weather.py | 8b1463709547a5fb98ef113396a87468a6387d01 | [] | no_license | codeasylums-bootcamp/bazinga_ML_winter19 | c3b26a3e544631c42eff5eec9c3462520209680d | 6134aed1b84306292bf5239c683ac0778b6a9917 | refs/heads/master | 2020-11-24T09:32:30.133380 | 2020-01-12T04:33:29 | 2020-01-12T04:33:29 | 228,081,407 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 137 | py | #!/bin/python3
import subprocess
import sys  # NOTE(review): imported but never used
# Prompt for a location, then fetch its weather report from wttr.in.
place=input("What place?\n")
# Build the wttr.in URL; curl is invoked with an argument list (no shell),
# so the user input is not shell-interpreted.
place="wttr.in/"+place
subprocess.call(["curl",str(place)])
| [
"mdtngr@gmail.com"
] | mdtngr@gmail.com |
7c7042124f67b3df6bb20cbc607c2758baf785d8 | 35db584864327388aa40a2ad0c7333ae34233446 | /esp32/micropython/uftpd.py | 289cc82c80234f486e16ff51545eab3f84ed2312 | [] | no_license | emard/ulx3s-bin | 7b7d3b61961bcf919671fa3eb7674a9410cd3f1d | 2a40f50e0142f2b2856bf0a7471a8741881ec427 | refs/heads/master | 2022-05-31T01:50:50.577033 | 2022-04-19T16:07:00 | 2022-04-19T16:07:00 | 124,758,000 | 24 | 8 | null | 2022-04-12T17:45:54 | 2018-03-11T13:14:35 | Python | UTF-8 | Python | false | false | 18,999 | py | #
# Small ftp server for ESP8266 Micropython
# Based on the work of chrisgp - Christopher Popp and pfalcon - Paul Sokolovsky
#
# The server accepts passive mode only. It runs in background.
# Start the server with:
#
# import uftpd
# uftpd.start([port = 21][, verbose = level])
#
# port is the port number (default 21)
# verbose controls the level of printed activity messages, values 0, 1, 2
#
# Copyright (c) 2016 Christopher Popp (initial ftp server framework)
# Copyright (c) 2016 Paul Sokolovsky (background execution control structure)
# Copyright (c) 2016 Robert Hammelrath (putting the pieces together and a
# few extensions)
# Distributed under MIT License
#
import socket
import network
import uos
from gc import collect
from time import sleep_ms, localtime
from micropython import alloc_emergency_exception_buf
from machine import SDCard, Pin
# constant definitions
_CHUNK_SIZE = const(1024)
_SO_REGISTER_HANDLER = const(20)
_COMMAND_TIMEOUT = const(300)
_DATA_TIMEOUT = const(100)
_DATA_PORT = const(13333)
# Global variables
ftpsocket = None
datasocket = None
client_list = []
verbose_l = 0
client_busy = False
# Interfaces: (IP-Address (string), IP-Address (integer), Netmask (integer))
AP_addr = ("0.0.0.0", 0, 0xffffff00)
STA_addr = ("0.0.0.0", 0, 0xffffff00)
_month_name = ("", "Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
class FTP_client:
def __init__(self, ftpsocket):
global AP_addr, STA_addr
self.command_client, self.remote_addr = ftpsocket.accept()
self.remote_addr = self.remote_addr[0]
self.command_client.settimeout(_COMMAND_TIMEOUT)
log_msg(1, "FTP Command connection from:", self.remote_addr)
self.command_client.setsockopt(socket.SOL_SOCKET,
_SO_REGISTER_HANDLER,
self.exec_ftp_command)
self.command_client.sendall("220 Hello, this is the ULX3S.\r\n")
self.cwd = '/'
self.fromname = None
# self.logged_in = False
self.act_data_addr = self.remote_addr
self.DATA_PORT = 20
self.active = True
# check which interface was used by comparing the caller's ip
# adress with the ip adresses of STA and AP; consider netmask;
# select IP address for passive mode
if ((AP_addr[1] & AP_addr[2]) ==
(num_ip(self.remote_addr) & AP_addr[2])):
self.pasv_data_addr = AP_addr[0]
elif ((STA_addr[1] & STA_addr[2]) ==
(num_ip(self.remote_addr) & STA_addr[2])):
self.pasv_data_addr = STA_addr[0]
elif ((AP_addr[1] == 0) and (STA_addr[1] != 0)):
self.pasv_data_addr = STA_addr[0]
elif ((AP_addr[1] != 0) and (STA_addr[1] == 0)):
self.pasv_data_addr = AP_addr[0]
else:
self.pasv_data_addr = "0.0.0.0" # Invalid value
def send_list_data(self, path, data_client, full):
try:
for fname in uos.listdir(path):
data_client.sendall(self.make_description(path, fname, full))
except: # path may be a file name or pattern
path, pattern = self.split_path(path)
try:
for fname in uos.listdir(path):
if self.fncmp(fname, pattern):
data_client.sendall(
self.make_description(path, fname, full))
except:
pass
def make_description(self, path, fname, full):
global _month_name
if full:
stat = uos.stat(self.get_absolute_path(path, fname))
file_permissions = ("drwxr-xr-x"
if (stat[0] & 0o170000 == 0o040000)
else "-rw-r--r--")
file_size = stat[6]
tm = localtime(stat[7])
if tm[0] != localtime()[0]:
description = "{} 1 owner group {:>10} {} {:2} {:>5} {}\r\n".\
format(file_permissions, file_size,
_month_name[tm[1]], tm[2], tm[0], fname)
else:
description = "{} 1 owner group {:>10} {} {:2} {:02}:{:02} {}\r\n".\
format(file_permissions, file_size,
_month_name[tm[1]], tm[2], tm[3], tm[4], fname)
else:
description = fname + "\r\n"
return description
def send_file_data(self, path, data_client):
with open(path,"rb") as file:
chunk = file.read(_CHUNK_SIZE)
while len(chunk) > 0:
data_client.sendall(chunk)
chunk = file.read(_CHUNK_SIZE)
data_client.close()
def save_file_data(self, path, data_client, mode):
with open(path, mode) as file:
chunk = data_client.recv(_CHUNK_SIZE)
while len(chunk) > 0:
file.write(chunk)
chunk = data_client.recv(_CHUNK_SIZE)
data_client.close()
def get_absolute_path(self, cwd, payload):
# Just a few special cases "..", "." and ""
# If payload start's with /, set cwd to /
# and consider the remainder a relative path
if payload.startswith('/'):
cwd = "/"
for token in payload.split("/"):
if token == '..':
cwd = self.split_path(cwd)[0]
elif token != '.' and token != '':
if cwd == '/':
cwd += token
else:
cwd = cwd + '/' + token
return cwd
def split_path(self, path): # instead of path.rpartition('/')
tail = path.split('/')[-1]
head = path[:-(len(tail) + 1)]
return ('/' if head == '' else head, tail)
    # compare fname against pattern. Pattern may contain
    # the wildcards ? and *.
    def fncmp(self, fname, pattern):
        """Glob-style match of *fname* against *pattern*.

        '?' matches exactly one character, '*' matches any run of
        characters (recursively). Returns True on a full match.
        """
        pi = 0  # index into pattern
        si = 0  # index into fname
        while pi < len(pattern) and si < len(fname):
            if (fname[si] == pattern[pi]) or (pattern[pi] == '?'):
                # literal match or single-char wildcard: advance both
                si += 1
                pi += 1
            else:
                if pattern[pi] == '*':  # recurse
                    if pi == len(pattern.rstrip("*?")):  # only wildcards left
                        return True
                    # try the rest of the pattern at every later position
                    while si < len(fname):
                        if self.fncmp(fname[si:], pattern[pi + 1:]):
                            return True
                        else:
                            si += 1
                    return False
                else:
                    return False
        # a full match requires both exhausted, modulo trailing '*'
        if pi == len(pattern.rstrip("*")) and si == len(fname):
            return True
        else:
            return False
def open_dataclient(self):
if self.active: # active mode
data_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
data_client.settimeout(_DATA_TIMEOUT)
data_client.connect((self.act_data_addr, self.DATA_PORT))
log_msg(1, "FTP Data connection with:", self.act_data_addr)
else: # passive mode
data_client, data_addr = datasocket.accept()
log_msg(1, "FTP Data connection with:", data_addr[0])
return data_client
def mount(self):
try:
self.sd = SDCard(slot=3)
uos.mount(self.sd,"/sd")
return True
except:
return False
def umount(self):
try:
uos.umount("/sd")
try:
self.sd.deinit()
del self.sd
except:
pass
# let all SD pins be inputs
for i in bytearray([2,4,12,13,14,15]):
p = Pin(i,Pin.IN)
a = p.value()
del p, a
return True
except:
return False
def exec_ftp_command(self, cl):
global datasocket
global client_busy
global my_ip_addr
try:
collect()
data = cl.readline().decode("utf-8").rstrip("\r\n")
if len(data) <= 0:
# No data, close
# This part is NOT CLEAN; there is still a chance that a
# closing data connection will be signalled as closing
# command connection
log_msg(1, "*** No data, assume QUIT")
close_client(cl)
return
if client_busy: # check if another client is busy
cl.sendall("400 Device busy.\r\n") # tell so the remote client
return # and quit
client_busy = True # now it's my turn
# check for log-in state may done here, like
# if self.logged_in == False and not command in\
# ("USER", "PASS", "QUIT"):
# cl.sendall("530 Not logged in.\r\n")
# return
command = data.split()[0].upper()
payload = data[len(command):].lstrip() # partition is missing
path = self.get_absolute_path(self.cwd, payload)
log_msg(1, "Command={}, Payload={}".format(command, payload))
if command == "USER":
# self.logged_in = True
cl.sendall("230 Logged in.\r\n")
# If you want to see a password,return
# "331 Need password.\r\n" instead
# If you want to reject an user, return
# "530 Not logged in.\r\n"
elif command == "PASS":
# you may check here for a valid password and return
# "530 Not logged in.\r\n" in case it's wrong
# self.logged_in = True
cl.sendall("230 Logged in.\r\n")
elif command == "SYST":
cl.sendall("215 UNIX Type: L8\r\n")
elif command in ("TYPE", "NOOP", "ABOR"): # just accept & ignore
cl.sendall('200 OK\r\n')
elif command == "QUIT":
cl.sendall('221 Bye.\r\n')
close_client(cl)
elif command == "PWD" or command == "XPWD":
cl.sendall('257 "{}"\r\n'.format(self.cwd))
elif command == "CWD" or command == "XCWD":
try:
if (uos.stat(path)[0] & 0o170000) == 0o040000:
self.cwd = path
cl.sendall('250 OK\r\n')
else:
cl.sendall('550 Fail\r\n')
except:
cl.sendall('550 Fail\r\n')
elif command == "PASV":
cl.sendall('227 Entering Passive Mode ({},{},{}).\r\n'.format(
self.pasv_data_addr.replace('.', ','),
_DATA_PORT >> 8, _DATA_PORT % 256))
self.active = False
elif command == "PORT":
items = payload.split(",")
if len(items) >= 6:
self.act_data_addr = '.'.join(items[:4])
if self.act_data_addr == "127.0.1.1":
# replace by command session addr
self.act_data_addr = self.remote_addr
self.DATA_PORT = int(items[4]) * 256 + int(items[5])
cl.sendall('200 OK\r\n')
self.active = True
else:
cl.sendall('504 Fail\r\n')
elif command == "LIST" or command == "NLST":
if payload.startswith("-"):
option = payload.split()[0].lower()
path = self.get_absolute_path(
self.cwd, payload[len(option):].lstrip())
else:
option = ""
try:
data_client = self.open_dataclient()
cl.sendall("150 Directory listing:\r\n")
self.send_list_data(path, data_client,
command == "LIST" or 'l' in option)
cl.sendall("226 Done.\r\n")
data_client.close()
except:
cl.sendall('550 Fail\r\n')
if data_client is not None:
data_client.close()
elif command == "RETR":
try:
data_client = self.open_dataclient()
cl.sendall("150 Opened data connection.\r\n")
self.send_file_data(path, data_client)
# if the next statement is reached,
# the data_client was closed.
data_client = None
cl.sendall("226 Done.\r\n")
except:
cl.sendall('550 Fail\r\n')
if data_client is not None:
data_client.close()
elif command == "STOR" or command == "APPE":
result = False
try:
data_client = self.open_dataclient()
cl.sendall("150 Opened data connection.\r\n")
if path == "/fpga":
import ecp5
ecp5.prog_stream(data_client,_CHUNK_SIZE)
result = ecp5.prog_close()
data_client.close()
elif path.startswith("/flash@"):
import ecp5
dummy, addr = path.split("@")
addr = int(addr)
result = ecp5.flash_stream(data_client,addr)
ecp5.flash_close()
del addr, dummy
data_client.close()
elif path.startswith("/sd@"):
import sdraw
dummy, addr = path.split("@")
addr = int(addr)
sd_raw = sdraw.sdraw()
result = sd_raw.sd_write_stream(data_client,addr)
del sd_raw, addr, dummy
data_client.close()
else:
self.save_file_data(path, data_client,
"w" if command == "STOR" else "a")
result = True
# if the next statement is reached,
# the data_client was closed.
data_client = None
except:
if data_client is not None:
data_client.close()
if result:
cl.sendall("226 Done.\r\n")
else:
cl.sendall('550 Fail\r\n')
del result
elif command == "SIZE":
try:
cl.sendall('213 {}\r\n'.format(uos.stat(path)[6]))
except:
cl.sendall('550 Fail\r\n')
elif command == "STAT":
if payload == "":
cl.sendall("211-Connected to ({})\r\n"
" Data address ({})\r\n"
" TYPE: Binary STRU: File MODE: Stream\r\n"
" Session timeout {}\r\n"
"211 Client count is {}\r\n".format(
self.remote_addr, self.pasv_data_addr,
_COMMAND_TIMEOUT, len(client_list)))
else:
cl.sendall("213-Directory listing:\r\n")
self.send_list_data(path, cl, True)
cl.sendall("213 Done.\r\n")
elif command == "DELE":
try:
uos.remove(path)
cl.sendall('250 OK\r\n')
except:
cl.sendall('550 Fail\r\n')
elif command == "RNFR":
try:
# just test if the name exists, exception if not
uos.stat(path)
self.fromname = path
cl.sendall("350 Rename from\r\n")
except:
cl.sendall('550 Fail\r\n')
elif command == "RNTO":
try:
uos.rename(self.fromname, path)
cl.sendall('250 OK\r\n')
except:
cl.sendall('550 Fail\r\n')
self.fromname = None
elif command == "CDUP" or command == "XCUP":
self.cwd = self.get_absolute_path(self.cwd, "..")
cl.sendall('250 OK\r\n')
elif command == "RMD" or command == "XRMD":
try:
uos.rmdir(path)
cl.sendall('250 OK\r\n')
except:
cl.sendall('550 Fail\r\n')
elif command == "MKD" or command == "XMKD":
try:
uos.mkdir(path)
cl.sendall('250 OK\r\n')
except:
cl.sendall('550 Fail\r\n')
elif command == "SITE":
if path == "/mount":
if self.mount():
cl.sendall('250 OK\r\n')
else:
cl.sendall('550 Fail\r\n')
elif path == "/umount":
if self.umount():
cl.sendall('250 OK\r\n')
else:
cl.sendall('550 Fail\r\n')
elif path == "/passthru":
import ecp5
ecp5.passthru()
cl.sendall('250 OK passthru\r\n')
elif path.endswith(".bit") or path.endswith(".bit.gz"):
try:
import ecp5
if ecp5.prog(path, close=False):
if path.startswith("/sd/"):
try:
self.umount()
cl.sendall('111 umount /sd OK\r\n')
except:
cl.sendall('411 umount /sd Fail\r\n')
if ecp5.prog_close():
cl.sendall('250 OK\r\n')
else:
cl.sendall('550 Fail\r\n')
else:
cl.sendall('550 Fail\r\n')
except:
cl.sendall('550 Fail\r\n')
else:
if path.startswith("/"):
exe=path[1:]
else:
exe=path
try:
exec(exe)
cl.sendall('250 OK '+exe+'\r\n')
except:
cl.sendall('550 Fail '+exe+'\r\n')
del exe
else:
cl.sendall("502 Unsupported command.\r\n")
# log_msg(2,
# "Unsupported command {} with payload {}".format(command,
# payload))
# handle unexpected errors
except Exception as err:
log_msg(1, "Exception in exec_ftp_command: {}".format(err))
# tidy up before leaving
client_busy = False
def log_msg(level, *args):
    """Print *args* when the global verbosity is at least *level*."""
    global verbose_l
    if verbose_l >= level:
        print(*args)
# close client and remove it from the list
def close_client(cl):
cl.setsockopt(socket.SOL_SOCKET, _SO_REGISTER_HANDLER, None)
cl.close()
for i, client in enumerate(client_list):
if client.command_client == cl:
del client_list[i]
break
def accept_ftp_connect(ftpsocket):
# Accept new calls for the server
try:
client_list.append(FTP_client(ftpsocket))
except:
log_msg(1, "Attempt to connect failed")
# try at least to reject
try:
temp_client, temp_addr = ftpsocket.accept()
temp_client.close()
except:
pass
def num_ip(ip):
    """Pack a dotted-quad IPv4 string into its 32-bit integer value."""
    a, b, c, d = (int(part) for part in ip.split("."))
    return (a << 24) | (b << 16) | (c << 8) | d
def stop():
global ftpsocket, datasocket
global client_list
global client_busy
for client in client_list:
client.command_client.setsockopt(socket.SOL_SOCKET, _SO_REGISTER_HANDLER, None)
client.command_client.close()
del client_list
client_list = []
client_busy = False
if ftpsocket is not None:
ftpsocket.setsockopt(socket.SOL_SOCKET, _SO_REGISTER_HANDLER, None)
ftpsocket.close()
if datasocket is not None:
datasocket.close()
# start listening for ftp connections on port 21
def start(port=21, verbose=0, splash=True):
global ftpsocket, datasocket
global verbose_l
global client_list
global client_busy
global AP_addr, STA_addr
alloc_emergency_exception_buf(100)
verbose_l = verbose
client_list = []
client_busy = False
ftpsocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
datasocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
ftpsocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
datasocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
ftpsocket.bind(('0.0.0.0', port))
datasocket.bind(('0.0.0.0', _DATA_PORT))
ftpsocket.listen(0)
datasocket.listen(0)
datasocket.settimeout(10)
ftpsocket.setsockopt(socket.SOL_SOCKET, _SO_REGISTER_HANDLER, accept_ftp_connect)
wlan = network.WLAN(network.AP_IF)
if wlan.active():
ifconfig = wlan.ifconfig()
# save IP address string and numerical values of IP adress and netmask
AP_addr = (ifconfig[0], num_ip(ifconfig[0]), num_ip(ifconfig[1]))
if splash:
print("FTP server started on {}:{}".format(ifconfig[0], port))
wlan = network.WLAN(network.STA_IF)
if wlan.active():
ifconfig = wlan.ifconfig()
# save IP address string and numerical values of IP adress and netmask
STA_addr = (ifconfig[0], num_ip(ifconfig[0]), num_ip(ifconfig[1]))
if splash:
print("FTP server started on {}:{}".format(ifconfig[0], port))
def restart(port=21, verbose=0, splash=True):
    """Stop the running FTP server and start it again with new settings."""
    stop()
    sleep_ms(200)  # give the closing sockets a moment before rebinding
    start(port, verbose, splash)
start(splash=True)
collect()
| [
"vordah@gmail.com"
] | vordah@gmail.com |
700521073b1e9083df2d03d4121f4e79d1fc9e92 | 81d19801555ff279b42902ed61b32bf42151f5b9 | /tuio/__init__.py | 4c6f3fde078b58235be17c1c3167d8458e38a301 | [] | no_license | midorinashi/CS402-Final-Project | ed507a70c79326cbbe5e66163bd27f6621ef54db | a3961ee5325edd6518f2508eb0c084ccc1c9b3e4 | refs/heads/master | 2021-04-28T01:25:52.892988 | 2018-06-08T22:56:52 | 2018-06-08T22:56:52 | 122,277,421 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,455 | py | # -*- coding: utf-8 -*-
"""A Python library that understands the TUIO protocol"""
__author__ = "Jannis Leidel"
__version__ = "0.1"
__copyright__ = "Copyright (c) 2007-2008 Jannis Leidel"
__license__ = "MIT"
__url__ = "http://code.google.com/p/pytuio/"
import os
import sys
import math
import socket
import inspect
import OSC
import profiles
class CallbackError(Exception):
pass
class Tracking(object):
def __init__(self, host='127.0.0.1', port=3333):
self.host = host
self.port = port
self.current_frame = 0
self.last_frame = 0
self.open_socket()
self.manager = OSC.CallbackManager()
self.profiles = self.load_profiles()
def open_socket(self):
"""
Opens the socket and binds to the given host and port. Uses
SO_REUSEPORT to be as robust as possible.
"""
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
self.socket.setblocking(0)
self.socket.bind((self.host, self.port))
start = open_socket
def close_socket(self):
"""
Closes the socket connection
"""
self.socket.close()
stop = close_socket
def refreshed(self):
"""
Returns True if there was a new frame
"""
return self.current_frame >= self.last_frame
def load_profiles(self):
"""
Loads all possible TUIO profiles and returns a dictionary with the
profile addresses as keys and an instance of a profile as the value
"""
_profiles = {}
for name, klass in inspect.getmembers(profiles):
if inspect.isclass(klass) and name.endswith('Profile') and name != 'TuioProfile':
# Adding profile to the self.profiles dictionary
profile = klass()
_profiles[profile.address] = profile
# setting convenient variable to access objects of profile
try:
setattr(self, profile.list_label, profile.objs)
except AttributeError:
continue
# Mapping callback method to every profile
self.manager.add(self.callback, profile.address)
return _profiles
def get_profile(self, profile):
"""Returns a specific profile from the profile list and otherwise None"""
return self.profiles.get(profile, None)
def get_helpers(self):
"""Returns a list of helper functions that provide access to the
objects of each profile."""
return list([profile.list_label for profile in self.profiles.values()])
def update(self):
"""
Tells the connection manager to receive the next 1024 byte of messages
to analyze.
"""
try:
self.manager.handle(self.socket.recv(1024))
except socket.error:
pass
def callback(self, *incoming):
"""
Gets called by the CallbackManager if a new message was received
"""
message = incoming[0]
if message:
address, command = message[0], message[2]
profile = self.get_profile(address)
if profile is not None:
try:
getattr(profile, command)(self, message)
except AttributeError:
pass | [
"traceylin@dn51vc9b.sunet"
] | traceylin@dn51vc9b.sunet |
84819ead29e0e12b987c520793c6c80fa0b7672d | c3ac9ba8f24be1bf067a77c5bc940702e7b330b6 | /Tutorials/search/biniry_search.py | 2547db5abc528666c3ac503296ac8e476cd00b19 | [] | no_license | Cwinka/tutorials | 6a195d18ca46dd85ca7370fdf56c9670e5bf07f5 | f170d9e708b55ae4d439f208ed8d32ae0889c11b | refs/heads/main | 2023-06-01T23:58:56.975015 | 2021-06-20T15:26:24 | 2021-06-20T15:26:24 | 378,677,904 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,173 | py | import time
def biniry_search(lys, target):
    """Recursive binary search over the sorted sequence *lys*.

    Returns the index of *target* in *lys*, or -1 when absent.
    (The misspelled name is kept — it is this function's public name.)
    """
    def _search(lo, hi):
        # Search the inclusive range lys[lo..hi] without copying the list.
        # The original sliced the list at every level (O(n) per step) and
        # needed a fragile index-correction re-check for the right half.
        if lo > hi:
            return -1
        mid = lo + (hi - lo) // 2  # same midpoint as the original
        if lys[mid] == target:
            return mid
        if target < lys[mid]:
            return _search(lo, mid - 1)
        return _search(mid + 1, hi)

    return _search(0, len(lys) - 1)
def biniry_search_indeses(lys, target):
    """Iterative binary search; returns the index of *target* or -1."""
    if not lys:
        return -1
    lo, hi = 0, len(lys) - 1
    while lo <= hi:
        mid = lo + (hi - lo) // 2
        value = lys[mid]
        if value == target:
            return mid
        if value < target:
            lo = mid + 1
        else:
            hi = mid - 1
    return -1
#
ns = list(range(8*200000))  # 1.6M sorted ints used as the benchmark input
tt = time.time()
biniry_search(ns, 200000)
tt2 = time.time() - tt
# NOTE(review): __sizeof__ reports the list object's shallow size in bytes,
# not the element count; "Bites" in the message is a typo for "Bytes"
# (left unchanged here — it is runtime output).
print(f"Bites long: {ns.__sizeof__()}. Searched for: {tt2}")
tt3 = time.time()
biniry_search_indeses(ns, 200000)
tt4 = time.time() - tt3
print(f"Bites long: {ns.__sizeof__()}. Searched for: {tt4}")
# def sparse_search(data, search_val):
# print("Data: " + str(data))
# print("Search Value: " + str(search_val))
# first = 0
# last = len(data)-1
# while first <= last:
# mid = (first + last)//2
# if not data[mid]:
# left = mid - 1
# right = mid + 1
# while True:
# if left < first and right > last:
# print("{} is not in the dataset".format(search_val))
# return
# elif right <= last and data[right]:
# mid = right
# break
# elif left >= first and data[left]:
# mid = left
# break
# right = right +1
# left += left +1
# if data[mid] == search_val:
# print("{0} found at position {1}".format(search_val, mid))
# return
# elif data[mid] < search_val:
# first = mid + 1
# else:
# last = mid - 1
#
#
# print("{0} is not in the dataset".format(search_val))
#
# sparse_search(["A", "", "", "", "B", "", "", "", "C", "", "", "D"], "A")
| [
"nikita00zorinnn@mail.ru"
] | nikita00zorinnn@mail.ru |
0a445d67b18dc157da950a170a893bcfb3bb2412 | 9896b6b629642fbc8c441c9a81bc24809e2686ef | /DjangoProject/settings.py | b70569b83f3ef26f58fe95507430f0935e943380 | [] | no_license | mamthal/Peg-a-Page | dfdf9bbf516ca86e7d11db1714f585073ef71f10 | 27983da85d49a5b1ba788d61944ebd816cbaa373 | refs/heads/master | 2020-12-03T05:32:38.541497 | 2013-11-14T23:52:50 | 2013-11-14T23:52:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,600 | py | # Django settings for DjangoProject project.
import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
# SECURITY NOTE(review): database credentials and host are hard-coded in
# source control; load them from environment variables or an untracked
# local settings file instead.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'pegapage', # Or path to database file if using sqlite3.
        # The following settings are not used with sqlite3:
        'USER': 'nimble',
        'PASSWORD': 'password',
        'HOST': 'ec2-50-19-213-178.compute-1.amazonaws.com', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
        'PORT': '3306', # Set to empty string for default.
    }
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ""
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
"./Peg-a-Page/static",
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# SECURITY NOTE(review): this key is committed to version control; rotate it
# and load it from the environment instead of hard-coding it here.
SECRET_KEY = '&xmo9!!1&!#k^d#c3$^86%a$#vlazj@r_qej@b&r#e3g!33tqp'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'DjangoProject.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'DjangoProject.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
"./Peg-a-Page/Templates"
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'PegAPage',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| [
"pallavikhandekar212@gmail.com"
] | pallavikhandekar212@gmail.com |
def commonCharacterCount(s1, s2):
    """Return how many characters s1 and s2 share, counting duplicates."""
    shared = 0
    for ch in set(s1):
        shared += min(s1.count(ch), s2.count(ch))
    return shared
| [
"noreply@github.com"
] | noreply@github.com |
6c7157b662729c66c8f8593e3a2c69535e9dae21 | c8ccd397675e038bdd2c28025b6f2c53ed0b296a | /web/apps/main/models/__init__.py | afa9e3c4b2cf34c204f4c33b941de848152d0886 | [] | no_license | gharghi/amnava | dd7dcffc589a493471daf95809d7b6b892c11b39 | df9a2cd8cdb11f6b06edb3ada5c2dfff8738af77 | refs/heads/master | 2020-06-23T01:39:56.122388 | 2019-07-23T15:54:31 | 2019-07-23T15:54:31 | 198,462,832 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | py | from .asn import Asn
from .prefix import Prefix
from .route_object import RouteObject
from .dump import Dump
from .neighbors import Neighbors
from .origins import Origins
from .notifications import Notifications
from .notification_rule import NotificationRule | [
"shahin@asiatech.ir"
] | shahin@asiatech.ir |
c47123eb1d1b70624bb34e5b9652c9cf7a8dd2ec | 99c4d4a6592fded0e8e59652484ab226ac0bd38c | /code/batch-2/vse-naloge-brez-testov/DN10-M-123.py | 0c1eae41abe8c8c3d571897a3c84d3a0b0442dcb | [] | no_license | benquick123/code-profiling | 23e9aa5aecb91753e2f1fecdc3f6d62049a990d5 | 0d496d649247776d121683d10019ec2a7cba574c | refs/heads/master | 2021-10-08T02:53:50.107036 | 2018-12-06T22:56:38 | 2018-12-06T22:56:38 | 126,011,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,389 | py | otroci = {
"Adam": ["Matjaž", "Cilka", "Daniel"],
"Aleksander": [],
"Alenka": [],
"Barbara": [],
"Cilka": [],
"Daniel": ["Elizabeta", "Hans"],
"Erik": [],
"Elizabeta": ["Ludvik", "Jurij", "Barbara"],
"Franc": [],
"Herman": ["Margareta"],
"Hans": ["Herman", "Erik"],
"Jožef": ["Alenka", "Aleksander", "Petra"],
"Jurij": ["Franc", "Jožef"],
"Ludvik": [],
"Margareta": [],
"Matjaž": ["Viljem"],
"Petra": [],
"Tadeja": [],
"Viljem": ["Tadeja"],
}
def premozenje(oseba, denar):
    """Return the total wealth of `oseba` and all of their descendants.

    Recursively walks the module-level `otroci` family tree and sums the
    `denar` values over the whole subtree rooted at `oseba`.
    """
    total = denar[oseba]
    for child in otroci[oseba]:
        total += premozenje(child, denar)
    return total
def najbogatejsi(oseba, denar):
    """Return a ``(name, money)`` pair for the richest person in the family
    subtree rooted at `oseba` (walking the module-level `otroci` tree).

    Fix: the original compared ``denar[otrok] >= (denar[oseba] in najbolj_bogat)``
    -- an amount against a boolean membership test -- and then adopted the
    child's result unconditionally, so poorer descendants could overwrite a
    richer ancestor.  Ties go to the node visited later (``>=``), matching
    the original's apparent intent.  Dead variables and debug prints removed.
    """
    najbolj_bogat = (oseba, denar[oseba])
    for otrok in otroci[oseba]:
        kandidat = najbogatejsi(otrok, denar)
        if kandidat[1] >= najbolj_bogat[1]:
            najbolj_bogat = kandidat
    return najbolj_bogat
| [
"benjamin.fele@gmail.com"
] | benjamin.fele@gmail.com |
f69994566964aeb6a5c7f505a52d19451b40b25f | fdcf47f556e2c520ee60d05ff0acffd4826b30e0 | /mydatabase.py | 0714019eb99d3713dca7aec50cade3c9a1427f12 | [] | no_license | Carlos20040301/Cine | a1e13bf361fab62338cd11693a9dac5d33bf4aa9 | acbcb5e8cf2abbaa541d2175d4afc5fcbbd9efd6 | refs/heads/master | 2023-04-22T04:36:34.774482 | 2021-05-05T22:15:32 | 2021-05-05T22:15:32 | 364,600,289 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 661 | py | import mysql.connector
class RedSocialDb:
    """Minimal data-access helper for the ``db_red_social`` MySQL database."""

    def open_connection(self):
        """Open and return a new connection to the local MySQL server.

        Fix: the original defined a misspelled ``open_conecction`` without a
        ``self`` parameter, while ``insert_db`` called ``self.open_connection``
        -- every insert raised AttributeError.
        """
        # NOTE(review): credentials are hard-coded (root / empty password);
        # move them to configuration before production use.
        return mysql.connector.connect(host="localhost",
                                       user="root",
                                       password="",
                                       database="db_red_social")

    # Backward-compatible alias for the original (misspelled) method name.
    open_conecction = open_connection

    def insert_db(self, email, pwd, age):
        """Insert one user row (CORREO, PWD, EDAD) into ``tbl_usuario``."""
        my_connection = self.open_connection()
        try:
            cursor = my_connection.cursor()
            # Parameterized query: safe against SQL injection.
            query = "INSERT INTO tbl_usuario(CORREO,PWD,EDAD) VALUES (%s,%s,%s)"
            cursor.execute(query, (email, pwd, age))
            my_connection.commit()
        finally:
            # Fix: the connection leaked if cursor()/execute()/commit() raised.
            my_connection.close()
| [
"carlosecastro04@gmail.com"
] | carlosecastro04@gmail.com |
a430b405c518f5492c4bfcf40ae484ae3432d216 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02618/s417415114.py | ddebb487f588173570c9610c70cadb46a063199e | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,303 | py | from sys import exit
import copy
#import numpy as np
#from collections import deque
# Contest-scheduling heuristic (AtCoder "Introduction to Heuristics" style):
# d days, 26 contest types, per-type decay penalties c[] and per-day scores
# s[day][type].  Greedy choice per day with a lookahead of `off` extra days;
# the lookahead 0..12 whose schedule scores best under exact re-evaluation wins.
d, = map(int, input().split())
c= list(map(int, input().split()))
s=[list(map(int, input().split())) for _ in range(d)]
# t=[int(input()) for _ in range(d)]
sche=[0 for _ in range(d)]
s_tmp=float("inf")*(-1)  # best exact score found over all lookaheads
for off in range(0,13):
    last=[0 for _ in range(26)]  # last day each contest type was held (0 = never)
    sche=[0 for _ in range(d)]
    for day in range(1,d+1):
        idx=day-1
        d_tmp=float("inf")*(-1)
        i_tmp=0
        # Try all 26 contest types; keep the one with the best estimated delta.
        for t in range(26):
            delta=0
            l_tmp=copy.copy(last)
            delta+=s[idx][t]
            l_tmp[t]=day
            # Decay penalty estimated over the window [day, day+off]:
            # arithmetic series folded into a closed form (hence the 0.5).
            for l in range(26):
                delta-=0.5*(off+1)*c[l]*((day-l_tmp[l])+(day+off-l_tmp[l]))
            if delta>=d_tmp:
                d_tmp=delta
                i_tmp=t
        sche[idx]=i_tmp+1
#        score+=d_tmp
        last[i_tmp]=day
#    print(score)
#    print(i_tmp+1)
    # Exact re-evaluation of this candidate schedule's score.
    score=0
    last=[0 for _ in range(26)]
    for i in range(1,d+1):
        idx=i-1
        score+=s[idx][sche[idx]-1]
        for l in range(26):
            score-=c[l]*(i-last[l])
        last[sche[idx]-1]=i
#    print(score)
    # Keep the best schedule seen so far (ties favour larger lookaheads).
    if score>=s_tmp:
        s_tmp=score
        sche_tmp=copy.copy(sche)
# Output the winning schedule: one contest type (1..26) per day.
for i in sche_tmp:
    print(i)
# print(s_tmp)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
e712ac004c472f06084a23769197fbe9c9c1722a | f09dc121f213f2881df3572288b7ee5b39246d73 | /aliyun-python-sdk-dataworks-public/aliyunsdkdataworks_public/request/v20200518/DeleteConnectionRequest.py | 9a4d962fc099ab8a8094f136d5551e069631099c | [
"Apache-2.0"
] | permissive | hetw/aliyun-openapi-python-sdk | 2f31378ad6be0896fb8090423f607e9c7d3ae774 | 7443eacee9fbbaa93c7975c6dbec92d3c364c577 | refs/heads/master | 2023-01-19T22:42:36.214770 | 2020-12-04T10:55:14 | 2020-12-04T10:55:14 | 318,689,093 | 1 | 0 | NOASSERTION | 2020-12-05T03:03:03 | 2020-12-05T03:03:03 | null | UTF-8 | Python | false | false | 1,474 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdataworks_public.endpoint import endpoint_data
class DeleteConnectionRequest(RpcRequest):
	"""RPC request for the dataworks-public ``DeleteConnection`` API
	(version 2020-05-18): deletes the data source named by ConnectionId."""
	def __init__(self):
		RpcRequest.__init__(self, 'dataworks-public', '2020-05-18', 'DeleteConnection')
		self.set_method('POST')
		# Use region-aware endpoints when the SDK core exposes them.
		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
	def get_ConnectionId(self):
		# ID of the connection to delete (sent as a query parameter).
		return self.get_query_params().get('ConnectionId')
def set_ConnectionId(self,ConnectionId):
self.add_query_param('ConnectionId',ConnectionId) | [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
3bbab7120ebc507559f7d36009c79eedecf43fed | b4b796d863bcf5b9e8617dc2566bd5418c0a7737 | /py/50.Pow(x,n).py | 6f3c09f7df1574c285f5df2047963340b2c29871 | [] | no_license | NidhoggZe/LeetCode | a02e323ffdbc660f43a148fd7219ecf8dc2bff95 | 0ae2e9fac6692f76b71f929154ba72c31d2c2bfd | refs/heads/master | 2023-01-13T04:55:47.911464 | 2020-11-11T12:16:06 | 2020-11-11T12:16:06 | 311,876,420 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | class Solution:
    def myPow(self, x: float, n: int) -> float:
        """Compute x**n by iterative binary exponentiation: O(log|n|) multiplies."""
        ans = 1.0
        if n < 0:
            # x^(-n) == (1/x)^n: flip to a non-negative exponent.
            x = 1/x
            n = -n
        while n != 0:
            if n & 1:
                # Current low bit set: fold this power of x into the answer.
                ans *= x
            n //= 2
            x *= x  # square the base for the next bit
        return ans | [
"397257341@qq.com"
] | 397257341@qq.com |
369ca60c846b387a15196df4ced3a3e9b73e48bb | 683b6d8539721cc09da159226cc8d656b6655467 | /botshop/wsgi.py | 2fcf9970a21445676a4a74fad9fbeba125351ae3 | [
"MIT"
] | permissive | hiletroy/BotShop | 9afcce1b51b53fc8bc9327afb711b67f8fefb501 | 9bbc8cd2d6789a04b38984ac92e3d9d1877f430f | refs/heads/master | 2022-12-09T12:56:06.218398 | 2020-01-17T05:07:08 | 2020-01-17T05:07:08 | 86,826,281 | 0 | 0 | null | 2022-01-06T22:24:20 | 2017-03-31T14:11:12 | Python | UTF-8 | Python | false | false | 483 | py | """
WSGI config for botshop project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "botshop.settings")
application = get_wsgi_application()
# Wrap the WSGI app so WhiteNoise serves static files directly.
# NOTE(review): whitenoise.django.DjangoWhiteNoise was removed in
# WhiteNoise 4.x; newer releases expect WhiteNoiseMiddleware instead.
application = DjangoWhiteNoise(application)
| [
"alexey.bavykin@gmail.com"
] | alexey.bavykin@gmail.com |
361c70e5469002886a8f75d036416aa07b316719 | 4e187da441d9788cab323add85d7ab8253dd3438 | /python2.7/chapter_6/test_server/addressesapp/migrations/0001_initial.py | d41244950c24e05e1950bc8b861a2bb412e7fdb9 | [] | no_license | ai2010/machine_learning_for_the_web | 7b506efcd8abb2e0a727e6d02b583ee741433042 | dcca7c3028b6b6af57cffdce9c4f916114c06f94 | refs/heads/master | 2023-07-25T12:56:47.132336 | 2022-03-31T13:42:45 | 2022-03-31T13:42:45 | 57,253,576 | 75 | 82 | null | 2023-07-06T21:40:19 | 2016-04-27T22:41:48 | Python | UTF-8 | Python | false | false | 738 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=255, verbose_name='Name')),
('mobilephone', models.IntegerField(default=-1, null=True)),
('mail', models.EmailField(max_length=255, blank=True)),
],
options={
},
bases=(models.Model,),
),
]
| [
"ciccibolli@gmail.com"
] | ciccibolli@gmail.com |
a71863966023b79206fa8aa368d5716c6ab02aae | 60cc8185187b584b78377d526cde7c1bee325db3 | /backend/generate.py | 1cfd4e20ca9a68c76b07279c47782ac09aa9020f | [] | no_license | sc1f/student-elections-explorer | da5d61d4c15b9cbd71f5dbc918aeec70d00b9a57 | 39b1ac42358dbbd50f29e123e45a6f84281ef165 | refs/heads/master | 2021-01-18T03:11:01.672984 | 2015-03-13T00:27:48 | 2015-03-13T00:27:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 840 | py | from flask_frozen import Freezer
import copytext
from application import app
import settings
def generate():
    """Freeze the Flask app into a static site at settings.web_app_location."""
    app.config['FREEZER_DESTINATION'] = settings.web_app_location
    app.config['FREEZER_BASE_URL'] = settings.external_url
    freezer = Freezer(app)
    copy = copytext.Copy(settings.copy_sheet_location)
    @freezer.register_generator
    def candidate_page():
        # Yield one URL-parameter dict per candidate row so Frozen-Flask
        # renders every candidate page.
        for sheetName in copy.sheetNames():
            # Skip the non-candidate sheets of the spreadsheet.
            if sheetName == 'metadata' or sheetName == 'Attribution': continue
            for row in copy[sheetName]:
                # Slug is name+major+year with spaces and slashes replaced.
                yield {"candidate_id": (row['Candidate Name'].unescape() + row['Major'].unescape() + row['Year'].unescape()).replace(" ", "_").replace("/", "_")}
                # yield '/candidates/' + (row['Candidate Name'].unescape() + row['Major'].unescape() + row['Year'].unescape()).replace(" ", "_")
    freezer.freeze()
if __name__ == '__main__': generate()
"mileshutson@utexas.edu"
] | mileshutson@utexas.edu |
3c576dc8b9848f179717809fc14cf28926a954cf | 68ab3ac9edc686dfdbd57132c97f5d832984c803 | /faceinsight/io/pubdataloader.py | d2db19f12508f6edb8e49b462ad97d9346c8ffa1 | [] | no_license | sealhuang/FaceInsight | a838b361fce5da2707642af0b2a25f9cdbd6f1c7 | 62db5e521550c56707dcb6813cbd68481bd6a96b | refs/heads/master | 2023-08-02T12:48:20.917956 | 2021-10-09T06:41:39 | 2021-10-09T06:41:39 | 181,633,786 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,644 | py | # vi: set ft=python sts=4 ts=4 sw=4 et:
"""Dataset utils for loading public dataset."""
from __future__ import absolute_import
from __future__ import print_function
import os
import numpy as np
from PIL import Image
def get_lfw_val_pair(pair_file, img_dir):
    """Get LFW data for validation."""
    # Read the pair list, dropping the header (first) line.
    with open(pair_file, 'r') as pf:
        records = [ln.strip().split('\t') for ln in pf.readlines()[1:]]
    # Collected image paths (two per pair) and matching 1/0 labels.
    val_imgs = []
    val_labels = []
    for rec in records:
        if len(rec) == 3:
            # "same" pair: two images of a single identity.
            person_a, person_b = rec[0], rec[0]
            num_a, num_b = rec[1], rec[2]
            label = 1
        elif len(rec) == 4:
            # "different" pair: one image from each of two identities.
            person_a, num_a, person_b, num_b = rec
            label = 0
        else:
            # Lines of any other shape (e.g. blanks) are ignored.
            continue
        img1 = os.path.join(img_dir, person_a,
                            '%s_%04d.png'%(person_a, int(num_a)))
        img2 = os.path.join(img_dir, person_b,
                            '%s_%04d.png'%(person_b, int(num_b)))
        # Keep the pair only when both images exist on disk.
        if os.path.exists(img1) and os.path.exists(img2):
            val_imgs.extend([img1, img2])
            val_labels.append(label)
    assert len(val_imgs)==len(val_labels)*2, 'Unmatch data pair'
    print('%s pairs collected'%(len(val_labels)))
    return val_imgs, np.array(val_labels)
| [
"huanglijie@outlook.com"
] | huanglijie@outlook.com |
a8e411a0029259b2ad6f383769c868249ccc8975 | bc4a22787f5c0ab51512eee550776bb71c32eb81 | /forloop.py | 6669e5356706f9c4588f28c7942e80759fa6087f | [] | no_license | passarovertical/python | 874d6928adf0db32bde49e9892085228770a4d50 | 3b3750a02b67abe60b6c8396489ec87d33c5bbdd | refs/heads/master | 2021-09-29T00:55:34.534659 | 2018-11-22T01:15:55 | 2018-11-22T01:15:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 194 | py | nomes = ['Jim', 'Karen', 'Kevin']
# Iterate directly over the collection: `for name in nomes` yields the
# elements themselves.  (The original iterated range(len(nomes)) and
# printed the indices 0..2, contradicting both the variable name and the
# note below; the bare `len(nomes)` expression was a no-op and is removed.)
for name in nomes:
    print(name)
# Pode-se usar for loops para continuar um loop sobre todo
# coleção, como str, array, por exemplo. | [
"lucas.bsilva1@gmail.com"
] | lucas.bsilva1@gmail.com |
2aac061975d168ee79510acb3399664ff62f5e16 | ebd00c8cb67a3597e6caedb690dfac3e5599796d | /translations/italiano/0.4.3/vegastrike-data.spec | d8b29228d682bf738a707e0393f3c1bd34da595a | [] | no_license | DMJC/vsengine | aef694057df00e925cd5d54a1bd4d9e0026f31c5 | d7314c38bcf9d164b49477beffb86217dd654d34 | refs/heads/master | 2020-06-25T11:46:16.708539 | 2018-07-01T18:12:30 | 2018-07-01T18:12:30 | 74,533,112 | 2 | 0 | null | null | null | null | MacCentralEurope | Python | false | false | 2,939 | spec | Nome: vegastrike-data
Contenuto: Vegastrike - un simulatore spaziale 3d opensource(data files)
Versione: 0.4.1D
Release: 1
Copyright: GPL
Categoria: Amusements/Games
Sorgenti: vegastrike-data.tar.gz
URL: http://vegastrike.sourceforge.net
Creatore pacchetto: Krister Kjelltröm aka Starchild <k00_kjr@k.kth.se>
Cartella di compilazione: %{_tmppath}/data
Prefisso: /usr/local
Provides: vegastrike-data
Necessita di: vegastrike >= 0.4.1
%description
Vega Strike Celeste - Commercia, combatti ed esplora l'universo.
Vega Strike è un RPG di simulazione 3d accelerato OpenGL/GPL per Windows/Linux/MacOSX che permette ad un giocatore di commerciare e assaltare vascelli di altri commercianti, nello stile di Elite. Cominci con una nave da carico Llama, con infinite possibilità di fronte a te e giusto i soldi per costruirti una vita. Il pericolo ti aspetta nello spazio di fronte a te..
Questo archivio contiene i file essenziali per giocare.
Contiene anche la versione aggiornata al 25 settembre 2003 del file factions.xml.
%prep
rm -rf $RPM_BUILD_ROOT
%setup -n data
%build
echo "Non Ť stato individuato nulla da compilare"
%install
echo "Installazione..."
mkdir -p $RPM_BUILD_ROOT/usr/local/games/vegastrike/data
mkdir -p $RPM_BUILD_ROOT/usr/local/bin/
mkdir -p $RPM_BUILD_ROOT/usr/local/man/man1/
cp vslauncher $RPM_BUILD_ROOT/usr/local/bin/
cp vsinstall $RPM_BUILD_ROOT/usr/local/bin/
cp documentation/vsinstall.1 $RPM_BUILD_ROOT/usr/local/man/man1/
cp documentation/vslauncher.1 $RPM_BUILD_ROOT/usr/local/man/man1/
cp -R . $RPM_BUILD_ROOT/usr/local/games/vegastrike/data
echo "questo pacchetto contiene la versione aggiornata al 25 settembre 2003 del file factions.xml"
%clean
rm -rf $RPM_BUILD_ROOT
%files
%doc /usr/local/man/man1/vslauncher.1
%doc /usr/local/man/man1/vsinstall.1
# Normal files
/usr/local/games/vegastrike/data
%attr(755, root, root) /usr/local/bin/vslauncher
%attr(755, root, root) /usr/local/bin/vsinstall
%changelog
* Sat Jan 03 2004 Daniel Aleksandrow <dandandaman@users.sourceforge.net>
- changed data dir to /usr/local/games/vegastrike/data
* Tue Sep 30 2003 Krister Kjellström <k00_kjr@k.kth.se>
- Updated the description and paths, etc for 0.4.1
- Replaced /tmp with {_tmppath}
- Added attr() in front of the binaries in files section,
- don't know if they do any good:)
- Added comments below
- Added echo message after install phase: 'This pakage...
################################################################
#
# Note:
#
# Before building, make sure vsinstall and vslauncher
# is in the appropriet place.
# Also make sure there is no music subdirectory present, unless,
# of course, you intend to include it:)
#
# Should be made with -bb and --target noarch, ie:
# rpmbuild -bb vegastrike-data.spec --target noarch
#
################################################################
| [
"james@James-Work"
] | james@James-Work |
54d67ca0b0275a672a8ac8402fe331de48c258e1 | f827fd7699ffa5b59ec8c472a63ee317d78ec9a5 | /gui/Panduit_GUI/Tab_Verify.py | ebcd3b368acfe9be5dd3e7dffce0005d2327e26b | [] | no_license | cissuppandi/alruba | a2e8106329ff29d220d8431ba84f88191266d28c | 2410d5e8f2328cf86d7f3c91304046b9a17ee12d | refs/heads/master | 2020-04-01T09:33:53.085099 | 2018-10-23T10:29:21 | 2018-10-23T10:29:21 | 153,080,363 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,237 | py | import Login
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.wait import WebDriverWait
import time
import sys
import datetime
import os
import getpass
import File_Creation
def tab_verification(ip):
    """Smoke-test the PDU web UI at `ip`: log in, click through every tab,
    sub-tab and home menu entry, then return the live selenium driver.

    Progress goes to stdout; the commented-out ``.write(...)`` lines are
    the remains of an optional report-file sink.
    """
    #######GETTING THE TAB FROM HOME ICON######
    #ip=raw_input("Enter the IP address of the PDU")
    driver=Login.login(ip)
    #=File_Creation.file_create()
    print("#######GETTING THE TAB FROM HOME ICON######")
    #.write("\n#######GETTING THE TAB FROM HOME ICON######")
    # The first three <li> elements are the home-level tabs.
    tab_select=driver.find_elements_by_tag_name("li")
    for i in range(0,3):
        tab=tab_select[i]
        name=tab.text
        print("CLICKING THE "+" "+name+"TAB")
        #.write("\n\nCLICKING THE "+" "+name+"TAB")
        tab.click()
        time.sleep(2)
    tab_select[0].click()
    time.sleep(2)
    ###########TAB FROM THE PDU TAB######
    print("###########TAB FROM THE PDU TAB######")
    #.write("\n\n\n###########TAB FROM THE PDU TAB######")
    # Remaining <li> elements belong to the PDU tab; each click exposes
    # sub-tabs, so the element list is re-queried after every click.
    tab_select=driver.find_elements_by_tag_name("li")
    for i in range(3,len(tab_select)):
        tab=tab_select[i]
        name=tab.text
        print("CLICKING THE "+" "+name+"TAB")
        #.write("\n\nCLICKING THE "+" "+name+"TAB")
        time.sleep(2)
        tab.click()
        time.sleep(2)
        tab_select=driver.find_elements_by_tag_name("li")
        # NOTE(review): the inner loop reuses `i`, shadowing the outer
        # index; harmless in Python (the outer range iterator resumes)
        # but worth renaming.
        for i in range(5,len(tab_select)):
            time.sleep(2)
            tab=tab_select[i]
            name=tab.text
            print("CLICKING THE "+" "+name+" "+"TAB")
            #.write("\nCLICKING THE "+" "+name+" "+"TAB")
            tab.click()
    tab_select[0].click()
    # Return to the home screen via the first SVG icon.
    home=driver.find_elements_by_tag_name("svg")
    home[0].click()
    dash=driver.find_elements_by_css_selector("a.grommetux-anchor")
    print("GETIING THE MENU NAME FROM HOME ")
    #.write("\nGETIING THE MENU NAME FROM HOME ")
    # Menu names are read from the 5th path segment of each anchor's href.
    for i in range(0,3):
        a=[]
        time.sleep(1)
        a=dash[i].get_attribute("href").split("/")
        #.write(a[4])
        print(a[4])
    print("checking all the menu items of home")
    #.write("\nchecking all the menu items of home")
    menu=['DASHBOARD','IDENTIFICATION','CONTROL&MANAGE']
    for i in range(0,3):
        # Anchors are re-located on each pass: every click loads a new page.
        dash=driver.find_elements_by_css_selector("a.grommetux-anchor")
        print("Checking the tab"+" "+"**"+menu[i]+"**")
        #.write("\nChecking the tab"+" "+"**"+menu[i]+"**")
        a=dash[i]
        a.click()
        time.sleep(6)
    home[0].click()
    #.close()
    return driver
| [
"44084946+cissuppandi@users.noreply.github.com"
] | 44084946+cissuppandi@users.noreply.github.com |
f062f548ac52051b7be211f4aa05d7693fe355a0 | 7b0e2d6061e267fd2d9d8bec0bc76b9571684265 | /programmers_kdt_II_1-week5-/programmers_kdt_II_1-week5-/Monthly_proj/Monthly_Proj1/show/show/settings.py | 05b7384c8bcace74523c6cc6d25738ee2e86d90e | [] | no_license | wkdclrms123/KDT | c214d04ee7fd49635f36c2c45923f00aba2d2a97 | 00ca956a36c13b6a0d69a7966714d8100ced305b | refs/heads/main | 2023-05-07T03:05:09.365192 | 2021-05-31T06:40:17 | 2021-05-31T06:40:17 | 372,407,249 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,435 | py | """
Django settings for show project.
Generated by 'django-admin startproject' using Django 3.2.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control; rotate it and
# load it from the environment instead (e.g. os.environ['SECRET_KEY']).
SECRET_KEY = 'django-insecure-^)w5m1ftp$=80hws9byt4!!mx&=s^yi-v#o7bp418)wm@y%vq9'
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): DEBUG is True while ALLOWED_HOSTS points at a public EC2
# host -- confirm this is not the production configuration.
DEBUG = True
ALLOWED_HOSTS = [
    ".ap-northeast-2.compute.amazonaws.com",
    "15.165.183.212"
]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'show.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'template'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'show.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'ko-kr'
TIME_ZONE = 'Asia/Seoul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"wkdclrms123@naver.com"
] | wkdclrms123@naver.com |
51fe943b78b5a69eb3896de554aac6b22b32623a | 2e6b15509a4487241f5734346e8ac9173c958c99 | /apps/bibliocratie/views.py | cde73421a81a77498301244443cd413a3edf799f | [] | no_license | Bibliocratie/Bibliocratie | 9dd47ab105eb7e0dfb2566b307ad8bfd66b1aad5 | b66347ced05dc7821e721fd3d05d619791e4d543 | refs/heads/master | 2020-06-05T08:07:19.298421 | 2015-07-27T16:03:34 | 2015-07-27T16:03:34 | 39,377,060 | 9 | 0 | null | null | null | null | UTF-8 | Python | false | false | 119,820 | py | # -*- coding: utf-8 -*-
import json
from django.utils.translation import ugettext_lazy as _
from django.utils.datastructures import MultiValueDictKeyError
from django.http import Http404
from django.contrib.auth import login as auth_login, logout as auth_logout
from django.contrib.auth.views import redirect_to_login
from django.utils.decorators import method_decorator
from django.contrib.admin.views.decorators import staff_member_required
from django.http import HttpResponse, HttpResponseRedirect
from django.views.decorators.debug import sensitive_post_parameters
from django.views.generic import DetailView
from django.views.generic.base import TemplateView
from django.views.generic.edit import FormView, View
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
import dateutil.parser
import calendar
from decimal import *
from djangular.views.mixins import JSONResponseMixin, allow_remote_invocation
from djangular.views.crud import NgCRUDView
from rest_framework import viewsets
from rest_framework import filters
import django_filters
import watson
from bibliocratie.forms import *
from bibliocratie.serializers import *
from bibliocratie.receiver import *
logger = logging.getLogger(__name__)
REDIRECT_FIELD_NAME = 'next'
class HomeView(FormView):
    """Public landing page ("vitrine").

    Also serves as the JSON/AJAX endpoint for authentication actions
    (login, signup, password recovery) and small profile updates
    (bio/location, address).  Authenticated visitors are redirected to
    their profile page.
    """
    template_name = 'bibliocratie/vitrine.html'
    form_class = BibliocratieAuthenticationForm
    success_url = reverse_lazy('home')

    def get(self, request, *args, **kwargs):
        # A logged-in user has no use for the vitrine: go straight to
        # their profile page.
        if request.user.is_authenticated():
            return HttpResponseRedirect(reverse('profil_detail',kwargs={'slug':request.user.slug}))
        return super(HomeView, self).get(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(HomeView, self).get_context_data(**kwargs)
        # Optional ?next= redirect target, forwarded to the template.
        # (A missing key raises MultiValueDictKeyError, a KeyError subclass;
        # the bare except is narrowed accordingly.)
        try:
            next = self.request.GET['next']
        except KeyError:
            next = None
        context.update(
            next = next,
            today = timezone.now(),
            lancement_form = LancementForm(),
        )
        return context

    def post(self, request, **kwargs):
        # AJAX posts carry a JSON body and are handled by self.ajax();
        # plain posts go through the regular FormView machinery.
        if request.is_ajax():
            return self.ajax(request)
        return super(HomeView, self).post(request, **kwargs)

    def ajax(self, request):
        """Dispatch a JSON 'action' (login/signup/recover/biolieu/adresse)
        or a multipart avatar upload; answer {'errors', 'success_url'}."""
        # Multipart avatar upload: no JSON body to parse.
        if request.FILES.has_key('avatar'):
            user_form = BiblioUserFileForm(request.POST, request.FILES, instance=request.user)
            if user_form.is_valid():
                user_form.save()
            response_data = {'errors': None, 'success_url': None}
            return HttpResponse(json.dumps(response_data), content_type="application/json")
        data = json.loads(request.body)
        if data['action']=='login':
            form = BibliocratieAuthenticationForm(data=data)
        elif data['action']=='signup':
            form = BibliocratieSignupForm(data=data)
        elif data['action']=='recover':
            form = BibliocratieRecoverForm(data=data)
        elif data['action']=='biolieu':
            form = BiblioUserPrefForm(data=data, instance=request.user)
        elif data['action']=='adresse':
            form = AdresseForm(data=data, instance=request.user.adresse)
        else:
            # Fix: an unknown action previously fell through and raised a
            # NameError on `form` below; reject it explicitly instead.
            raise Http404
        # Fix: next_page used to be assigned only inside the is_valid()
        # branch, so an invalid form raised a NameError when building
        # response_data below.
        next_page = None
        if form.is_valid():
            if data['action']=='biolieu' or data['action']=='adresse':
                form.save()
            elif data['action']=='signup' and data.has_key('need_more_info') and data['need_more_info']==True:
                # Flag freshly created accounts that still need to
                # complete their profile.
                user = form.get_user()
                user.need_more_info=True
                user.save()
            if data['action'] in ['login','signup']:
                # Preserve the anonymous visitor's basket across login:
                # grab it before authenticating, copy it onto the user's
                # basket, then drop the anonymous one.
                panier=Commande.objects.getUserPanier(request)
                auth_login(self.request, form.get_user())
                panier_apres=Commande.objects.getUserPanier(request)
                if panier.pk!=None:
                    panier_apres.save()
                    panier_apres.copy(panier)
                    panier.delete()
                if self.request.session.test_cookie_worked():
                    self.request.session.delete_test_cookie()
            next_page = data.get('next')
            if not next_page:
                # reverse() can fail (NoReverseMatch) and AnonymousUser has
                # no slug (AttributeError): fall back to no redirect.
                try:
                    next_page = reverse('profil_detail', args=[request.user.slug])
                except Exception:
                    next_page = None
            # NOTE(review): next_page is not validated with is_safe_url()
            # against the current host before being returned to the client.
        response_data = {'errors': form.errors, 'success_url': next_page}
        return HttpResponse(json.dumps(response_data), content_type="application/json")

    @method_decorator(sensitive_post_parameters('password'))
    def dispatch(self, request, *args, **kwargs):
        # Set the test cookie so the login flow can verify cookie support;
        # the decorator masks passwords in error reports.
        request.session.set_test_cookie()
        return super(HomeView, self).dispatch(request, *args, **kwargs)
class LoginView(HomeView):
    """Dedicated login page: HomeView's behaviour with the login template."""
    template_name = 'registration/login.html'
class SigninView(HomeView):
    """Signup page: HomeView's behaviour with the signup template.

    Consistency fix: use the ``template_name`` attribute like the sibling
    LoginView instead of overriding ``get_template_names()`` to return a
    constant -- equivalent per Django's TemplateResponseMixin.
    """
    template_name = 'registration/signin.html'
class LogoutView(View):
    """Log the visitor out and redirect to settings.LOGOUT_REDIRECT_URL."""
    def get(self, request, *args, **kwargs):
        auth_logout(request)
        return HttpResponseRedirect(settings.LOGOUT_REDIRECT_URL)
class ContactView(FormView):
    """AJAX endpoint for the public contact form: validates the payload
    and e-mails it to the site contact address."""

    def post(self, request, **kwargs):
        if request.is_ajax():
            return self.ajax(request)
        # Fix: this used to call super(HomeView, self).post(...), which
        # raises TypeError because HomeView is not in ContactView's MRO.
        return super(ContactView, self).post(request, **kwargs)

    def ajax(self, request):
        """Validate the JSON contact payload and send it by e-mail.

        Always answers with a JSON body of the form {'errors': ...}.
        """
        try:
            data = json.loads(request.body)
        except ValueError:
            # Malformed or empty body: let the form report missing fields.
            data = {}
        form = ContactForm(data=data)
        if form.is_valid():
            subject = _("Nouveau message d'un utilisateur")
            to = ['contact@example.com']
            ctx = {
                'email': form.cleaned_data['mail'],
                'prenom' : form.cleaned_data['prenom'],
                'nom' : form.cleaned_data['nom'],
                'telephone': form.cleaned_data['telephone'],
                'message' : form.cleaned_data['message']
            }
            message = get_template('mails/contact.html').render(Context(ctx))
            msg = EmailMessage(subject, message, to=to)
            msg.content_subtype = 'html'  # the template renders HTML
            msg.send()
        response_data = {'errors': form.errors}
        return HttpResponse(json.dumps(response_data), content_type="application/json")
class ProfilView(DetailView):
    """Profile page for a BiblioUser (looked up by slug).

    GET renders the profile with pre-filled edit forms; AJAX POSTs apply
    partial updates (avatar upload, preferences, billing address, user
    fields) and answer with JSON.
    """
    template_name = 'bibliocratie/profil.html'
    model = BiblioUser
    def get_context_data(self, **kwargs):
        context = super(ProfilView, self).get_context_data(**kwargs)
        user = self.get_object()
        context.update(
            user_form=BiblioUserForm(instance=user),
            adresse_form_fact=AdresseForm(auto_id=u'id1_%s', form_name='facturation_form',scope_prefix="facturation_data",instance=user.adresse),
            preference_form = PreferenceForm(instance=user.userpreference),
        )
        return context
    def post(self, request, **kwargs):
        # Only AJAX updates are supported on this page.
        if request.is_ajax():
            return self.ajax(request)
        raise Http404
    def ajax(self,request):
        """Apply the partial updates present in the request body and
        report per-form validation errors as JSON."""
        user_form = None
        facturation_form = None
        livraison_form = None  # NOTE(review): never assigned below -- dead?
        preference_form = None
        # Remember the slug: renaming the user changes it, and the client
        # must then reload the page at its new URL ('refresh'/'new_url').
        old_slug=self.get_object().slug
        if request.FILES.has_key('avatar'):
            # Multipart avatar upload (no JSON body).
            request.FILES.keys().index('avatar')  # NOTE(review): no-op lookup
            user_form = BiblioUserFileForm(request.POST, request.FILES, instance=self.get_object())
            if user_form.is_valid():
                obj = user_form.save()
            return HttpResponse(json.dumps({}), content_type="application/json")
        else:
            data=json.loads(request.body)
            user = self.get_object()
            # Each sub-payload is optional; only the forms actually posted
            # are validated and saved.
            if data.has_key('preference_data'):
                preference_form = PreferenceForm(data=data["preference_data"],instance=user.userpreference)
                if preference_form.is_valid():
                    obj = preference_form.save()
            if data.has_key('facturation_data'):
                facturation_form = AdresseForm(data=data["facturation_data"],instance=user.adresse)
                if facturation_form.is_valid():
                    obj = facturation_form.save()
            if data.has_key('biblio_user_data'):
                user_form = BiblioUserForm(data=data["biblio_user_data"],instance=user)
                if user_form.is_valid():
                    obj = user_form.save()
            response_data = {
                'biblio_user_errors':user_form.errors if user_form else None,
                'facturation_errors':facturation_form.errors if facturation_form else None,
                'preference_errors':preference_form.errors if preference_form else None,
                'refresh':old_slug!=user.slug,
                'new_url':reverse('profil_detail',kwargs={'slug' : user.slug})
            }
            return HttpResponse(json.dumps(response_data), content_type="application/json")
class MembresView(TemplateView):
    """Static "membres" (members) page."""
    template_name = 'bibliocratie/membres.html'
class PlayView(TemplateView):
    """Static "play" page."""
    template_name = 'bibliocratie/play.html'
class AideView(TemplateView):
    """Static help ("aide") page."""
    template_name = 'bibliocratie/aide.html'
class PourquoiBibliocratieView(TemplateView):
    """Static "why Bibliocratie" page."""
    template_name = 'bibliocratie/pourquoi_bibliocratie.html'
class ModeEmploiView(TemplateView):
    """Static user-guide ("mode d'emploi") page."""
    template_name = 'bibliocratie/mode_emploi.html'
class ConfidentialiteView(TemplateView):
    """Static privacy-policy page."""
    template_name = 'bibliocratie/confidentialite.html'
class SecuriteView(TemplateView):
    """Static security-information page."""
    template_name = 'bibliocratie/securite.html'
class CGUView(TemplateView):
    """Static terms-of-use (CGU) page."""
    template_name = 'bibliocratie/cgu.html'
class LancementView(TemplateView):
    """Entry point of the book-launch wizard.

    GET renders the landing template with an empty LancementForm; POST is
    accepted only as AJAX and answers JSON {'errors', 'success_url'}, where
    success_url points at the first wizard step for the created book.
    """
    template_name = 'bibliocratie/lancement.html'
    model = Livre

    def get(self, request, *args, **kwargs):
        # Plain template rendering; kept explicit for symmetry with post().
        return super(LancementView, self).get(request, *args, **kwargs)

    def post(self, request, **kwargs):
        # Only the AJAX entry point is supported for POST.
        if not request.is_ajax():
            raise Http404
        return self.ajax(request)

    def ajax(self, request):
        """Create the Livre when the posted JSON payload validates."""
        form = LancementForm(data=json.loads(request.body))
        valid = form.is_valid()
        if valid:
            livre = form.save()
        payload = {
            'errors': form.errors,
            'success_url': reverse('lancement_debut', args=[livre.slug]) if valid else None,
        }
        return HttpResponse(json.dumps(payload), content_type="application/json")

    def get_context_data(self, **kwargs):
        """Expose an empty LancementForm to the template."""
        context = super(LancementView, self).get_context_data(**kwargs)
        context['form'] = LancementForm()
        return context
class LancementDebutView(DetailView):
    """Launch wizard, step 1: basic book info (category, genre, ink colour, tags).

    GET is restricted to the book's authors; POST is AJAX-only, persists the
    LancementDebutForm and resets the validation flags of every later step.
    """
    template_name = 'bibliocratie/lancement_debut.html'
    model = Livre
    def get(self, request, *args, **kwargs):
        livre = self.get_object()
        # if not request.user.is_authenticated():
        # return redirect_to_login(next=reverse('lancement_debut', args=[self.get_object().slug]))
        # A book that has no author yet gets the current user attached.
        if livre.auteurs.all().count()==0:
            livre.auteurs.add(self.request.user)
            livre.save()
        # At this step the book is only viewable by its authors (and staff/owner).
        if request.user in livre.auteurs.all():
            return super(LancementDebutView, self).get(request, *args, **kwargs)
        else:
            raise Http404
    def post(self, request, **kwargs):
        if not request.user.is_authenticated():
            return redirect_to_login(next=reverse('lancement_debut', args=[self.get_object().slug]))
        if request.is_ajax():
            return self.ajax(request)
        raise Http404
    def ajax(self,request):
        """Validate and save step 1; returns JSON {'errors', 'success_url'}."""
        success_url = None
        form_errors = {}
        data=json.loads(request.body)
        # The select widgets post {'value': ..., 'display': ...} dicts; flatten
        # them into the plain field names the form expects.
        data['category']=data['categorie']['value']
        data['genre']=data['genre']['value']
        data['type_encre']=data['couleur']['value']
        form = LancementDebutForm(data=data, instance=self.get_object())
        if form.is_valid():
            obj = form.save(commit=False)
            # Replace the whole tag set with the posted one (lower-cased).
            for tag in obj.tags.all():
                obj.tags.remove(tag)
            for tag_name in data['tags']:
                tag, created = Tag.objects.get_or_create(text = tag_name['text'].lower())
                if obj.tags.filter(text=tag.text).count()==0:
                    obj.tags.add(tag)
            # Completeness checks beyond basic form validation.
            errors = []
            if obj.category=="" :
                errors.append(force_text(_("La categorie n'a pas ete renseignee")))
            if obj.genre=='':
                errors.append(force_text(_("Le genre n'a pas ete renseigne")))
            if obj.type_encre=='':
                errors.append(force_text(_("Le type d'encre n'a pas ete renseigne")))
            if obj.tags.count()==0:
                errors.append(force_text(_("Aucun tag n'a ete renseigne")))
            if len(errors):
                form_errors = {'__all__': errors}
            else:
                next=data['next']
                # Step 1 is now valid; every later step must be re-validated.
                obj.lancement_debut_valide=True;
                obj.lancement_interieur_valide=False;
                obj.lancement_couverture_valide=False;
                obj.lancement_prixdate_valide=False;
                obj.lancement_fin_valide=False;
                # No ready-made layout ("maquette") means a custom format (CST);
                # otherwise the format is still to be chosen (NTS).
                if not obj.maquette:
                    obj.format='CST'
                else:
                    obj.format='NTS'
                obj.save()
                if next:
                    success_url=reverse('lancement_interne', args=[obj.slug])
        else:
            form_errors=form.errors
        response_data = {
            'errors':form_errors,
            'success_url':success_url,
        }
        return HttpResponse(json.dumps(response_data), content_type="application/json")
    def get_context_data(self, **kwargs):
        """Expose the step-1 form plus JSON-serialized choice lists for the widgets."""
        context = super(LancementDebutView, self).get_context_data(**kwargs)
        lancement_debut_form=LancementDebutForm(instance=self.get_object())
        genre_list = []
        categorie_list = []
        couleur_list = []
        # Turn the form's choice tuples into {'value','display'} dicts.
        for genre in lancement_debut_form.fields['genre'].choices:
            genre_list.append({'value':genre[0],'display':genre[1].title()})
        for categorie in lancement_debut_form.fields['category'].choices:
            categorie_list.append({'value':categorie[0],'display':categorie[1].title()})
        for couleur in lancement_debut_form.fields['type_encre'].choices:
            couleur_list.append({'value':couleur[0],'display':couleur[1].title()})
        object = self.get_object()
        context.update(
            lancement_debut_form=lancement_debut_form,
            genre_list=json.dumps(SelectSerializer(genre_list,many=True).data),
            categorie_list=json.dumps(SelectSerializer(categorie_list,many=True).data),
            couleur_list=json.dumps(SelectSerializer(couleur_list,many=True).data),
            categorie=json.dumps(SelectSerializer({'value':object.category,'display':object.get_category_display()}).data),
            genre=json.dumps(SelectSerializer({'value':object.genre,'display':object.get_genre_display()}).data),
            couleur=json.dumps(SelectSerializer({'value':object.type_encre,'display':object.get_type_encre_display()}).data),
            maquette=object.maquette,
            couverture=object.couverture,
            pre_souscription=object.pre_souscription,
            tags = json.dumps(TagSerializer(self.get_object().tags,many=True).data),
        )
        return context
class LancementInterneView(DetailView):
    """Launch wizard, step 2: book interior (manuscript file, page counts, format)."""
    template_name = 'bibliocratie/lancement_interne.html'
    model = Livre
    def get(self, request, *args, **kwargs):
        livre = self.get_object()
        # Step 1 must be validated before step 2 is reachable.
        if not livre.lancement_debut_valide:
            return HttpResponseRedirect(reverse('lancement_debut',kwargs={'slug':livre.slug}))
        # At this step the book is only viewable by its authors.
        if request.user in livre.auteurs.all():
            return super(LancementInterneView, self).get(request, *args, **kwargs)
        else:
            raise Http404
    def post(self, request, **kwargs):
        if request.is_ajax():
            return self.ajax(request)
        raise Http404
    def ajax(self,request):
        """Handle either a manuscript upload or the step-2 data form (AJAX only).

        Returns JSON {'errors', 'success_url'}; success_url is set only when
        the step validates and the client asked to move on ('next').
        """
        error = False
        success_url = None
        form_errors = {}
        if request.FILES.has_key('fichier_auteur'):
            # Upload branch: just store the author's manuscript file.
            form = LancementFichiersForm(request.POST, request.FILES, instance=self.get_object())
            if form.is_valid():
                obj = form.save()
        else:
            data=json.loads(request.body)
            form = LancementInterneForm(data=data, instance=self.get_object())
            if form.is_valid():
                obj = form.save(commit=False)
                # Collect non-field errors directly on the form.
                errors = form._errors.setdefault(forms.forms.NON_FIELD_ERRORS, forms.util.ErrorList())
                if not hasattr(obj.fichier_auteur,'url'):
                    errors.append(force_text(_("le fichier auteur n'est pas present")))
                    error = True
                if obj.type_encre=='COL':
                    # Colour books: both colour and b&w page counts are required;
                    # the total page count is their sum.
                    if not obj.nb_pages_couleur:
                        form._errors['nb_pages_couleur'] = [force_text(_("Vous devez renseigner le nombre de pages en couleur"))]
                        error = True
                    if obj.nb_pages_nb==None:
                        form._errors['nb_pages_nb'] = [force_text(_("Vous devez renseigner le nombre de pages noir et blanc"))]
                        error = True
                    if obj.nb_pages_couleur and obj.nb_pages_nb:
                        obj.nb_pages=obj.nb_pages_couleur + obj.nb_pages_nb
                else:
                    # Black-and-white case
                    if not obj.maquette: # the author supplies their own layout
                        if obj.nb_pages==None:
                            form._errors['nb_pages'] = [force_text(_("Vous devez renseigner le nombre de pages de votre livre"))]
                            error = True
                        else:
                            if obj.nb_pages<16:
                                form._errors['nb_pages'] = [force_text(_("Le nombre de pages de votre maquete ne peut etre inferieur a 16"))]
                                error = True
                    else: # Bibliocratie produces the layout
                        if obj.nb_carac:
                            if obj.nb_carac<3291:
                                form._errors['nb_carac'] = [force_text(_("Le nombre de caracteres doit etre superieur a 3291"))]
                                error = True
                        else:
                            form._errors['nb_carac'] = [force_text(_("Vous devez renseigner le nombre de caracteres de votre livre"))]
                            error = True
                        if obj.nb_chapitres==None:
                            form._errors['nb_chapitres'] = [force_text(_("Vous devez renseigner le nombre de chapitres (0 si aucun)"))]
                            error = True
                        elif obj.nb_chapitres<0:
                            form._errors['nb_chapitres'] = [force_text(_("Votre nombre de chapitre est negatif, ce n'est pas normal"))]
                            error = True
                        # Estimate the page count from chapter/character counts
                        # using per-format constants, rounded up to an even number.
                        if obj.format=='FM1':
                            obj.nb_pages = math.ceil(obj.nb_chapitres*0.9+obj.nb_carac/860)
                            obj.nb_pages = obj.nb_pages + obj.nb_pages % 2
                        elif obj.format=='FM2':
                            obj.nb_pages = math.ceil(obj.nb_chapitres*1.2+obj.nb_carac/1070)
                            obj.nb_pages = obj.nb_pages + obj.nb_pages % 2
                        elif obj.format=='FM3':
                            obj.nb_pages = math.ceil(obj.nb_chapitres*0.7+obj.nb_carac/1600)
                            obj.nb_pages = obj.nb_pages + obj.nb_pages % 2
                if obj.format=='CST':
                    # Custom format: enforce minimum physical dimensions (mm).
                    if obj.largeur_mm<100:
                        form._errors['largeur_mm'] = [force_text(_("La largeur de votre livre ne peut etre inferieure a 100"))]
                        error = True
                    if obj.hauteur_mm<100:
                        form._errors['hauteur_mm'] = [force_text(_("La hauteur de votre livre ne peut etre inferieure a 100"))]
                        error = True
                if obj.format=='NTS':
                    form._errors['format'] = [force_text(_("Vous n'avez pas choisi de format"))]
                    error = True
                if not error:
                    next=data['next']
                    # Step 2 is valid; later steps must be re-validated.
                    obj.lancement_interieur_valide=True;
                    obj.lancement_couverture_valide=False;
                    obj.lancement_prixdate_valide=False;
                    obj.lancement_fin_valide=False;
                    obj.save()
                    if next:
                        success_url=reverse('lancement_couverture', args=[obj.slug])
            else:
                error=True
        response_data = {
            'errors':form.errors if error else {},
            'success_url':success_url,
        }
        return HttpResponse(json.dumps(response_data), content_type="application/json")
    def get_context_data(self, **kwargs):
        context = super(LancementInterneView, self).get_context_data(**kwargs)
        context.update(
            form=LancementInterneForm(instance=self.get_object()),
            formfichier=LancementFichiersForm(instance=self.get_object())
        )
        return context
class LancementCouvertureView(DetailView):
    """Launch wizard, step 3: cover (image upload, cover layout or cover model)."""
    template_name = 'bibliocratie/lancement_couverture.html'
    model = Livre
    def get(self, request, *args, **kwargs):
        livre = self.get_object()
        # Step 2 must be validated before step 3 is reachable.
        if not livre.lancement_interieur_valide:
            return HttpResponseRedirect(reverse('lancement_interne',kwargs={'slug':livre.slug}))
        # At this step the book is only viewable by its authors.
        if request.user in livre.auteurs.all():
            return super(LancementCouvertureView, self).get(request, *args, **kwargs)
        else:
            raise Http404
    def post(self, request, **kwargs):
        if request.is_ajax():
            return self.ajax(request)
        raise Http404
    def ajax(self,request):
        """Handle cover-file uploads or the step-3 data form; returns JSON {'errors','success_url'}."""
        success_url = None
        form_errors = {}
        if request.FILES.has_key('image_couverture') or request.FILES.has_key('maquete_couverture'):
            # Upload branch: store the cover image / cover layout file.
            form = LancementFichiersForm(request.POST, request.FILES, instance=self.get_object())
            if form.is_valid():
                obj = form.save()
        else:
            data=json.loads(request.body)
            form = LancementCouvertureForm(data=data, instance=self.get_object())
            if form.is_valid():
                obj = form.save(commit=False)
                errors = []
                if not hasattr(obj.image_couverture,'url'):
                    errors.append(force_text(_("le fichier image de la couverture n'est pas present")))
                if not obj.couverture:
                    # No cover model: the author's own cover layout file is required.
                    if not hasattr(obj.maquete_couverture,'url'):
                        errors.append(force_text(_("le fichier maquete de la couverture n'est pas present")))
                if obj.couverture and obj.modele_couverture=='':
                    errors.append(force_text(_("Vous devez choisir un modele de couverture")))
                if len(errors):
                    form_errors = {'__all__': errors}
                else:
                    next=data['next']
                    # Step 3 is valid; later steps must be re-validated.
                    obj.lancement_couverture_valide=True;
                    obj.lancement_prixdate_valide=False;
                    obj.lancement_fin_valide=False;
                    obj.save()
                    if next:
                        success_url=reverse('lancement_prixdate', args=[obj.slug])
            else:
                form_errors=form.errors
        response_data = {
            'errors':form_errors,
            'success_url':success_url,
        }
        return HttpResponse(json.dumps(response_data), content_type="application/json")
    def get_context_data(self, **kwargs):
        context = super(LancementCouvertureView, self).get_context_data(**kwargs)
        context.update(
            form=LancementCouvertureForm(instance=self.get_object()),
            formfichier=LancementFichiersForm(instance=self.get_object())
        )
        return context
class LancementPrixdateView(DetailView):
    """Launch wizard, step 4: production cost / sale price and campaign dates."""
    template_name = 'bibliocratie/lancement_prixdate.html'
    model = Livre
    def get(self, request, *args, **kwargs):
        livre = self.get_object()
        # Step 3 must be validated before step 4 is reachable.
        if not livre.lancement_couverture_valide:
            return HttpResponseRedirect(reverse('lancement_couverture',kwargs={'slug':livre.slug}))
        # At this step the book is only viewable by its authors (and staff/owner).
        if request.user in livre.auteurs.all():
            return super(LancementPrixdateView, self).get(request, *args, **kwargs)
        else:
            raise Http404
    def post(self, request, **kwargs):
        if request.is_ajax():
            return self.ajax(request)
        raise Http404
    def ajax(self,request):
        """Validate price/date settings; returns JSON {'errors', 'success_url'}."""
        success_url = None
        data=json.loads(request.body)
        form = LancementPrixDateForm(data=data, instance=self.get_object())
        if form.is_valid():
            obj = form.save(commit=False)
            errors = []
            cout_production = obj.cout_production
            # The declared production cost may not undercut the computed
            # per-copy production price.
            # NOTE(review): the message appends the submitted value, not the
            # computed minimum — confirm which was intended.
            if cout_production<obj.get_cout_production()['prix_exemplaire']:
                errors.append(force_text(_("Le prix de production ne peut etre inferieur a ")+ str(cout_production)))
            if cout_production==None:
                # NOTE(review): -1 appears to be a "no price" sentinel — confirm.
                obj.prix_vente=-1
            if len(errors):
                form_errors = {'__all__': errors}
            else:
                # Campaigns end in the evening!
                form_errors=form.errors
                next=data['next']
                # Subscription starts two weeks after the feedback date.
                if obj.pre_souscription and obj.date_feedback:
                    obj.date_souscription=obj.date_feedback + relativedelta(weeks=+2)
                if obj.pre_souscription:
                    # Pre-subscription closes on the Monday preceding the
                    # subscription start date.
                    obj.date_fin_presouscription= obj.date_souscription+relativedelta(weekday=MO(-1))
                obj.lancement_prixdate_valide=True;
                obj.lancement_fin_valide=False;
                obj.save()
                if next:
                    success_url=reverse('lancement_fin', args=[obj.slug])
        else:
            form_errors=form.errors
        response_data = {
            'errors':form_errors,
            'success_url':success_url,
        }
        return HttpResponse(json.dumps(response_data), content_type="application/json")
    def get_context_data(self, **kwargs):
        context = super(LancementPrixdateView, self).get_context_data(**kwargs)
        instance=self.get_object()
        # The subscription ends in the evening, so display the previous day.
        if instance.nb_jours_campagne:
            instance.nb_jours_campagne-=1
        form=LancementPrixDateForm(instance=instance)
        context.update(
            form=form,
        )
        return context
class LancementVousView(DetailView):
    """Launch wizard, "you" step: author biography, location, avatar and address."""
    template_name = 'bibliocratie/lancement_vous.html'
    model = Livre
    def get(self, request, *args, **kwargs):
        livre = self.get_object()
        # Step 4 must be validated before this step is reachable.
        if not livre.lancement_prixdate_valide:
            return HttpResponseRedirect(reverse('lancement_prixdate',kwargs={'slug':livre.slug}))
        # At this step the book is only viewable by its authors (and staff/owner).
        if request.user in livre.auteurs.all():
            return super(LancementVousView, self).get(request, *args, **kwargs)
        else:
            raise Http404
    def post(self, request, **kwargs):
        if request.is_ajax():
            return self.ajax(request)
        raise Http404
    def ajax(self,request):
        """Save the author's profile and address; bio, location and avatar are
        all required before the step is marked valid.

        Returns JSON with per-form errors and, on success, the book detail URL.
        """
        data=json.loads(request.body)
        user_form = BiblioUserBiolieu(instance=self.request.user, data=data['biblio_user_data'])
        adresse_form = AdresseForm(instance=self.request.user.adresse, data=data['adresse_data'])
        if user_form.is_valid() and adresse_form.is_valid():
            user = user_form.save()
            adresse = adresse_form.save()
            error = False
            errors = []
            # Post-save completeness checks; NOTE: injecting into _errors makes
            # the later user_form.is_valid() call evaluate False (relies on
            # Django form internals).
            if not user.biographie:
                user_form._errors['biographie'] = [force_text(_("Vous devez renseigner votre biographie"))]
                error = True
                errors.append(force_text(_("Vous devez renseigner votre biographie")))
            if not user.lieu:
                user_form._errors['lieu'] = [force_text(_("Vous devez renseigner un lieu"))]
                errors.append(force_text(_("Vous devez renseigner un lieu")))
                error = True
            if not user.avatar:
                user_form._errors['avatar'] = [force_text(_("Vous devez uploader un avatar"))]
                errors.append(force_text(_("Vous devez uploader un avatar")))
                error = True
            if not error:
                obj = self.get_object()
                obj.lancement_vous_valide=True
                # The author's biography is copied onto the book itself.
                obj.biographie=user.biographie
                obj.save()
            else:
                user_form.errors['__all__'] = errors
        response_data = {
            'user_form_errors' : user_form.errors,
            'adresse_form_errors' : adresse_form.errors,
            'success_url' : reverse('livre_detail', args=[self.get_object().slug]) if user_form.is_valid() else None,
        }
        return HttpResponse(json.dumps(response_data), content_type="application/json")
    def get_context_data(self, **kwargs):
        context = super(LancementVousView, self).get_context_data(**kwargs)
        context.update(
            user_form = BiblioUserBiolieu(instance=self.request.user),
            adresse_form = AdresseForm(instance=self.request.user.adresse)
        )
        return context
class LancementFinView(DetailView):
    """Launch wizard, final step: confirm and mark the launch as finished.

    GET is author-only and requires the "vous" step to be valid; POST is
    AJAX-only and answers JSON {'errors', 'success_url'}.
    """
    template_name = 'bibliocratie/lancement_fin.html'
    model = Livre

    def get(self, request, *args, **kwargs):
        livre = self.get_object()
        # The previous wizard step must be validated first.
        if not livre.lancement_vous_valide:
            return HttpResponseRedirect(reverse('lancement_vous', kwargs={'slug': livre.slug}))
        # At this step the book is only viewable by its authors (and staff/owner).
        if request.user not in livre.auteurs.all():
            raise Http404
        return super(LancementFinView, self).get(request, *args, **kwargs)

    def post(self, request, **kwargs):
        if not request.is_ajax():
            raise Http404
        return self.ajax(request)

    def ajax(self, request):
        """Validate the final form and flag the launch as complete."""
        form = LancementFinForm(data=json.loads(request.body), instance=self.get_object())
        success_url = None
        if form.is_valid():
            livre = form.save(commit=False)
            livre.lancement_fin_valide = True
            livre.save()
            success_url = reverse('livre_detail', args=[livre.slug])
        payload = {'errors': form.errors, 'success_url': success_url}
        return HttpResponse(json.dumps(payload), content_type="application/json")

    def get_context_data(self, **kwargs):
        context = super(LancementFinView, self).get_context_data(**kwargs)
        context['form'] = LancementFinForm(instance=self.get_object())
        return context
class NotificationsView(TemplateView):
    """Full-page notifications view, rendered from 'bibliocratie/notifications_fullpage.html'."""
    template_name = 'bibliocratie/notifications_fullpage.html'
class LivreList(TemplateView):
    """Book list page, rendered from 'bibliocratie/livre_list.html'."""
    template_name = 'bibliocratie/livre_list.html'
class PresouscriptionList(TemplateView):
    """Pre-subscription list page, rendered from 'bibliocratie/presouscription_list.html'."""
    template_name = 'bibliocratie/presouscription_list.html'
class LivreDetail(DetailView):
    """Book detail page.

    Chooses a phase-dependent template, computes edit/poll permissions for the
    current user, and exposes one large AJAX endpoint handling file uploads,
    image/text/price propositions, comments, book edits and the
    pre-subscription conversion. Always answers JSON on AJAX.
    """
    model = Livre
    def is_editable(self):
        # Decide, per section, whether the edit button is displayed.
        if self.object.phase in ['CREATION','VALIDATE','CREATRAN','GETMONEY']:
            # In these phases only the authors (and staff) have access to the book.
            if (self.request.user in self.object.auteurs.all()):
                return {
                    "general":True,
                    "type_titres":True,
                    "type_prix":True,
                    "type_couvertures":True,
                    "type_extraits":True,
                    "type_biographies":True
                }
        if self.object.phase in ['FEEDBACK']:
            # During feedback a section stays editable only while its poll has
            # never been opened.
            if (self.request.user in self.object.auteurs.all()):
                return {
                    "general":True,
                    "type_titres":True if self.object.type_titres=='NEVER_OPENED' else False,
                    "type_prix":True if self.object.type_prix=='NEVER_OPENED' else False,
                    "type_couvertures":True if self.object.type_couvertures=='NEVER_OPENED' else False,
                    "type_extraits":True if self.object.type_extraits=='NEVER_OPENED' else False,
                    "type_biographies":True if self.object.type_biographies=='NEVER_OPENED' else False,
                }
        return {
            "general":False,
            "type_titres":False,
            "type_prix":False,
            "type_couvertures":False,
            "type_extraits":False,
            "type_biographies":False
        }
    def is_sondageable(self):
        # Decide, per section, whether the poll ("sondage") edit button is displayed.
        if self.object.phase in ['CREATION','CREATRAN']:
            if (self.request.user in self.object.auteurs.all()):
                return {
                    "type_titres":True,
                    "type_prix":True,
                    "type_couvertures":True,
                    "type_extraits":True,
                    "type_biographies":True
                }
        if self.object.phase == 'FEEDBACK':
            if (self.request.user in self.object.auteurs.all()):
                return {
                    "type_titres":True if self.object.type_titres=='NEVER_OPENED' else False,
                    "type_prix":True if self.object.type_prix=='NEVER_OPENED' else False,
                    "type_couvertures":True if self.object.type_couvertures=='NEVER_OPENED' else False,
                    "type_extraits":True if self.object.type_extraits=='NEVER_OPENED' else False,
                    "type_biographies":True if self.object.type_biographies=='NEVER_OPENED' else False,
                }
        return {
            "general":False,
            "type_titres":False,
            "type_prix":False,
            "type_couvertures":False,
            "type_extraits":False,
            "type_biographies":False
        }
    def is_proposable(self):
        # Whether propositions may be submitted in the current phase.
        if self.object.phase in ['CREATION','FEEDBACK','CREATRAN']:
            return True
        return False
    def is_presouscription_transform(self):
        # True while an author is converting a pre-subscription into a subscription.
        if self.object.phase=='CREA-FEE' and self.request.user in self.object.auteurs.all():
            return True
        else:
            return False
    def get_context_data(self, **kwargs):
        """Build the (large) template context: forms, permissions, rating,
        recommendation and buyer status for the current user."""
        context = super(LivreDetail, self).get_context_data(**kwargs)
        self.object = self.get_object()
        # Current user's rating of the book; 0 when absent or anonymous.
        try:
            user_rating = Rating.objects.get(livre = self.object, user=self.request.user).rating
        except:
            user_rating = 0
        if self.object.phase=='CREA-FEE':
            # During the pre-subscription conversion, non-authors see open
            # polls as read-only.
            if self.request.user not in self.object.auteurs.all():
                if self.object.type_titres=='OPEN':
                    self.object.type_titres='READ_ONLY'
                if self.object.type_prix=='OPEN':
                    self.object.type_prix='READ_ONLY'
                if self.object.type_extraits=='OPEN':
                    self.object.type_extraits='READ_ONLY'
                if self.object.type_couvertures=='OPEN':
                    self.object.type_couvertures='READ_ONLY'
                if self.object.type_biographies=='OPEN':
                    self.object.type_biographies='READ_ONLY'
        if self.request.user.is_authenticated():
            recommendation_livre =self.request.user.recommendation_livre(livre = self.object)
        else:
            # Anonymous visitors get recommendations through a shared inactive
            # placeholder account.
            user,created = BiblioUser.objects.get_or_create(email='anonyme@anonyme.com', username='anonyme', is_active=False)
            recommendation_livre=user.recommendation_livre(livre = self.object)
        # Whether the current user has already subscribed to this book.
        is_buyer=self.request.user.is_authenticated() and (Livre.objects.filter(souscription__panier__client=self.request.user).filter(id=self.object.id).count()>0)
        context.update(
            image_form=ImagePropositionForm(),
            number_form=NumberPropositionForm(),
            text_form=TextPropositionForm(data={'valeur':""}),
            char_form=CharPropositionForm(),
            livre_form=LivreForm(instance=self.object),
            commentaire_form=CommentaireForm(),
            editable=self.is_editable(),
            sondageable=self.is_sondageable(),
            tags = json.dumps(TagSerializer(self.object.tags,many=True).data),
            user_rating = user_rating,
            presouscription_transform = self.is_presouscription_transform(),
            recommendation_livre = recommendation_livre,
            is_buyer = is_buyer > 0,
        )
        return context
    def get(self, request, *args, **kwargs):
        """Gate access by phase/authorship and select the phase template."""
        self.object = self.get_object()
        if not self.object.is_active:
            raise Http404;
        if self.object.phase in ['CREATION','FROZEN','VALIDATE']:
            # During creation only the authors (and staff) may access the book.
            if request.user.is_anonymous():
                return HttpResponseRedirect(reverse('signin')+'?next='+reverse('livre_detail', args=[self.object.slug]))
            if request.user not in self.object.auteurs.all():
                raise Http404("Le livre demande n'existe pas")
            # If the first wizard step is not validated, it must be validated.
            if not self.object.lancement_debut_valide:
                return HttpResponseRedirect(reverse('lancement_debut',kwargs={'slug':self.object.slug}))
            # If the last wizard step is not validated, it must be validated.
            if not self.object.lancement_fin_valide:
                return HttpResponseRedirect(reverse('lancement_fin',kwargs={'slug':self.object.slug}))
        # Template selection by phase.
        if self.object.phase in ['CREATION','FROZEN','VALIDATE','FEEDBACK','CREA-FEE']:
            if self.object.pre_souscription:
                self.template_name = 'bibliocratie/presouscription_detail.html'
            else:
                self.template_name = 'bibliocratie/livre_detail.html'
        if self.object.phase in ['GETMONEY','CREATRAN','FROZ-FEE']:
            self.template_name = 'bibliocratie/livre_detail.html'
        elif self.object.phase=='SUCCES':
            self.template_name = 'bibliocratie/livre_detail_succes.html'
        elif self.object.phase=='ECHEC':
            self.template_name = 'bibliocratie/livre_detail_echec.html'
        elif self.object.phase=='CANCELLE':
            raise Http404("Le livre demande n'existe pas")
        logger.debug("fin de get LivreDetail : " + self.object.slug)
        return super(LivreDetail,self).get(request, *args, **kwargs)
    @method_decorator(csrf_protect)
    @method_decorator(never_cache)
    def post(self, request, **kwargs):
        if request.is_ajax():
            self.object = self.get_object()
            return self.ajax(request)
        raise Http404;
    def ajax(self, request):
        """Dispatch an AJAX POST by payload type (file upload, image
        proposition, comment, text/price proposition, book edit, or
        pre-subscription conversion). Always returns a JSON HttpResponse."""
        if not request.user.is_authenticated():
            response_data = {
                'errors': {'__all__': [force_text(_("Vous devez etre authentifie pour soumettre des donnees"))]},
            }
            return HttpResponse(json.dumps(response_data), content_type="application/json")
        if self.is_editable()['type_extraits']:
            # Extract/cover image uploads posted directly by the author.
            if request.FILES.has_key('extrait1_img') or request.FILES.has_key('extrait2_img') or \
                request.FILES.has_key('extrait3_img') or request.FILES.has_key('extrait4_img') or request.FILES.has_key('image_couverture'):
                form = LivreFileForm(data=request.POST, files=request.FILES, instance=self.get_object())
                if form.is_valid():
                    obj = form.save()
                    response_data = {}
                    return HttpResponse(json.dumps(response_data), content_type="application/json")
                else:
                    response_data = {'error':form.errors}
                    return HttpResponse(json.dumps(response_data), content_type="application/json")
            # NOTE(review): when extracts are editable but no extract/cover file
            # was posted, this 404s before the JSON branches below are reached —
            # confirm this is intended.
            raise Http404
        if request.POST.has_key('image_type'):
            if self.is_proposable() or self.is_presouscription_transform():
                # The user posted an image proposition (extract or cover).
                form = ImagePropositionForm(request.POST, request.FILES)
                if form.is_valid():
                    obj = form.save(commit=False)
                    obj.auteur=request.user
                    obj.livre=self.get_object()
                    if request.POST['image_type']=='extrait':
                        obj.type='EXTRA'
                    else:
                        obj.type='COVER'
                    if self.is_presouscription_transform():
                        # When the pre-subscription becomes a subscription, the
                        # author's propositions are chosen automatically.
                        obj.private=True
                        obj.choisir()
                    obj.save()
                    response_data = {}
                    return HttpResponse(json.dumps(response_data), content_type="application/json")
                else:
                    response_data = {'error':form.errors}
                    return HttpResponse(json.dumps(response_data), content_type="application/json")
                raise Http404
            else:
                response_data = {
                    'errors': {'__all__': [u"le livre n'est pas ouvert aux propositions. Veuillez enregistrer vos modifications."]},
                }
                return HttpResponse(json.dumps(response_data), content_type="application/json")
        # From here on the payload is JSON, not multipart.
        data=json.loads(request.body)
        if data.has_key("commentaire"):
            form = CommentaireForm(data=data['commentaire'])
            if form.is_valid():
                obj = form.save(commit=False)
                obj.user = request.user
                # Comments on a finished (SUCCES) book count as reading reviews.
                if self.object.phase=="SUCCES":
                    obj.avis_lecture = True
                else:
                    obj.avis_lecture = False
                # Optional threading: attach to the replied-to comment if any.
                try:
                    obj.reponse_a=Commentaire.objects.get(id=data['reply_to'])
                except:
                    obj.reponse_a=None
                obj.livre=self.get_object()
                obj.save()
            response_data = {
                'livre': LivreApiSerializer(self.get_object(), context={'request': self.request}).data,
                'errors': form.errors,
            }
            return HttpResponse(json.dumps(response_data), content_type="application/json")
        if data.has_key("type_proposition"):
            error = None
            # Per-type gate: the section must be OPEN, or the requester an author.
            if data['type_proposition']=='TITRE':
                if self.object.type_titres=="OPEN" or request.user in self.object.auteurs.all():
                    form = CharPropositionForm(data=data['proposition'])
                else:
                    error = _("le livre n'est pas ouvert aux propositions TITRE.")
            if data['type_proposition']=='PHRASE':
                if self.object.type_phrases=="OPEN" or request.user in self.object.auteurs.all():
                    form = CharPropositionForm(data=data['proposition'])
                else:
                    error = _("le livre n'est pas ouvert aux propositions PHRASE.")
            if data['type_proposition']=='EXTRA':
                if self.object.type_extraits=="OPEN" or request.user in self.object.auteurs.all():
                    form = TextPropositionForm(data=data['proposition'])
                else:
                    error = _("le livre n'est pas ouvert aux propositions EXTRA.")
            if data['type_proposition']=='BIO':
                if self.object.type_biographies=="OPEN" or request.user in self.object.auteurs.all():
                    form = TextPropositionForm(data=data['proposition'])
                else:
                    error = _("le livre n'est pas ouvert aux propositions BIO.")
            if data['type_proposition']=='PRIX':
                if self.object.type_prix=="OPEN" or request.user in self.object.auteurs.all():
                    form = NumberPropositionForm(data=data['proposition'])
                else:
                    error = _("le livre n'est pas ouvert aux propositions PRIX.")
            if not error:
                if form.is_valid():
                    obj = form.save(commit=False)
                    if obj.get_type()=='NUMBER':
                        # Price propositions may not undercut the production cost.
                        livre = self.get_object()
                        if obj.valeur<livre.get_cout_production()['prix_exemplaire']:
                            response_data = {
                                'errors': {'__all__': [force_text('Le prix ne peut etre inferieur au cout de production')]},
                            }
                            return HttpResponse(json.dumps(response_data), content_type="application/json")
                    obj.auteur = request.user
                    obj.livre=self.get_object()
                    # Some proposition models carry no 'type' field.
                    try:
                        obj.type=data['type_proposition']
                    except:
                        pass
                    if self.is_presouscription_transform():
                        # When the pre-subscription becomes a subscription, the
                        # author's propositions are chosen automatically.
                        obj.private=True
                        obj.save()
                        obj.choisir()
                    else:
                        obj.save()
                presouscription_transform = (self.object.phase == 'CREA-FEE') and (self.request.user in self.object.auteurs.all())
                sondages_data = SondageApiSerializer(self.object, context={'request': self.request,'presouscription_transform':presouscription_transform}).data
                response_data = {
                    'sondages' : sondages_data,
                    'errors': form.errors,
                }
                return HttpResponse(json.dumps(response_data), content_type="application/json")
            else:
                response_data = {
                    'errors': {'__all__': [force_text(error)]},
                }
                return HttpResponse(json.dumps(response_data), content_type="application/json")
        if self.is_editable()['general'] and data.has_key("livre"):
            success_url=""
            form = LivreForm(data=data['livre'],instance=self.get_object())
            if form.is_valid():
                obj = form.save()
                if data['validation']:
                    # A validation request was made on the book: run the
                    # completeness checks below before freezing it.
                    error=""
                    error_count=0
                    if obj.resume=="":
                        error_count +=1
                        form._errors['resume'] = [force_text(_("Vous n'avez pas rempli de resume"))]
                    if obj.biographie=="":
                        error_count +=1
                        form._errors['biographie'] = [force_text(_("Vous n'avez pas rempli de biographie"))]
                    if obj.titre=="":
                        error_count +=1
                        form._errors['titre'] = [force_text(_("Vous n'avez pas rempli de titre"))]
                    if obj.type_extraits=="NEVER_OPENED":
                        # No extract poll: all four extracts must be provided
                        # directly (text or image depending on each extract's type).
                        if obj.extrait1_type=="T":
                            if len(obj.extrait1_txt)==0:
                                error_count +=1
                                form._errors['extrait1_txt'] = [force_text(_("Vous n'avez pas rempli l'extrait 1 texte"))]
                        else:
                            if not obj.extrait1_img:
                                error_count +=1
                                form._errors['extrait1_img'] = [force_text(_("Vous n'avez pas rempli l'extrait 1 image"))]
                        if obj.extrait2_type=="T":
                            if len(obj.extrait2_txt)==0:
                                error_count +=1
                                form._errors['extrait2_txt'] = [force_text(_("Vous n'avez pas rempli l'extrait 2 texte"))]
                        else:
                            if not obj.extrait2_img:
                                error_count +=1
                                form._errors['extrait2_img'] = [force_text(_("Vous n'avez pas rempli l'extrait 2 image"))]
                        if obj.extrait3_type=="T":
                            if len(obj.extrait3_txt)==0:
                                error_count +=1
                                form._errors['extrait3_txt'] = [force_text(_("Vous n'avez pas rempli l'extrait 3 texte"))]
                        else:
                            if not obj.extrait3_img:
                                error_count +=1
                                form._errors['extrait3_img'] = [force_text(_("Vous n'avez pas rempli l'extrait 3 image"))]
                        if obj.extrait4_type=="T":
                            if len(obj.extrait4_txt)==0:
                                error_count +=1
                                form._errors['extrait4_txt'] = [force_text(_("Vous n'avez pas rempli l'extrait 4 texte"))]
                        else:
                            if not obj.extrait4_img:
                                error_count +=1
                                form._errors['extrait4_img'] = [force_text(_("Vous n'avez pas rempli l'extrait 4 image"))]
                    else:
                        # Extract poll opened: instructions and (once read-only)
                        # at least four propositions are required.
                        if len(obj.instructions_extraits)==0:
                            form._errors['instructions_extraits'] = [force_text(_("Vous n'avez pas donne d'instruction concernant le vote sur les extraits"))]
                            error_count +=1
                        if obj.type_extraits=="READ_ONLY":
                            if (TextProposition.objects.filter(livre=obj,type='EXTRA').count() + ImageProposition.objects.filter(livre=obj,type='EXTRA').count())<4:
                                error += force_text(_("Vous avez ouvert aux votes les extraits sans faire au moins quatre propositions"))
                                error_count +=1
                    if obj.type_titres=="READ_ONLY":
                        if CharProposition.objects.filter(livre=obj).count()<2:
                            error += force_text(_("Vous avez ouvert aux votes les titres sans faire au moins deux propositions"))
                            error_count +=1
                    if obj.type_prix=="READ_ONLY":
                        if NumberProposition.objects.filter(livre=obj).count()<2:
                            error += force_text(_("Vous avez ouvert aux votes les prix sans faire au moins deux propositions"))
                            error_count +=1
                    if obj.type_couvertures=="READ_ONLY":
                        if ImageProposition.objects.filter(livre=obj,type='COVER').count()<2:
                            error += force_text(_("Vous avez ouvert aux votes l'image de couverture sans faire au moins deux propositions"))
                            error_count +=1
                    if obj.type_biographies=="NEVER_OPENED":
                        if len(obj.biographie)==0:
                            form._errors['biographie'] = [force_text(_("Vous n'avez pas rempli de biographie"))]
                            error_count +=1
                    else:
                        if len(obj.instructions_biographie)==0:
                            form._errors['instructions_biographie'] = [force_text(_("Vous n'avez pas donne d'instruction concernant le vote sur votre biographie"))]
                            error_count +=1
                        if obj.type_biographies=="READ_ONLY":
                            if TextProposition.objects.filter(livre=obj,type='BIO').count()<2:
                                error += force_text(_("Vous avez ouvert aux votes la biographie sans faire au moins deux propositions"))
                                error_count +=1
                    if obj.pre_souscription:
                        # Pre-subscriptions additionally need reader instructions
                        # and at least one section opened to polls.
                        if len(obj.instructions)==0:
                            form.errors['instructions']=[force_text(_("Vous n'avez pas donne d'instructions pour aider vos lecteurs"))]
                            error_count +=1
                        if obj.type_extraits=="NEVER_OPENED" and \
                            obj.type_titres=="NEVER_OPENED" and \
                            obj.type_prix=="NEVER_OPENED" and \
                            obj.type_couvertures=="NEVER_OPENED" and \
                            obj.type_biographies=="NEVER_OPENED":
                            error += force_text(_("Pour la presouscription, vous devez au moins ouvrir une rubrique aux sondages"))
                            error_count +=1
                    if obj.prix_vente==None:
                        error += force_text(_("Votre texte n'a pas de prix, mais votre livre doit en avoir un"))
                        error_count +=1
                    if error_count ==0:
                        # All checks passed: freeze the book for review.
                        if obj.phase =='CREATION':
                            obj.phase='FROZEN'
                        elif obj.phase == 'CREATRAN':
                            obj.phase='FROZ-FEE'
                        obj.save()
                    else:
                        form.errors['__all__'] = [error]
                        response_data = {
                            'errors': form.errors,
                        }
                        return HttpResponse(json.dumps(response_data), content_type="application/json")
                    success_url=reverse('livre_detail', args=[obj.slug])
                response_data = {
                    'livre': LivreApiSerializer(self.get_object(), context={'request': self.request}).data,
                    'errors': form.errors,
                    'success_url':success_url if data['validation'] else None
                }
                return HttpResponse(json.dumps(response_data), content_type="application/json")
            else:
                response_data = {
                    'errors': form.errors,
                }
                return HttpResponse(json.dumps(response_data), content_type="application/json")
        if self.object.phase=='CREA-FEE':
            # Author confirming (or cancelling) the pre-subscription conversion.
            if data['validation']:
                try:
                    self.object.presouscription_transform()
                    response_data = {
                        'success_url':self.object.url(),
                    }
                    return HttpResponse(json.dumps(response_data), content_type="application/json")
                except Exception as e:
                    error = e.message
                    response_data = {
                        'errors': {'__all__': [force_text(error)]},
                    }
                    return HttpResponse(json.dumps(response_data), content_type="application/json")
            else:
                response_data = {
                    'success_url':None,
                }
                return HttpResponse(json.dumps(response_data), content_type="application/json")
        raise Http404
class PanierView(FormView):
    """Shopping-cart page; an AJAX POST applies a coupon code to the cart."""
    template_name = 'bibliocratie/panier.html'
    form_class = BibliocratieCouponForm
    success_url = reverse_lazy('home')

    @method_decorator(csrf_protect)
    @method_decorator(never_cache)
    def post(self, request, **kwargs):
        if request.is_ajax():
            return self.ajax(request)
        # BUGFIX: was super(Commande, self) -- Commande is a model, not in this
        # view's MRO, so every non-AJAX POST raised TypeError.
        return super(PanierView, self).post(request, **kwargs)

    def ajax(self, request):
        """Validate the coupon form and apply the discount to the user's cart.

        Returns a JSON payload with the serialized cart, any form or business
        errors, and the redirect URL to follow on success.
        """
        form = self.form_class(data=json.loads(request.body))
        panier = Commande.objects.getUserPanier(self.request)
        try:
            if form.is_valid():
                # NOTE(review): assumes the form exposes `discount` after
                # validation (presumably set in its clean()) -- confirm in
                # BibliocratieCouponForm.
                panier.addDiscount(form.discount)
            error = form.errors
        except Exception as inst:
            # addDiscount signals business errors with a user-readable
            # message as args[0]; surface it as a non-field error.
            error = {'__all__': [inst.args[0]]}
        response_data = {
            'panier': PanierApiSerializer(panier).data,
            'errors': error,
            'success_url': force_text(self.success_url)
        }
        return HttpResponse(json.dumps(response_data), content_type="application/json")
class CheckoutView(FormView):
    # Checkout page: collects billing/shipping addresses and starts a Payline
    # web-payment session for the current user's cart ("panier").
    template_name = 'bibliocratie/checkout.html'
    form_class = AdresseForm
    def get(self, request, *args, **kwargs):
        # Only render the checkout page when the cart contains something;
        # otherwise send the visitor back to the book list.
        panier = Commande.objects.getUserPanier(self.request)
        if panier.existe():
            return super(CheckoutView,self).get(request, *args, **kwargs)
        else:
            return HttpResponseRedirect(reverse('livre_list'))
    @method_decorator(csrf_protect)
    def post(self, request, **kwargs):
        # AJAX posts carry the checkout JSON payload; anything else falls
        # back to the regular FormView handling.
        if request.is_ajax():
            return self.ajax(request)
        return super(CheckoutView, self).post(request, **kwargs)
    def ajax(self, request):
        """Validate the checkout forms and, when everything is valid and the
        cart total is non-zero, initiate a Payline doWebPayment session.

        Returns a JSON payload with per-form errors and, on success, the
        Payline redirect URL the browser should navigate to.
        """
        data = json.loads(request.body)
        proceed_with_payment = True
        adresse_facturation_form = None
        adresse_livraison_form = None
        checkout_form = None
        panier = Commande.objects.getUserPanier(self.request)
        if data.has_key('checkout_data'):
            checkout_form = CheckoutForm(data=data['checkout_data'])
            if checkout_form.is_valid():
                # A separate shipping address is only processed when the user
                # ticked the "different address" checkbox.
                if checkout_form.cleaned_data['diff_address'] == True:
                    if data.has_key('livraison_data'):
                        adresse_livraison_form = AdresseForm(data=data['livraison_data'],instance=panier.adresse_livr)
                        if adresse_livraison_form.is_valid():
                            panier.livraison_autre_adresse = True
                            adresse_livr = adresse_livraison_form.save()
                        else:
                            proceed_with_payment = False
        if data.has_key('facturation_data'):
            adresse_facturation_form = AdresseForm(data=data['facturation_data'],instance=panier.adresse_fact)
            if adresse_facturation_form.is_valid():
                adresse_fact = adresse_facturation_form.save()
                # Keep the user's profile address in sync with billing.
                adresse_user = request.user.adresse
                adresse_user.copy(adresse_fact)
                adresse_user.save()
            else:
                proceed_with_payment = False
        result = None
        if proceed_with_payment and panier.total_sans_discount_ni_taxes!=0:
            # Build the Payline SOAP request from a static XML template by
            # plain string substitution, then inject it verbatim via suds.
            payline_wsdl_url = finders.find('payline/payline_v4.38.wsdl')
            client = Client(url='file://' + payline_wsdl_url)
            client.set_options(
                location=settings.PAYLINE_URL,
                username=settings.PAYLINE_MERCHANT_ID,
                password=settings.PAYLINE_ACCESS_KEY)
            payline_xml_url = finders.find('payline/payline_doWebPaymentRequest.xml')
            xml_request = open(payline_xml_url, 'rb').read()
            # Save first so the cart has a no_commande to reference below.
            panier.save()
            xml_request = xml_request \
                .replace('REPLACEME_date', timezone.now().strftime('%d/%m/%Y %H:%M')) \
                .replace('REPLACEME_amount', str(int(100 * panier.prix))) \
                .replace('REPLACEME_command_ref', '%08d' % int(panier.no_commande)) \
                .replace('REPLACEME_contract_number', settings.PAYLINE_CONTRACT_NUMBER) \
                .replace('REPLACEME_server', get_current_site(self.request).domain) \
                .replace('REPLACEME_lastname', panier.client.nom) \
                .replace('REPLACEME_firstname', panier.client.prenom) \
                .replace('REPLACEME_email', panier.client.email) \
                .replace('REPLACEME_customer_id', unicode(panier.client.id))
            result = client.service.doWebPayment(__inject={'msg': xml_request})
            logger.debug("result doWebPayment : " + str(result))
            # '00000' is Payline's success code: remember the session token
            # and mark the cart as validated.
            if result.result.code == '00000':
                panier.payline_token = result.token
                panier.valider()
        response_data = {'errors_livraison': adresse_livraison_form.errors if adresse_livraison_form else None,
                         'errors_facturation': adresse_facturation_form.errors if adresse_facturation_form else None,
                         'errors_checkout': checkout_form.errors if checkout_form else None,
                         'success_url': force_text(result.redirectURL) if result else None
                         }
        return HttpResponse(json.dumps(response_data), content_type="application/json")
    def get_context_data(self, **kwargs):
        # Pre-fill the billing address with the user's profile address before
        # rendering the three checkout forms (billing, shipping, options).
        context = super(CheckoutView, self).get_context_data(**kwargs)
        panier = Commande.objects.getUserPanier(self.request)
        panier.adresse_fact.copy(self.request.user.adresse)
        panier.save()
        context.update(
            adresse_facturation_form=AdresseForm(auto_id=u'id1_%s', form_name='facturation_form',scope_prefix="facturation_data", instance = panier.adresse_fact),
            adresse_livraison_form=AdresseForm(auto_id=u'id2_%s', form_name='livraison_form',scope_prefix="livraison_data",instance = panier.adresse_livr),
            checkout_form=CheckoutForm(data={'diff_address': panier.livraison_autre_adresse}),
        )
        return context
class RetourPaylineView(TemplateView):
    """Landing page Payline redirects the shopper to after a payment attempt.

    ``get_context_data`` resolves the Payline token back to a cart, refreshes
    its payment status and exposes a human-readable ``status_retour``.
    POST handles the follow-up profile-completion form (AJAX only).
    """
    template_name = "bibliocratie/retour_payline.html"

    def get_context_data(self, **kwargs):
        context = super(RetourPaylineView, self).get_context_data(**kwargs)
        try:
            payline_token = self.request.GET['token']
        except MultiValueDictKeyError:
            return {'status_retour': "erreur : pas de token payline"}
        try:
            panier = Commande.objects.get(payline_token=payline_token)
        except ObjectDoesNotExist:
            context.update(
                status_retour = "erreur : pas de panier correspondant au token payline",
            )
            # BUGFIX: without this return, execution fell through to
            # panier.UpdatePaylineStatus() with `panier` unbound -> NameError.
            return context
        # Ask Payline for the final transaction state, then map it.
        panier.UpdatePaylineStatus()
        if panier.etat=='PAY':
            context.update(
                status_retour = "ok",
                commande = panier,
            )
        elif panier.etat=='ARR':
            panier.annuler()
            context.update(
                status_retour = "paiement arrété",
                commande = panier,
            )
        elif panier.etat=='REF':
            panier.refuser()
            context.update(
                status_retour = "paiement refusé",
                commande = panier,
            )
        elif panier.etat=='PEN':
            context.update(
                status_retour = "paiement indécis",
                commande = panier,
            )
        context['user_form'] = BiblioUserBiolieu(instance=self.request.user)
        return context

    def post(self, request, **kwargs):
        # This page only accepts AJAX posts (profile completion form).
        if request.is_ajax():
            return self.ajax(request)
        raise Http404

    def ajax(self, request):
        """Save the user's avatar upload or bio/location form."""
        user_form = None
        if request.FILES.has_key('avatar'):
            # Avatar upload: multipart form, saved silently; errors are not
            # reported back (empty JSON), matching previous behavior.
            user_form = BiblioUserFileForm(request.POST, request.FILES, instance=self.request.user)
            if user_form.is_valid():
                obj = user_form.save()
            return HttpResponse(json.dumps({}), content_type="application/json")
        else:
            # Bio/location update arrives as a JSON body.
            data = json.loads(request.body)
            user = self.request.user
            user_form = BiblioUserBiolieu(data=data, instance=user)
            if user_form.is_valid():
                obj = user_form.save(commit=False)
                obj.need_more_info = False
                obj.save()
            response_data = {
                'errors' : user_form.errors,
                'successurl' : reverse('profil_detail',kwargs={'slug':request.user.slug})
            }
            return HttpResponse(json.dumps(response_data), content_type="application/json")
class PasswordResetView(TemplateView):
    """Render the password-reset e-mail template for a given user pk.

    The context mirrors what django.contrib.auth's password-reset machinery
    provides (uid/token pair plus site information).
    """
    template_name = "mail/password_reset.html"

    def get_context_data(self, **kwargs):
        # Raises BiblioUser.DoesNotExist for an unknown or inactive pk,
        # exactly as before.
        pk = self.kwargs.get('pk', None)
        user = BiblioUser.objects.get(pk=pk, is_active=True)
        current_site = get_current_site(self.request)
        site_name = current_site.name
        domain = current_site.domain
        # FIX: a stray trailing comma used to wrap this dict in a one-element
        # tuple, compensated by "return context[0]"; return the dict directly.
        return {
            'email': user.email,
            'domain': domain,
            'site_name': site_name,
            'uid': urlsafe_base64_encode(force_bytes(user.pk)),
            'user': user,
            'protocol': 'http',
            'token': default_token_generator.make_token(user),
        }
class NotifPaylineView(View):
def get(self,request):
#http://URL_DE_NOTIFICATION?notificationType=webtrs&token=TOKEN_LORS_DU_DOWEBPAYMENT
notificationType = request.GET.get('notificationType')
payline_token = request.GET.get('token')
print "notificationtype" + notificationType
print "payline token" + payline_token
if notificationType=='WEBTRS':
try:
panier = Commande.objects.get(payline_token=payline_token)
except ObjectDoesNotExist:
print "Le payline_token " + unicode(payline_token) + "n'existe pas"
return HttpResponse('pas ok')
panier.UpdatePaylineStatus()
return HttpResponse('ok')
return HttpResponse('pas ok')
class StaffView(TemplateView):
    """Staff-only dashboard: site-wide statistics plus manual order entry."""
    template_name = "bibliocratie/staff.html"

    @method_decorator(staff_member_required)
    def dispatch(self, *args, **kwargs):
        return super(StaffView, self).dispatch(*args, **kwargs)

    def get_context_data(self, **kwargs):
        """Aggregate the dashboard counters and instantiate the manual-order
        forms (client, addresses, shipping option)."""
        presouscriptions = Livre.objects.filter(phase='FEEDBACK', is_active=True)
        nb_votes = Vote.objects.filter(proposition__livre__phase='FEEDBACK').count()
        nb_propositions = Proposition.objects.filter(livre__phase='FEEDBACK').count()
        nb_commentaires = Commentaire.objects.filter(livre__phase__in=['FEEDBACK','GETMONEY','SUCCES','ECHEC']).count()
        nb_succes = Livre.objects.filter(phase='SUCCES', is_active=True).count()
        nb_echecs = Livre.objects.filter(phase='ECHEC', is_active=True).count()
        nb_finished = nb_succes + nb_echecs
        # FIX: the dict used to be wrapped in a one-element tuple by a stray
        # trailing comma, compensated by "return context[0]"; return it directly.
        return {
            'nb_users': BiblioUser.objects.count(),
            'nb_commentaires': nb_commentaires,
            'nb_votes': nb_votes,
            'nb_propositions': nb_propositions,
            'nb_souscriptions': Livre.objects.filter(phase='GETMONEY', is_active=True).count(),
            'nb_presouscriptions': presouscriptions.count(),
            'nb_crea_souscriptions': Livre.objects.filter(phase='CREATION',pre_souscription=False, is_active=True).count(),
            'nb_crea_presouscriptions': Livre.objects.filter(phase='CREATION',pre_souscription=True, is_active=True).count(),
            'nb_frozen_souscriptions': Livre.objects.filter(phase='FROZEN',pre_souscription=False, is_active=True).count(),
            'nb_frozen_presouscriptions': Livre.objects.filter(phase='FROZEN',pre_souscription=True, is_active=True).count(),
            'nb_valid_souscriptions': Livre.objects.filter(phase='VALIDATE',pre_souscription=False, is_active=True).count(),
            'nb_valid_presouscriptions': Livre.objects.filter(phase='VALIDATE',pre_souscription=True, is_active=True).count(),
            'nb_succes': nb_succes,
            'nb_echecs': nb_echecs,
            # Percentages are only meaningful once at least one campaign ended.
            'pc_success': unicode(Decimal(float(nb_succes)*100/float(nb_finished)).quantize(Decimal('.01'), rounding=ROUND_HALF_UP)) if nb_finished else None,
            'pc_echecs': unicode(Decimal(float(nb_echecs)*100/float(nb_finished)).quantize(Decimal('.01'), rounding=ROUND_HALF_UP)) if nb_finished else None,
            'user_form' : BiblioUserEmailForm(),
            'adresse_cli_form' : AdresseForm(auto_id=u'id1_%s', form_name='adresse_cli_form',scope_prefix="adresse_cli_data"),
            'adresse_fact_form' : AdresseForm(auto_id=u'id2_%s', form_name='facturation_form',scope_prefix="facturation_data"),
            'adresse_livr_form' : AdresseForm(auto_id=u'id3_%s', form_name='livraison_form',scope_prefix="livraison_data"),
            'diff_form' : CheckoutForm(),
        }

    def post(self, request, **kwargs):
        if request.is_ajax():
            return self.ajax(request)
        return super(StaffView, self).post(request, **kwargs)

    def ajax(self, request):
        """Handle the two staff AJAX actions: create a client (+address), or
        record a manually-paid order with its subscriptions."""
        data = json.loads(request.body)
        if data.has_key('client') and data.has_key('adresse'):
            # --- Manual client creation ---
            client_form = BiblioUserEmailForm(data=data['client'])
            adresse_form = AdresseForm(data=data['adresse'])
            if adresse_form.is_valid() and client_form.is_valid():
                adresse = adresse_form.save()
                client = client_form.save(commit=False)
                client.adresse = adresse
                client.save()
            response_data = {
                'client': client_form.errors,
                'adresse': adresse_form.errors
            }
        elif data.has_key('commande') and data.has_key('adresse_fact') and data.has_key('adresse_livr'):
            # --- Manual order (e.g. cheque payment) recording ---
            dif = False
            if data.has_key('diff'):
                dif = data['diff']['diff_address']
            try:
                client = BiblioUser.objects.get(id=data['commande']['client']['id'], is_active=True)
            except:
                response_data = {
                    'error_msg' : u"Le client n'a pas été reconnu",
                }
                return HttpResponse(json.dumps(response_data), content_type="application/json")
            souscriptions = data['commande']['souscriptions']
            if not len(souscriptions):
                response_data = {
                    'error_msg' : u"Votre commande ne contient aucune souscription",
                }
                return HttpResponse(json.dumps(response_data), content_type="application/json")
            if not data['commande'].has_key('info'):
                response_data = {
                    'error_msg' : u"Veuillez renseigner le champ commentaire/no cheque",
                }
                return HttpResponse(json.dumps(response_data), content_type="application/json")
            adresse_fact_form = AdresseForm(data=data['adresse_fact'])
            ok = adresse_fact_form.is_valid()
            adresse_livr_form = None
            if dif:
                adresse_livr_form = AdresseForm(data=data['adresse_livr'])
                # FIX: previously a valid shipping address reset ok to True
                # even when the billing address was invalid; both forms must
                # be valid for the order to be created.
                if not adresse_livr_form.is_valid():
                    ok = False
            if ok:
                commande = Commande(client=client, etat='PAY', infos=data['commande']['info'])
                commande.save()
                # Copy the billing address onto the order and the client.
                adresse_fact = commande.adresse_fact
                adresse_fact.copy(adresse_fact_form.save(commit=False))
                adresse_fact.save()
                adresse_cli = client.adresse
                adresse_cli.copy(adresse_fact)
                adresse_cli.save()
                adresse_livr = commande.adresse_livr
                if dif:
                    adresse_livr.copy(adresse_livr_form.save())
                    adresse_livr.save()
                    commande.livraison_autre_adresse = True
                    commande.save()
                # Attach one Souscription per ordered book.
                for achat in souscriptions:
                    livre = Livre.objects.get(id=achat['id'], is_active=True)
                    souscription = Souscription(livre=livre, etat='ENC', quantite=achat['quantite'], panier=commande)
                    souscription.save()
            response_data = {
                'facturation': adresse_fact_form.errors,
                'livraison': adresse_livr_form.errors if adresse_livr_form else None,
                'commande' : CommandeSerializer(commande, context={'request': self.request}).data if ok else None,
            }
        return HttpResponse(json.dumps(response_data), content_type="application/json")
class LancementJsonView(JSONResponseMixin, View):
    # AJAX endpoints used by the campaign-launch wizard: propose start/end
    # dates and compute production costs for a book ("livre").
    @allow_remote_invocation
    def GetDatesDebut(self, in_data):
        """Return the candidate campaign start dates (one per week, always a
        Wednesday) for the given book."""
        livre_id = in_data['livre_id']
        livre = Livre.objects.get(id=livre_id, is_active=True)
        TODAY = date.today()
        dates_possibles = []
        # Early in the week (Mon-Wed) the first slot is the Wednesday after
        # next; otherwise next Wednesday.
        if (TODAY.isoweekday() in [1,2,3]):
            date_possible = TODAY+relativedelta(weekday=WE(+2))
        else:
            date_possible = TODAY+relativedelta(weekday=WE(+1))
        no_semaine = 1
        MAX_SEMAINES = 8
        # Offer up to 8 weekly slots, formatted as fullcalendar events.
        while no_semaine<=MAX_SEMAINES :
            dates_possibles.append({'title':str(no_semaine),'start':date_possible,'id':str(no_semaine),
                                    'tooltip':"Choix possible",
                                    'tooltipPlacement':"left",
                                    'tooltipNotSelected':"Choix possible",
                                    'titre':"Pre-souscript." if livre.pre_souscription else "Souscription",
                                    'tooltipSelected':"Debut de la pre-souscription" if livre.pre_souscription else "Debut de la souscription"})
            date_possible = date_possible + relativedelta(weeks=+1)
            no_semaine += 1
        event_souscription=None
        # For pre-subscriptions also return the (already chosen) subscription
        # start date, rendered in Paris local time.
        if livre.pre_souscription:
            paris = pytz.timezone('Europe/Paris')
            event_souscription = {'title':"Souscription",
                                  'start':livre.date_souscription.astimezone(paris).date().isoformat() if livre.date_souscription else None,
                                  'id':"1",
                                  'tooltip':"Date de souscription",
                                  'tooltipPlacement':"left",
                                  'tooltipNotSelected':"Choix possible",
                                  'tooltipSelected':"Debut de la souscription",
                                  'titre':"Souscription",
                                  }
        return {'dates_possibles' : dates_possibles,
                'pre_souscription' : livre.pre_souscription,
                'event_souscription': event_souscription}
    @allow_remote_invocation
    def GetDatesFin(self, in_data):
        """Return the candidate campaign end dates (Saturdays) for the chosen
        start date, plus the derived subscription start date."""
        date_debut = in_data['date_debut']
        livre_id = in_data['livre_id']
        livre = Livre.objects.get(id=livre_id, is_active=True)
        date_debut = dateutil.parser.parse(date_debut)
        dates_fin_souscription = []
        # Pre-subscription campaigns leave 3 extra weeks before the first
        # possible end date; the real subscription then starts on the third
        # Wednesday after the chosen start.
        if livre.pre_souscription:
            date_possible = date_debut+relativedelta(weekday=SA(+1),weeks=+3)
            date_souscription = date_debut + relativedelta(weekday=WE(+3))
        else:
            date_possible = date_debut+relativedelta(weekday=SA(+1),weeks=+1)
            date_souscription = date_debut
        if livre.nb_jours_campagne:
            date_fin = date_souscription + relativedelta(days=livre.nb_jours_campagne)
        else:
            date_fin=None
        no_semaine = 1
        MAX_SEMAINES = 8
        # Highlight the slot matching the already-configured campaign length.
        while no_semaine<=MAX_SEMAINES :
            dates_fin_souscription.append({'title': "Fin de souscription" if date_fin==date_possible else str(no_semaine),
                                           'start':date_possible.date().isoformat(),
                                           'id':str(no_semaine),
                                           'className' : ['date-choisie'] if date_fin==date_possible else [],
                                           'tooltip':"Fin de souscription" if date_fin==date_possible else "Choix possible",
                                           'tooltipPlacement':"left",
                                           'tooltipNotSelected':"Choix possible",
                                           'titre':"Fin souscript.",
                                           'tooltipSelected':"Fin de la souscription"})
            date_possible = date_possible + relativedelta(weeks=+1)
            no_semaine += 1
        return {'dates_fin_souscription' : dates_fin_souscription,
                'date_souscription': {'titre':'Souscription',
                                      'start':date_souscription.date().isoformat(),'id':str(no_semaine),
                                      'tooltip':"Souscription",
                                      'tooltipPlacement':"left",
                                      'tooltipNotSelected':"Choix possible",
                                      'tooltipSelected':"Début de la souscription"
                                      },
                'pre_souscription' : livre.pre_souscription, #true or false
                'date_fin':date_fin.date().isoformat() if date_fin else None,
                }
    @allow_remote_invocation
    def GetCoutProduction(self, in_data):
        """Compute the production cost for a target print run.

        NOTE(review): the target count is assigned to the instance but not
        saved, so get_cout_production presumably reads the in-memory value --
        confirm on the Livre model.
        """
        if in_data.has_key('livre_id') and in_data.has_key('nb_exemplaires_cible'):
            livre=Livre.objects.get(id=in_data['livre_id'], is_active=True)
            livre.nb_exemplaires_cible=in_data['nb_exemplaires_cible']
            return livre.get_cout_production()
        else:
            return {
                'message' : None,
                'prix_exemplaire' : None,
            }
    @allow_remote_invocation
    def RefreshData(self, in_data):
        """Return the book's current file/cover URLs for the wizard UI."""
        livre_id=in_data['livre_id']
        livre = Livre.objects.get(pk=livre_id, is_active=True)
        out_data = {
            'url_fichier': livre.fichier_auteur.url if hasattr(livre.fichier_auteur, 'url') else "",
            'nom_fichier': livre.fichier_auteur.name,
            'image_couverture': livre.image_300x400_url(),
            'maquette_couverture': livre.maquete_couverture.url if hasattr(livre.maquete_couverture, 'url') else "",
            'maquette_couverture_fichier_name': livre.maquete_couverture.name,
            'success': True,
        }
        return out_data
class GlobalSearchJsonView(JSONResponseMixin, View):
    """Full-text search endpoint backed by django-watson."""
    @allow_remote_invocation
    def Search(self, in_data):
        """Run the search and return every hit's non-empty meta payload,
        echoing the query string back to the caller."""
        query = in_data['search']
        hits = watson.search(query)
        metas = [hit.meta for hit in hits if hit.meta]
        return {'list': metas, 'search': query}
class PanierJsonView(JSONResponseMixin, View):
    '''
    AJAX cart endpoints, connected to the angular PanierCtrl controller.
    '''
    @allow_remote_invocation
    def addLivre(self, in_data):
        """
        Add a book to the user's cart; if in_data['livre_id'] == -1 the cart
        is simply refreshed and returned unchanged.
        """
        livre_id = in_data['livre_id']
        panier = Commande.objects.getUserPanier(self.request)
        message = ""
        # livre_id == -1 is the "refresh only" sentinel used by the front end.
        if livre_id != -1:
            livre = Livre.objects.get(id=livre_id, is_active=True)
            # Purchasing is only allowed while the book is in funding phase.
            if livre.phase == 'GETMONEY':
                panier.save()
                quantite = in_data['quantite']
                panier.addSouscription(in_data['livre_id'], quantite)
            else:
                message = _("Ajout au panier impossible : le livre n'est pas en souscription")
        out_data = {
            'panier': PanierApiSerializer(panier, context={'request': self.request}).data,
            'success': True,
            'message': force_text(message),
        }
        return out_data

    @allow_remote_invocation
    def removeLivre(self, in_data):
        """
        Remove a book from the user's cart and return the updated cart.

        NOTE(review): unlike addLivre, the -1 sentinel is not special-cased
        here; removeLivre(-1) is forwarded to the model -- confirm intended.
        """
        livre_id = in_data['livre_id']
        panier = Commande.objects.getUserPanier(self.request)
        panier.removeLivre(in_data['livre_id'])
        out_data = {
            'panier': PanierApiSerializer(panier, context={'request': self.request}).data,
            'success': True,
        }
        return out_data

    @allow_remote_invocation
    def removeSouscriptions(self, in_data):
        """
        Delete the subscription line with the given id from the cart.
        """
        souscription_id = in_data['souscription_id']
        panier = Commande.objects.getUserPanier(self.request)
        for souscription in panier.souscription_set.all():
            if souscription.id == souscription_id:
                souscription.delete()
        out_data = {
            'panier': PanierApiSerializer(panier, context={'request': self.request}).data,
            'success': True,
        }
        return out_data

    @allow_remote_invocation
    def removeDiscount(self, in_data):
        """
        Delete the given discount and return the recomputed cart.
        """
        discount_id = in_data['discount_id']
        discount = Discount.objects.get(id=discount_id)
        discount.delete()
        panier = Commande.objects.getUserPanier(self.request)
        out_data = {
            'panier': PanierApiSerializer(panier, context={'request': self.request}).data,
            'success': True,
        }
        return out_data

    @allow_remote_invocation
    def setPaysLivraison(self, in_data):
        """
        Set the shipping country on the cart so shipping fees are recomputed.
        """
        pays = in_data
        panier = Commande.objects.getUserPanier(self.request)
        panier.setPaysLivraison(pays)
        out_data = {
            'panier': PanierApiSerializer(panier, context={'request': self.request}).data,
            'success': True,
        }
        return out_data

    @allow_remote_invocation
    def goToOrder(self, in_data):
        """
        Return the checkout URL together with the user's authentication state.
        """
        # FIX(idiom): both branches previously duplicated the whole dict,
        # differing only in the is_authenticated flag.
        return {
            'success_url': reverse('checkout'),
            'is_authenticated': self.request.user.is_authenticated(),
            'success': True,
        }

    @allow_remote_invocation
    def lancerMonProjet(self, in_data):
        """
        Return the campaign-launch URL together with the user's
        authentication state.
        """
        # FIX(idiom): same de-duplication as goToOrder.
        return {
            'success_url': reverse('lancement'),
            'is_authenticated': self.request.user.is_authenticated(),
            'success': True,
        }
class ProfilJsonView(JSONResponseMixin, View):
    '''
    AJAX endpoints for the user-profile page: follow/unfollow, timeline
    comments, order history and password-reset e-mail.
    '''
    @allow_remote_invocation
    def follow(self, in_data):
        """Toggle (or, with in_data['question'], just query) the follow state
        between the current user and in_data['userid']."""
        user = self.request.user
        if not user.is_authenticated():
            if not in_data.has_key('question'):
                out_data = {
                    'success': False,
                    'message': unicode(_("Vous devez etre authentifie pour suivre quelqu'un")),
                }
            else:
                # A mere state query from an anonymous user: report "not
                # following" without an error.
                out_data = {
                    'css_follow': "non",
                    'txt_follow': "Suivre",
                    'success': True,
                }
        else:
            followee = BiblioUser.objects.get(pk=in_data['userid'], is_active=True)
            if user != followee:
                f = Follow.objects.filter(qui=user, suit=followee).first()
                if f:
                    css_follow = f.lien.lower()
                else:
                    css_follow = "non"
                # Without 'question' this is a toggle: AMI <-> ENN.
                if not in_data.has_key('question'):
                    f, created = Follow.objects.get_or_create(qui=user, suit=followee)
                    if created or f.lien == "ENN":
                        f.lien = 'AMI'
                    else:
                        f.lien = 'ENN'
                    f.save()
                    css_follow = f.lien.lower()
                if css_follow == "non":
                    txt_follow = "Suivre"
                if css_follow == "ami":
                    txt_follow = "Ne plus suivre"
                if css_follow == "enn":
                    txt_follow = "Suivre"
            else:
                css_follow = force_text(_("non"))
                txt_follow = force_text(_("Vous ne pouvez pas vous follower vous meme"))
            out_data = {
                'css_follow': css_follow,
                'txt_follow': txt_follow,
                'success': True,
            }
        return out_data

    @allow_remote_invocation
    def comment(self, in_data):
        """Attach a (max 400 chars) comment to a timeline entry and bump the
        entry's timestamp so it surfaces again."""
        user = self.request.user
        if user.is_authenticated():
            if in_data.has_key('commentaire'):
                timeline = Timeline.objects.get(id=in_data['timelineid'])
                commentaire = TimelineCommentaire(user=user, contenu=in_data['commentaire'][:400], timeline=timeline)
                commentaire.save()
                timeline.timestamp = timezone.now()
                timeline.save()
                out_data = {
                    'timeline': TimelineApiSerializer(timeline, context={'request': self.request}).data,
                    'success': True,
                }
            else:
                # NOTE(review): 'success' carries a message string here (a
                # truthy value) -- looks like it was meant to be 'message'
                # with success=False; confirm against the JS client before
                # changing.
                out_data = {
                    'timeline': False,
                    'success': unicode(_("Votre commentaire est vide")),
                }
        else:
            out_data = {
                'success': False,
                'message': unicode(_("Vous devez etre authentifie pour comenter")),
            }
        return out_data

    @allow_remote_invocation
    def getCommandes(self, in_data):
        """Return the authenticated user's paid orders."""
        user = self.request.user
        if user.is_authenticated():
            commandes = Commande.objects.filter(client=user, etat='PAY')
            out_data = {
                'commandes': CommandeSerializer(commandes, context={'request': self.request}, many=True).data,
                'success': True,
            }
        else:
            out_data = {
                'success': False,
                'message': unicode(_("Vous devez etre authentifie pour lister vos commandes")),
            }
        return out_data

    @allow_remote_invocation
    def passRecover(self, in_data):
        """Send the password-reset e-mail (uid/token link) to the current
        authenticated user."""
        user = self.request.user
        if user.is_authenticated():
            # BUGFIX: was get_current_site(self) -- the sites helper expects
            # the HttpRequest, not the view instance.
            current_site = get_current_site(self.request)
            site_name = current_site.name
            domain = current_site.domain
            subject = _("Reinitialisation de votre mot de passe")
            to = [user.email]
            ctx = {
                'uid': urlsafe_base64_encode(force_bytes(user.pk)),
                'user': user,
                'email': user.email,
                'domain': domain,
                'site_name': site_name,
                'protocol': 'http',
                'token': default_token_generator.make_token(user),
            }
            message = get_template('mails/password_reset.html').render(Context(ctx))
            msg = EmailMessage(subject, message, to=to)
            msg.content_subtype = 'html'
            msg.send()
            out_data = {
                'success': True,
                'message': unicode(_("Un message expliquant la procedure pour changer de mot de passe vient de vous etre envoye"))
            }
        else:
            out_data = {
                'success': False,
                'message': unicode(_("Vous devez etre authentifie pour réinitialiser votre mot de passe")),
            }
        return out_data
class LivreJsonView(JSONResponseMixin, View):
    '''
    AJAX endpoints for the book ("livre") page, connected to the angular
    LivreCtrl controller.
    '''
    @allow_remote_invocation
    def getLivre(self, in_data):
        """
        Return the serialized book plus whether the current user is one of
        its authors.
        """
        livre_id = in_data['livre_id']
        livre = Livre.objects.get(id=livre_id, is_active=True)
        if self.request.user in livre.auteurs.all():
            je_suis_lauteur=True
        else:
            je_suis_lauteur=False
        out_data = {
            'livre': LivreApiSerializer(livre, context={'request': self.request}).data,
            'success': True,
            'je_suis_lauteur': je_suis_lauteur,
        }
        return out_data
    @allow_remote_invocation
    def getsondages(self, in_data):
        """
        Return the polls ("sondages") attached to the book; None on any error.
        """
        livre_id = in_data['livre_id']
        try:
            livre=Livre.objects.get(id=livre_id, is_active=True)
            # Authors of a fee-paid creation can convert poll results.
            presouscription_transform = (livre.phase == 'CREA-FEE') and (self.request.user in livre.auteurs.all())
            sondages_data = SondageApiSerializer(livre, context={'request': self.request,'presouscription_transform':presouscription_transform}).data
        except:
            # NOTE(review): bare except silently hides serializer/DB errors.
            sondages_data=None
        out_data = {
            'sondages': sondages_data,
            'success': True,
        }
        return out_data
    @allow_remote_invocation
    def getSelecteurs(self):
        """
        Return the filter selectors (categories, genres, states, phases)
        available for books currently in funding with more than 4 backers.

        NOTE(review): unlike the other remote-invocation methods this one
        takes no in_data parameter -- confirm the JS client calls it without
        a payload.
        """
        categories= Livre.objects.filter(is_active=True,phase='GETMONEY').annotate(nb_souscription=Count('souscription')).filter(nb_souscription__gt=4).values('category').annotate(count=Count('category'))
        genres = Livre.objects.filter(is_active=True,phase='GETMONEY').annotate(nb_souscription=Count('souscription')).filter(nb_souscription__gt=4).values('genre').annotate(count=Count('genre'))
        etats = Livre.objects.filter(is_active=True,phase='GETMONEY').annotate(nb_souscription=Count('souscription')).filter(nb_souscription__gt=4).values('etat').annotate(count=Count('etat'))
        phases = ['GETMONEY','SUCCES','ECHEC']
        # Attach human-readable labels from the model choice tuples.
        for categorie in categories:
            categorie['display']=force_text(dict(Livre.TYPE_CATEGORY).get(categorie['category'], categorie['category']), strings_only=True)
        for genre in genres:
            genre['display']=force_text(dict(Livre.TYPE_GENRE).get(genre['genre'], genre['genre']), strings_only=True)
        for etat in etats:
            etat['display']=force_text(dict(Livre.TYPE_ETAT).get(etat['etat'], etat['etat']), strings_only=True)
        # Each *_json list starts with an empty "all" entry.
        categories_json=[{'key':"",'value':force_text(dict(Livre.TYPE_CATEGORY).get('', ''), strings_only=True)}]
        for categorie in categories:
            categories_json.append({'key':categorie['category'],'value':force_text(dict(Livre.TYPE_CATEGORY).get(categorie['category'], categorie['category']), strings_only=True)})
        genres_json=[{'key':"",'value':force_text(dict(Livre.TYPE_GENRE).get('', ''), strings_only=True)}]
        for genre in genres:
            genres_json.append({'key':genre['genre'],'value':force_text(dict(Livre.TYPE_GENRE).get(genre['genre'], genre['genre']), strings_only=True)})
        # Only prepend the empty "state" entry if no book already has an
        # empty state value (avoids a duplicate key).
        etat_nul_trouve = False
        for etat in etats:
            if etat['etat']=="":
                etat_nul_trouve=True
                break
        if not etat_nul_trouve:
            etats_json=[{'key':"",'value':force_text(dict(Livre.TYPE_ETAT).get('', ''), strings_only=True)}]
        else:
            etats_json = []
        for etat in etats:
            etats_json.append({'key':etat['etat'],'value':force_text(dict(Livre.TYPE_ETAT).get(etat['etat'], etat['etat']), strings_only=True)})
        phases_json=[]
        for phase in phases:
            phases_json.append({'key':phase,'value':force_text(dict(Livre.PHASES).get(phase, phase), strings_only=True)})
        out_data = {
            'categories': categories_json,
            'genres': genres_json,
            'etats': etats_json,
            'phases': phases_json,
            'success': True,
        }
        return out_data
    @allow_remote_invocation
    def me_rappeler(self, in_data):
        """
        Register the authenticated user to be reminded shortly before the end
        of the book's subscription period.
        """
        user = self.request.user
        livre_api = None
        if user.is_anonymous():
            result=False;
            message=_('Vous devez etre authentifie pour utiliser la fonction de rappel')
        else:
            try:
                livre_id = in_data['livre_id']
                user = self.request.user
                livre=Livre.objects.get(id=livre_id, is_active=True)
                if livre.phase=='GETMONEY':
                    user_rappel,created = MeRappeler.objects.get_or_create(livre=livre,user=user)
                    result = True;
                    if created:
                        message = _('Votre demande a ete enregistree')
                    else:
                        message = _('Votre demande a deja ete enregistree')
                else:
                    result = False;
                    message = _("Cette fonction n'est disponible que pendant la souscription")
            except:
                # NOTE(review): bare except also masks programming errors.
                result = False;
                message = _("Une erreur s'est produite pendant l'enregistrement de votre demande")
        out_data = {
            'success': result,
            'message': force_text(message),
        }
        return out_data
    @allow_remote_invocation
    def demander_new(self, in_data):
        """
        Register the authenticated user's request to re-open the subscription
        of a finished (SUCCES/ECHEC) book.
        """
        user = self.request.user
        livre_api = None
        if user.is_anonymous():
            result=False;
            message=_('Vous devez etre authentifie pour utiliser la fonction de demande de souscription')
        else:
            try:
                livre_id = in_data['livre_id']
                user = self.request.user
                livre=Livre.objects.get(id=livre_id, is_active=True)
                if livre.phase in ['SUCCES','ECHEC']:
                    user_rappel,created = DemanderNewSouscription.objects.get_or_create(livre=livre,user=user)
                    result = True;
                    if created:
                        message = _('Votre demande a ete enregistree')
                    else:
                        message = _('Votre demande a deja ete enregistree')
                else:
                    result = False;
                    message = _("Cette fonction n'est disponible que pendant la souscription")
            except:
                result = False;
                message = _("Une erreur s'est produite pendant l'enregistrement de votre demande")
        out_data = {
            'livre': LivreApiSerializer(livre, context={'request': self.request}).data if result else None,
            'success': result,
            'message': force_text(message),
        }
        return out_data
    @allow_remote_invocation
    def rate(self, in_data):
        """
        Record (create or update) the authenticated user's rating of a book
        while it is in its funding phase.
        """
        user = self.request.user
        livre_api = None
        if user.is_anonymous():
            result=False;
            message=_('Vous devez etre authentifie pour voter sur un livre')
        else:
            try:
                livre_id = in_data['livre_id']
                user = self.request.user
                livre=Livre.objects.get(id=livre_id, is_active=True)
                if livre.phase=='GETMONEY':
                    user_rate,created = Rating.objects.get_or_create(livre=livre,user=user)
                    user_rate.rating=in_data['rate']
                    user_rate.save()
                    result = True;
                    message = ""
                    livre_api = LivreApiSerializer(livre, context={'request': self.request}).data
                else:
                    result = False;
                    message = _("Le vote n'est ouvert que pendant la souscription")
            except:
                result = False;
                message = _("Une erreur s'est produite pendant l'enregistrement de votre vote")
        out_data = {
            'success': result,
            'message': force_text(message),
            'livre': livre_api
        }
        return out_data
    @allow_remote_invocation
    def vote(self, in_data):
        """
        Vote for a proposition; for authors of a CREA-FEE book this instead
        selects ("choisir") the proposition.
        """
        if not self.request.user.is_authenticated():
            out_data = {
                'success': False,
                'message': force_text(_("Vous devez etre authentifie pour voter")),
            }
            return out_data
        proposition_id = in_data['proposition_id']
        proposition = Proposition.objects.get(pk=proposition_id)
        livre = proposition.getTypedProposition().livre
        # Author path: selecting the winning proposition directly.
        if livre.phase=="CREA-FEE" and self.request.user in livre.auteurs.all():
            typedProposition=proposition.getTypedProposition()
            message=""
            try:
                typedProposition.choisir()
                success = True
            except Exception as e:
                success = False
                message = e.message
            presouscription_transform = (livre.phase == 'CREA-FEE') and (self.request.user in livre.auteurs.all())
            out_data = {
                'success': success,
                'message': message,
                'sondages': SondageApiSerializer(livre, context={'request': self.request,'presouscription_transform':presouscription_transform}).data,
            }
            return out_data
        # Regular voting is only open during the feedback phase.
        if livre.phase!='FEEDBACK':
            out_data = {
                'success': False,
                'message': force_text(_("Le vote n'est autorise qu'en presouscription")),
            }
            return out_data
        # Create the vote only if this user has not voted on this
        # proposition yet (EAFP on the unique pair).
        try:
            Vote.objects.get(proposition=proposition,user=self.request.user)
        except Vote.DoesNotExist:
            vote=Vote(proposition=proposition,user=self.request.user)
            vote.save()
        # NOTE(review): phase is 'FEEDBACK' here, so this flag is always
        # False on this path.
        presouscription_transform = (livre.phase == 'CREA-FEE') and (self.request.user in livre.auteurs.all())
        out_data = {
            'livre': LivreApiSerializer(livre, context={'request': self.request}).data,
            'success': True,
            'sondages': SondageApiSerializer(livre, context={'request': self.request,'presouscription_transform':presouscription_transform}).data,
        }
        return out_data
    @allow_remote_invocation
    def remove_proposition(self, in_data):
        """
        Delete a proposition and return the refreshed polls.

        NOTE(review): no permission check here -- any authenticated caller
        with a proposition id can delete it; confirm this is intended.
        """
        proposition_id = in_data['proposition_id']
        proposition = Proposition.objects.get(pk=proposition_id)
        livre = Livre.objects.get(id=proposition.getTypedProposition().livre_id, is_active=True)
        proposition.delete()
        presouscription_transform = (livre.phase == 'CREA-FEE') and (self.request.user in livre.auteurs.all())
        out_data = {
            'success': True,
            'sondages': SondageApiSerializer(livre, context={'request': self.request,'presouscription_transform':presouscription_transform}).data,
        }
        return out_data
    @allow_remote_invocation
    def follow_auteur(self, in_data):
        # Toggle the follow state between the current user and every author
        # of the book whose first author id is in_data['auteur'][0].
        user = self.request.user
        if not user.is_authenticated():
            out_data = {
                'success': False,
                'message': unicode(_("Vous devez etre authentifie pour suivre quelqu'un")),
            }
        else:
            print in_data['auteur'][0]
            print in_data['auteur'][0]
            # NOTE(review): if no active book matches, .first() returns None
            # and the next line raises AttributeError; if the author list is
            # empty, css_follow/txt_follow are never bound -> NameError.
            livre = Livre.objects.filter(auteurs__id=in_data['auteur'][0], is_active=True).first()
            for auteur in livre.auteurs.all():
                if user!=auteur:
                    f=Follow.objects.filter(qui=user,suit=auteur).first()
                    if f:
                        css_follow=f.lien.lower()
                    else:
                        css_follow="non"
                    # Without 'question' this is a toggle: AMI <-> ENN.
                    if not in_data.has_key('question'):
                        f,created=Follow.objects.get_or_create(qui=user,suit=auteur)
                        if created or f.lien=="ENN":
                            f.lien='AMI'
                        else:
                            f.lien='ENN'
                        f.save()
                        css_follow=f.lien.lower()
                    if css_follow=="non":
                        txt_follow="Suivre"
                    if css_follow=="ami":
                        txt_follow="Ne plus suivre"
                    if css_follow=="enn":
                        txt_follow="Suivre"
                else:
                    css_follow = force_text(_("non"))
                    txt_follow = force_text(_("Vous ne pouvez pas vous follower vous meme"))
            out_data = {
                'css_follow': css_follow,
                'txt_follow': txt_follow,
                'success': True,
            }
        return out_data
class StaffJsonView(JSONResponseMixin, View):
    """Staff-only JSON endpoints (sales statistics) wired to the
    LivreCtrl Angular controller.

    The three ``getStatVentes*`` methods aggregate paid orders ('PAY')
    over time buckets (hours of a day / days of a range / months of a
    range) and return a Flot-compatible series + options payload.  The
    previously triplicated aggregation loop now lives in
    ``_ventes_series``.
    """

    @method_decorator(staff_member_required)
    def dispatch(self, *args, **kwargs):
        # Restrict every endpoint of this view to staff members.
        return super(StaffJsonView, self).dispatch(*args, **kwargs)

    def _ventes_series(self, time_ranges):
        """Aggregate paid orders over ``time_ranges``.

        ``time_ranges`` is an iterable of ``(start, end)`` datetimes.
        For each bucket, the number of paid orders and the revenue are
        collected as ``[timestamp_ms, value]`` points (Flot wants epoch
        milliseconds on the x axis).

        Returns ``(serie_list, ca, nb_commandes, nb_souscriptions)``.
        """
        commandes = []
        ventes = []
        ca = 0
        nb_commandes = 0
        nb_souscriptions = 0
        for time_debut, time_fin in time_ranges:
            timestamp = calendar.timegm(time_debut.timetuple()) * 1000
            c_list = Commande.objects.filter(etat='PAY', date__gte=time_debut, date__lt=time_fin).distinct()
            total_euros = 0
            total_commandes = 0
            total_souscriptions = 0
            for commande in c_list:
                total_euros += commande.montant
                for souscription in commande.souscription_set.all():
                    total_souscriptions += souscription.quantite
                total_commandes += 1
            ca += total_euros
            nb_commandes += total_commandes
            nb_souscriptions += total_souscriptions
            commandes.append([timestamp, total_commandes])
            ventes.append([timestamp, total_euros])
        serie_list = [
            {
                'label': "commandes",
                'data': commandes,
                'yaxis': 1
            },
            {
                'label': "€",
                'data': ventes,
                'yaxis': 2
            }
        ]
        return serie_list, ca, nb_commandes, nb_souscriptions

    @allow_remote_invocation
    def getStatVentesJour(self, in_data):
        """Hourly sales statistics for the day in ``in_data['date_jour']``."""
        try:
            dt = dateutil.parser.parse(in_data['date_jour'])
        except Exception:
            # Missing or unparsable date.
            return {'success': False}
        # Buckets: the 24 hours of the requested day, Paris local time.
        localtime = dt.astimezone(pytz.timezone('Europe/Paris'))
        debut = datetime(localtime.year, localtime.month, localtime.day)
        time_ranges = [(debut + timedelta(hours=h), debut + timedelta(hours=h + 1))
                       for h in range(24)]
        serie_list, ca, nb_commandes, nb_souscriptions = self._ventes_series(time_ranges)
        options = {
            "series": {
                "lines": {
                    "show": True,
                    "fill": True
                },
                "points": {"show": True}
            },
            'axisLabels': {
                'show': True
            },
            "xaxis": {
                "mode": "time",
                "timeformat": "%Hh"
            },
            "yaxes": [
                {
                    'axisLabel': 'commandes',
                    "tickColor": ["#fff"],
                    "tickDecimals": 0,
                    "min": 0
                },
                {
                    'axisLabel': "CA",
                    "position": "right",
                    "tickDecimals": 0,
                    "min": 0
                }
            ],
            "grid": {
                "hoverable": True,
                "borderWidth": 1,
                "markings": [{"yaxis": {"from": 0, "to": 300}, "color": "#fff"}]
            },
            "colors": ["rgb(138,75,117)", "rgb(71,160,62)"],
            "tooltip": True,
            "tooltipOpts": {
                "content": "%x : %y %s"
            },
            "legend": {
                "show": True,
                "labelFormatter": None
            }
        }
        return {
            'success': True,
            'souscriptions': serie_list,
            'options': options,
            'ca': ca,
            'nb_commandes': nb_commandes,
            'nb_souscriptions': nb_souscriptions
        }

    @allow_remote_invocation
    def getStatVentesMois(self, in_data):
        """Daily sales statistics between ``date_debut`` and ``date_fin``."""
        try:
            dt_debut = dateutil.parser.parse(in_data['date_debut'])
            dt_fin = dateutil.parser.parse(in_data['date_fin'])
        except Exception:
            return {'success': False}
        tz = pytz.timezone('Europe/Paris')
        local_debut = dt_debut.astimezone(tz)
        debut = datetime(local_debut.year, local_debut.month, local_debut.day)
        local_fin = dt_fin.astimezone(tz)
        # fin is exclusive: midnight after the requested last day.
        fin = datetime(local_fin.year, local_fin.month, local_fin.day) + timedelta(days=1)
        time_ranges = []
        start = debut
        while start < fin:
            time_ranges.append((start, start + timedelta(days=1)))
            start += timedelta(days=1)
        serie_list, ca, nb_commandes, nb_souscriptions = self._ventes_series(time_ranges)
        options = {
            "series": {
                "lines": {
                    "show": True,
                    "fill": True
                },
                "points": {"show": True}
            },
            'axisLabels': {
                'show': True
            },
            "xaxis": {
                "mode": "time",
                "timeformat": "%e %b",
                "monthNames": ["jan", "fev", "mar", "avr", "mai", "juin", "juil", "aout", "sept", "oct", "nov", "dec"]
            },
            "yaxes": [
                {
                    'axisLabel': 'commandes',
                    "tickColor": ["#fff"],
                    "tickDecimals": 0,
                    "min": 0
                },
                {
                    'axisLabel': "CA",
                    "position": "right",
                    "tickColor": ["#fff"],
                    "tickDecimals": 0,
                    "min": 0
                }
            ],
            "grid": {
                "hoverable": True,
                "borderWidth": 1
            },
            "colors": ["rgb(138,75,117)", "rgb(71,160,62)"],
            "tooltip": True,
            "tooltipOpts": {
                "content": "%x : %y %s"
            },
            "legend": {
                "show": True,
                "labelFormatter": None
            }
        }
        return {
            'success': True,
            'souscriptions': serie_list,
            'options': options,
            'ca': ca,
            'nb_commandes': nb_commandes,
            'nb_souscriptions': nb_souscriptions
        }

    @allow_remote_invocation
    def getStatVentesAnnee(self, in_data):
        """Monthly sales statistics between ``date_debut`` and ``date_fin``."""
        try:
            dt_debut = dateutil.parser.parse(in_data['date_debut'])
            dt_fin = dateutil.parser.parse(in_data['date_fin'])
        except Exception:
            return {'success': False}
        tz = pytz.timezone('Europe/Paris')
        local_debut = dt_debut.astimezone(tz)
        debut = datetime(local_debut.year, local_debut.month, 1)
        local_fin = dt_fin.astimezone(tz)
        # fin is exclusive: first day of the month after the last one.
        fin = datetime(local_fin.year, local_fin.month, 1) + relativedelta(months=+1)
        time_ranges = []
        start = debut
        while start < fin:
            time_ranges.append((start, start + relativedelta(months=+1)))
            start += relativedelta(months=+1)
        serie_list, ca, nb_commandes, nb_souscriptions = self._ventes_series(time_ranges)
        options = {
            "series": {
                "lines": {
                    "show": True,
                    "fill": True
                },
                "points": {"show": True}
            },
            'axisLabels': {
                'show': True
            },
            "xaxis": {
                "mode": "time",
                "timeformat": "%b %y",
                "monthNames": ["jan", "fev", "mar", "avr", "mai", "juin", "juil", "aout", "sept", "oct", "nov", "dec"]
            },
            "yaxes": [
                {
                    'axisLabel': 'commandes',
                    "tickColor": ["#fff"],
                    "tickDecimals": 0,
                    "min": 0
                },
                {
                    'axisLabel': "CA",
                    "position": "right",
                    "tickDecimals": 0,
                    "min": 0
                }
            ],
            "grid": {
                "hoverable": True,
                "borderWidth": 1
            },
            "colors": ["rgb(138,75,117)", "rgb(71,160,62)"],
            "tooltip": True,
            "tooltipOpts": {
                "content": "%x : %y %s"
            },
            "legend": {
                "show": True,
                "labelFormatter": None
            }
        }
        return {
            'success': True,
            'souscriptions': serie_list,
            'options': options,
            'ca': ca,
            'nb_commandes': nb_commandes,
            'nb_souscriptions': nb_souscriptions
        }
class LivreFilter(django_filters.FilterSet):
    # Filter set for the book catalogue API: exact-match filtering on the
    # listed Livre fields (category, genre, workflow state/phase, the
    # per-element proposition types and the title).
    class Meta:
        model = Livre
        fields = ['category', 'genre', 'etat','phase','a_la_une',\
            'type_titres','type_prix','type_couvertures',\
            'type_extraits','type_biographies','titre']
class SouscriptionFilter(django_filters.FilterSet):
    # Filter set for the subscription listing.
    # NOTE(review): the model is Livre (books raising money), not
    # Souscription, despite the name -- confirm this is intentional.
    class Meta:
        model = Livre
        fields = ['category','a_la_une','genre','etat','phase']
class SouscriptionViewset(viewsets.ReadOnlyModelViewSet):
    # Read-only list of books currently in the crowdfunding phase
    # ('GETMONEY'), filterable and searchable on the title.
    serializer_class = SouscriptionApiSerializer
    paginate_by = 10
    paginate_by_param = 'page_size'
    max_paginate_by = 100
    filter_backends = (filters.DjangoFilterBackend,filters.SearchFilter,)
    filter_class = SouscriptionFilter
    search_fields = ('titre',)
    def get_queryset(self):
        # Active books raising money with at least 5 subscriptions,
        # newest subscription date first.  The annotation counts *all*
        # subscriptions; the extra filter keeps only books that also have
        # ongoing ('ENC') subscriptions.
        return Livre.objects.filter(is_active=True,phase='GETMONEY').annotate(nb_souscription=Count('souscription')).filter(nb_souscription__gt=4,souscription__etat='ENC').order_by('-date_souscription')
class LivreViewset(viewsets.ReadOnlyModelViewSet):
    # Public read-only book catalogue: active books only, filterable via
    # LivreFilter and searchable on the title.
    queryset = Livre.objects.filter(is_active=True)
    serializer_class = LivreApiSerializer
    paginate_by = 10
    paginate_by_param = 'page_size'
    max_paginate_by = 100
    filter_backends = (filters.DjangoFilterBackend,filters.SearchFilter,)
    filter_class = LivreFilter
    search_fields = ('titre',)
class CommandeViewset(viewsets.ReadOnlyModelViewSet):
    # Read-only order history, newest orders first.
    # NOTE(review): no per-user filtering is visible here -- confirm
    # access control is enforced elsewhere (router / permissions).
    queryset = Commande.objects.all().order_by('-date')
    serializer_class = PanierApiSerializer
    paginate_by = 10
    paginate_by_param = 'page_size'
    max_paginate_by = 100
    filter_backends = (filters.DjangoFilterBackend,)
class TagsViewset(viewsets.ReadOnlyModelViewSet):
    # Read-only, unpaginated list of all tags.
    queryset = Tag.objects.all()
    serializer_class = TagSerializer
    filter_backends = (filters.DjangoFilterBackend,)
class TimelineViewset(viewsets.ReadOnlyModelViewSet):
    """Read-only timeline entries, optionally scoped to one user.

    Without a ``user_id`` query parameter every entry is returned.  With
    one, entries authored by or shared with that user are returned;
    private entries are only visible to the user himself.
    """
    serializer_class = TimelineApiSerializer
    paginate_by = 10
    paginate_by_param = 'page_size'
    max_paginate_by = 100
    model = Timeline

    def get_queryset(self):
        try:
            # 'in' instead of the deprecated dict.has_key().
            if 'user_id' not in self.request.QUERY_PARAMS:
                return Timeline.objects.all()
            user_id = self.request.QUERY_PARAMS['user_id']
            # Raises DoesNotExist for unknown or inactive users, which the
            # except below turns into an empty timeline.  (The previous
            # extra `if user.is_active` re-check was always True here.)
            user = BiblioUser.objects.get(id=user_id, is_active=True)
            qs = Timeline.objects.filter(Q(user__id=user_id) | Q(partage__id=user_id))
            if self.request.user != user:
                # Other visitors only see the user's public entries.
                qs = qs.filter(private=False)
            return qs.order_by('-timestamp').distinct()
        except Exception:
            # Unknown user id, malformed id, etc. -> empty timeline.
            return []
class BiblioUserViewset(viewsets.ReadOnlyModelViewSet):
    # Public read-only listing of active user accounts.
    serializer_class = BiblioUserSerializer
    queryset = BiblioUser.objects.filter(is_active=True)
    paginate_by = 10
    paginate_by_param = 'page_size'
    max_paginate_by = 100
class UserFilter(django_filters.FilterSet):
    # Exact-match filtering on user email / username.
    # NOTE(review): not referenced anywhere in this chunk -- possibly
    # used elsewhere, otherwise dead code.
    class Meta:
        model = BiblioUser
        fields = ['email','username']
class BiblioStaffUserViewset(viewsets.ReadOnlyModelViewSet):
    # Staff-side user listing (includes inactive accounts), searchable
    # by email.
    serializer_class = BiblioStaffUserSerializer
    queryset = BiblioUser.objects.all()
    paginate_by = 10
    paginate_by_param = 'page_size'
    max_paginate_by = 100
    filter_backends = (filters.SearchFilter,)
    search_fields = ('email',)
class CommandeStaffViewset(viewsets.ReadOnlyModelViewSet):
    # Staff-side order listing, newest order number first; free-text
    # search across state, order number, shipping country and the
    # customer's address name fields.
    queryset = Commande.objects.all().order_by("-no_commande")
    serializer_class = CommandeSerializer
    paginate_by = 10
    paginate_by_param = 'page_size'
    max_paginate_by = 100
    filter_backends = (filters.SearchFilter,)
    search_fields = ('etat','no_commande','pays_livraison','client__adresses__nom','client__adresses__prenom')
class CommentaireStaffViewset(viewsets.ReadOnlyModelViewSet):
    # Staff-side comment listing, newest first.
    # NOTE(review): DRF SearchFilter expects text fields or related
    # lookups (e.g. 'user__email'); 'user', 'livre' and 'reponses' are
    # relations and 'date' is a date field -- confirm these searches
    # actually work as intended.
    queryset = Commentaire.objects.all().order_by("-id")
    serializer_class = CommentaireSerializer
    paginate_by = 10
    paginate_by_param = 'page_size'
    max_paginate_by = 100
    filter_backends = (filters.SearchFilter,)
    search_fields = ('id','user','contenu','date','livre','reponses')
class BiblioUserView(NgCRUDView):
    # django-angular CRUD endpoint exposing BiblioUser objects.
    model = BiblioUser
| [
"B@MacBook-Air-de-B.local"
] | B@MacBook-Air-de-B.local |
91cd296fa5741cfcebc94e7927b78d1ff38eebc5 | 030aadc06eba914dbc9f7e774d54cafd5acc0ae6 | /docker/wsgi_docker.py | 4f4d5ae871cf9698ce31dd779b010d807bd24fde | [] | no_license | torchbox/wagtail-template | 985047e917031cf033f61c0c2480870da430aa15 | 4c0cb34d28ccbc03a96ca9f1ff0499a3554ba5e6 | refs/heads/develop | 2016-09-06T14:55:28.078233 | 2015-08-11T12:03:08 | 2015-08-11T12:03:08 | 21,358,329 | 9 | 5 | null | 2015-05-06T09:29:53 | 2014-06-30T16:42:33 | Python | UTF-8 | Python | false | false | 124 | py | from whitenoise.django import DjangoWhiteNoise
from .wsgi import application
application = DjangoWhiteNoise(application)
| [
"karlhobley10@gmail.com"
] | karlhobley10@gmail.com |
4aa6b906dd40411d628bb4b7b011f06c7e98e353 | 4fda3bb30e5612ba006abb31924084e4290de136 | /Conditional_DCGANs/conditional_gans.py | 0b6dda451f4fa2a6492114ba3f16ec92e2cc2f1b | [] | no_license | dbasso98/GANs | ed3901aace99a3fddbf78fe9e97ae69eb2af3d78 | 40dcafd00d2fb5510573de7d3a866dfdd0062da7 | refs/heads/main | 2023-07-30T05:33:55.876663 | 2021-10-05T08:54:06 | 2021-10-05T08:54:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,003 | py | import argparse
import os
import numpy as np
import math
import torchvision
import torchvision.transforms as transforms
from torchvision.utils import save_image
from torch.utils.data import DataLoader
from torchvision import datasets
import torch
import torch.nn as nn
from torch.backends import cudnn
from torch import optim
img_save_path = 'images-conditional_dcgan'
os.makedirs(img_save_path, exist_ok=True)
parser = argparse.ArgumentParser(description='Our Implementation of Conditional GANs')
parser.add_argument('--num_epochs', type=int, default=50)
parser.add_argument('--batchSize', type=int, default=64, help='input batch size')
parser.add_argument('--lr', type=float, default=0.0002)
parser.add_argument('--beta1', type=float, default=0.5) # momentum1 in Adam
parser.add_argument('--beta2', type=float, default=0.999) # momentum2 in Adam
parser.add_argument('--latent_dim', type=int, default=100)
parser.add_argument('--n_classes', type=int, default=10)
parser.add_argument('--img_size', type=int, default=32)
parser.add_argument('--channels', type=int, default=1)
parser.add_argument('--sample_interval', type=int, default=400)
parser.add_argument('--log_step', type=int, default=100)
args = parser.parse_args()
C,H,W = args.channels, args.img_size, args.img_size
##### Custom weights initialization called on discrim and generator
def weights_init(m):
    """DCGAN weight initialisation, meant to be applied via ``model.apply``.

    Conv/ConvTranspose layers get N(0, 0.02) weights; BatchNorm layers get
    N(1, 0.02) weights and zero bias.  Any other module is left untouched.
    """
    layer_name = type(m).__name__
    if 'Conv' in layer_name:
        nn.init.normal_(m.weight.data, 0.0, 0.02)
    elif 'BatchNorm' in layer_name:
        nn.init.normal_(m.weight.data, 1.0, 0.02)
        nn.init.constant_(m.bias.data, 0)
##### Building block of the generator, it is made up of:
##### • A deconvolution layer;
##### • batch normalization layer;
##### • ReLU activation.
class gen_block(nn.Module):
    """Generator building block: ConvTranspose2d -> BatchNorm2d -> ReLU."""

    def __init__(self, in_channels, out_channels, stride, padding, kernel_size=4):
        super().__init__()
        upsample = nn.ConvTranspose2d(in_channels, out_channels, kernel_size,
                                      stride, padding, bias=False)
        normalize = nn.BatchNorm2d(out_channels)
        activate = nn.ReLU(inplace=True)
        # Keep the attribute name 'layers' so state-dict keys are stable.
        self.layers = nn.Sequential(upsample, normalize, activate)

    def forward(self, X):
        return self.layers(X)
##### In order to build the generator we will follow the specifics present in the
##### paper.
##### We will concatenate the 10-dimensional encoding (1 per digit) and the noise
##### to get a 110-dimensional input that will be fed to the first hidden layer.
##### In the last layer we won't apply any batch normalization and the activation
##### function that we use is a the Tanh function.
class Generator(nn.Module):
    # Conditional DCGAN generator.  The latent noise vector and the 10-d
    # one-hot class label go through separate stride-1 deconvolutions,
    # are concatenated channel-wise, then upsampled 4x4 -> 8x8 -> 16x16
    # -> 32x32.  The final Tanh maps pixels to [-1, 1].
    def __init__(self, dim_latent=args.latent_dim, base_width=128, input_ch=C):
        super().__init__()
        # noise branch: (N, dim_latent, 1, 1) -> (N, 2*base_width, 4, 4)
        self.deconv_z1 = gen_block(dim_latent, base_width*2, stride=1, padding=0)
        # label branch: (N, 10, 1, 1) -> (N, 2*base_width, 4, 4)
        self.deconv_y1 = gen_block(10, base_width*2, stride=1, padding=0)
        self.deconv_2 = gen_block(base_width*4, base_width*2, stride=2, padding=1)
        self.deconv_3 = gen_block(base_width*2, base_width, stride=2, padding=1)
        # Last layer: no batch norm and Tanh activation (DCGAN guideline).
        self.deconv_4 = nn.Sequential(
            nn.ConvTranspose2d(base_width, input_ch, kernel_size=4, stride=2, padding=1, bias=False),
            nn.Tanh()
        )
    def forward(self, X, label):
        """Generate images from noise ``X`` of shape (N, dim_latent, 1, 1)
        and one-hot ``label`` of shape (N, 10, 1, 1); returns a tensor of
        shape (N, input_ch, 32, 32)."""
        out_z = self.deconv_z1(X)
        out_y = self.deconv_y1(label)
        # Fuse the noise and label branches along the channel axis.
        out = torch.cat((out_z,out_y), dim=1)
        out = self.deconv_2(out)
        out = self.deconv_3(out)
        out = self.deconv_4(out)
        return out
##### Building block of the discriminator, it is made up of:
##### • A convolution layer;
##### • batch normalization layer;
##### • LeakyReLU activation with alpha=0.2.
class discr_block(nn.Module):
    """Discriminator building block: Conv2d (-> BatchNorm2d) -> LeakyReLU(0.2)."""

    def __init__(self, in_channels, out_channels, kernel_size=4, stride=2, padding=1, norm=True):
        super().__init__()
        modules = [nn.Conv2d(in_channels, out_channels, kernel_size,
                             stride, padding, bias=False)]
        if norm is True:
            modules.append(nn.BatchNorm2d(out_channels))
        modules.append(nn.LeakyReLU(negative_slope=0.2, inplace=True))
        # Same 'layers' attribute and module order as before, so the
        # state-dict layout is unchanged for both norm=True and norm=False.
        self.layers = nn.Sequential(*modules)

    def forward(self, X):
        return self.layers(X)
##### Discriminator follows the same idea of the generator. We can notice that in
##### this case for the last layer we've substituted the LeakyReLU with the sigmoid.
class Discriminator(nn.Module):
    # Conditional DCGAN discriminator.  The image and the label map (the
    # one-hot label expanded by the caller to (N, 10, H, W)) go through
    # separate first conv layers, are concatenated channel-wise, then
    # downsampled to a single sigmoid "real" probability (N, 1, 1, 1).
    def __init__(self, base_width=128, input_ch=C):
        super().__init__()
        # First layers use no batch norm (DCGAN guideline).
        self.conv_x1 = discr_block(input_ch, base_width//2, norm=False)
        self.conv_y1 = discr_block(10, base_width//2, norm=False)
        self.conv_2 = discr_block(base_width, base_width*2)
        self.conv_3 = discr_block(base_width*2, base_width*4)
        self.conv_4 = nn.Sequential(
            nn.Conv2d(base_width*4, 1, kernel_size=4, stride=1, padding=0),
            nn.Sigmoid()
        )
    def forward(self, X, label):
        """Score image ``X`` conditioned on label map ``label``; returns
        sigmoid probabilities of shape (N, 1, 1, 1)."""
        # NOTE(review): 'out_z' actually holds the image branch; the name
        # mirrors the generator for symmetry.
        out_z = self.conv_x1(X)
        out_y = self.conv_y1(label)
        out = torch.cat((out_z,out_y), dim=1)
        out = self.conv_2(out)
        out = self.conv_3(out)
        out = self.conv_4(out)
        return out
##### Let's load now the MNIST dataset
transform = transforms.Compose([
transforms.Resize(args.img_size),
transforms.ToTensor(),
# Normalization for better training performances
transforms.Normalize((0.5), (0.5))
])
dataloader = torch.utils.data.DataLoader(
datasets.MNIST(
"datasets",
train=True,
download=True,
transform=transform
),
batch_size=args.batchSize,
shuffle=True,
drop_last=True
)
##### Checking for GPU availability
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
cudnn.benchmark = True
##### We can initialize both generator and discriminator with random weights
##### and pass them to the GPU, if available.
generator = Generator()
generator.apply(weights_init)
generator.to(device)
discriminator = Discriminator()
discriminator.apply(weights_init)
discriminator.to(device)
##### Loss function is the usual Binary Cross Entropy
loss_fn = nn.BCELoss().to(device)
##### Let's set up also the optimizers with the correspondent hyperparameters
g_optimizer = optim.Adam(generator.parameters(), lr=args.lr, betas=(args.beta1, args.beta2))
d_optimizer = optim.Adam(discriminator.parameters(), lr=args.lr, betas=(args.beta1, args.beta2))
##### And now we can start with the training itself
generator.train()
discriminator.train()
total_step = len(dataloader)
for epoch in range(args.num_epochs):
for i, (imgs, labels) in enumerate(dataloader):
batch_size = args.batchSize
n_class = args.n_classes
img_size = args.img_size
# Defining ground truth for real and fake data
true_label = torch.full([batch_size], 1.0, dtype=torch.float).to(device)
fake_label = torch.full([batch_size], 0.0, dtype=torch.float).to(device)
imgs = imgs.to(device)
# Creating an image to pass as real one to the generator (filled with ones)
real_y = torch.zeros(batch_size, n_class)
real_y = real_y.scatter_(1, labels.view(batch_size, 1), 1).view(batch_size, n_class, 1, 1).contiguous()
real_y = real_y.expand(-1, -1, img_size, img_size).to(device)
# Generating the noise
noise = torch.randn(batch_size, args.latent_dim, 1, 1).to(device)
# Creating an image to pass as fake one to the generator (filled with zeros)
gen_labels = (torch.rand(batch_size, 1) * n_class).type(torch.LongTensor)
gen_y = torch.zeros(batch_size, n_class)
gen_y = gen_y.scatter_(1, gen_labels.view(batch_size, 1), 1).view(batch_size, n_class,1,1).to(device)
# Synthetic data from generator
synthetic_data = generator(noise, gen_y)
# Finally we can procede with the training of the discriminator
d_optimizer.zero_grad()
pred_real = discriminator(imgs, real_y)
error_real = loss_fn(pred_real.squeeze(), true_label)
gen_y_for_D = gen_y.view(batch_size, n_class, 1, 1).contiguous().expand(-1, -1, img_size, img_size)
pred_fake = discriminator(synthetic_data.detach(), gen_y_for_D)
error_fake = loss_fn(pred_fake.squeeze(), fake_label)
loss_D = (error_fake + error_real)
loss_D.backward()
d_optimizer.step()
# And then with the generator
generator.zero_grad()
pred_fake = discriminator(synthetic_data, gen_y_for_D)
loss_G = loss_fn(pred_fake.squeeze(), true_label)
loss_G.backward()
g_optimizer.step()
# print some informations
if (i + 1) % args.log_step == 0:
print(f'Epoch [{epoch+1}/{args.num_epochs}], BatchStep[{i + 1}/{total_step}], D_Real_loss: {error_real.item():.4f}, D_Fake_loss: {error_fake.item():.4f}, G_loss: {loss_G.item():.4f}')
# We can now save the output of generated image
batches_done = epoch * total_step + i
if batches_done % args.sample_interval == 0:
noise = torch.FloatTensor(np.random.normal(0, 1, (n_class**2, args.latent_dim,1,1))).to(device)
#fixed labels
y_ = torch.LongTensor(np.array([num for num in range(n_class)])).view(n_class,1).expand(-1,n_class).contiguous()
y_fixed = torch.zeros(n_class**2, n_class)
y_fixed = y_fixed.scatter_(1,y_.view(n_class**2,1),1).view(n_class**2, n_class,1,1).to(device)
gen_imgs = generator(noise, y_fixed).view(-1,C,H,W)
# saving the generated images in a grid, in the i-th row we place the i-th digit (0-9)
save_image(gen_imgs.data, img_save_path + f'/{epoch}-{batches_done}.png', nrow=n_class, normalize=True) | [
"noreply@github.com"
] | noreply@github.com |
f1a84740d0a5c3bf1ba1441ba380dc64176cbe97 | d7ad696cd1b550bb41d20f87b83c984ec7f19aa7 | /practice/design_pattern/03_abstract_factory/abstract_factory.py | 5fa712b16a1b0fb0cd9de79237fa18d370861894 | [] | no_license | mida-hub/hobby | 2947d10da7964d945e63d57b549c1dcb90ef7305 | 6e6f381e59fc2b0429fab36474d867aa3855af77 | refs/heads/master | 2022-12-21T23:33:14.857931 | 2022-12-19T16:30:34 | 2022-12-19T16:30:34 | 147,890,434 | 0 | 0 | null | 2021-03-20T04:31:58 | 2018-09-08T01:31:59 | Jupyter Notebook | UTF-8 | Python | false | false | 1,129 | py | # Abstract Factory
# abstract_factory.py
from abc import ABC, abstractmethod
class AbcItem(ABC):
    # Base class of every HTML page element: stores a caption and forces
    # subclasses to provide their own HTML rendering.
    def __init__(self, caption):
        self.caption = caption
    @abstractmethod
    def make_html(self):
        # Return this item's HTML representation (one concrete subclass
        # per output "style" in the abstract-factory pattern).
        pass
class PageItem(AbcItem):
    # A whole page: title, author and an ordered list of child items.
    # NOTE(review): __init__ does not call super().__init__, so 'caption'
    # is never set, and make_html is still abstract here -- only concrete
    # factory-specific subclasses can be instantiated.
    def __init__(self, title, author):
        self.title = title
        self.author = author
        self.content = []
    def add(self, item):
        # Append a child item (link, list, ...) to the page body.
        self.content.append(item)
    def write_html(self, file_name):
        # Render the page via the subclass's make_html and write it to
        # `file_name` as UTF-8.
        with open(file_name, 'w', encoding='utf-8') as fh:
            fh.write(self.make_html())
class LinkItem(AbcItem):
    # A hyperlink: caption plus target URL (rendering left abstract).
    def __init__(self, caption, url):
        super().__init__(caption)
        self.url = url
class ListItem(AbcItem):
    # A captioned list of child items (rendering left abstract).
    def __init__(self, caption):
        super().__init__(caption)
        self.items = []
    def add(self, item):
        # Append a child item to the list.
        self.items.append(item)
class Factory(ABC):
    # Abstract factory (GoF): one create_* method per item type; each
    # concrete factory returns a matching family of concrete items.
    @abstractmethod
    def create_page_item(self, title, author):
        pass
    @abstractmethod
    def create_link_item(self, caption ,url):
        pass
    @abstractmethod
    def create_list_item(self, caption):
        pass
| [
"rusuden0106@gmail.com"
] | rusuden0106@gmail.com |
fbfa67e13f4e3b582c3f527ca04a052856265881 | 2ebdf9ba60785636a130151ba89dba2b140c5c34 | /demo.py | 3b55281a87e7e56c16e4f786aa4ed1470fb6c565 | [] | no_license | xq222/yuanfeng | f62c0451c6142e09af68a20bbc280a7fb4dfb95e | 6714987e26f07f221d345c767307fc5e6d47631c | refs/heads/master | 2020-09-18T11:24:51.626775 | 2019-12-18T10:47:06 | 2019-12-18T10:47:06 | 217,978,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,570 | py | from selenium import webdriver
import time
driver = webdriver.Chrome()
driver.get("http://yf.a99.live/")
driver.maximize_window()
driver.find_element_by_id("username").send_keys("18055779893")
driver.find_element_by_id("pwd").send_keys("123456789")
driver.find_element_by_xpath("/html/body/div/form/input[3]").click()
time.sleep(3)
# driver.find_element_by_xpath("//*[@id='LAY-system-side-menu']/li[1]/dl/dd[3]/a").click()
time.sleep(3)
# 切换"环境"
# iframe = driver.find_elements_by_tag_name("iframe")[0]
# driver.switch_to.frame("iframe")
# driver.switch_to.frame(driver.find_element_by_xpath("//*[@id='LAY_app_body']/div[2]/iframe"))
#
# driver.find_element_by_xpath("/html/body/form/div[1]/div/input").send_keys("111")
# driver.find_element_by_xpath("/html/body/form/div[2]/div/input").send_keys("111")
# driver.find_element_by_xpath("/html/body/form/div[3]/div/input").send_keys("111")
# time.sleep(3)
# driver.find_element_by_xpath("/html/body/form/div[4]/div/button[1]").click()
#
# # 回到原始的"环境"
# driver.switch_to.default_content()
# 客户管理
driver.find_element_by_xpath("//*[@id='LAY-system-side-menu']/li[5]/a/cite").click()
time.sleep(3)
driver.find_element_by_xpath("//*[@id='LAY-system-side-menu']/li[5]/dl/dd/a").click()
# 切换"环境"
driver.switch_to.frame(driver.find_element_by_xpath("//*[@id='LAY_app_body']/div[2]/iframe"))
# iframe = driver.find_elements_by_tag_name("iframe")[0]
# driver.switch_to.frame(iframe)
time.sleep(10)
driver.find_element_by_xpath("/html/body/div[1]/div/div[1]/div/div[4]/button[3]").click()
| [
"995583710@qq.com"
] | 995583710@qq.com |
0ee4ccbc23fc537b520fdb1b1ba7646da31fa8af | aa534dd11a258dca3b0ab6c0e49355891b046d90 | /.idea/Control_Statements/if.py | 1d9d4d76a2255cf8ff7c83e4f13d0c13c5674f60 | [] | no_license | Mrpool96/Python-2020 | b0650171b6d28b1ba7769158d8875dfe9a543d12 | 122b2fc12c1fcd862d53f94f9e30e541640fdc7d | refs/heads/master | 2023-01-03T21:30:35.306675 | 2020-11-01T09:26:16 | 2020-11-01T09:26:16 | 288,363,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63 | py | num=int(input("Enter the Number:-"))
if num<200:
print(num) | [
"53513296+Mrpool96@users.noreply.github.com"
] | 53513296+Mrpool96@users.noreply.github.com |
14865293bb7ae0c559d8e3fd11d8c8fec9889319 | 40a3b9f93cb52d478845596610861a95ed12ef36 | /db.py | 1ed97d53760ae8765c1fe4f919bb7e33a4de8026 | [] | no_license | aoifebyrne/grader | 4653d8130c607f526c5ae61327272dc7dc935484 | 005d5d8a6c1c14a3ca0a93e5fc210ffbe9157f66 | refs/heads/master | 2021-01-11T18:03:10.947848 | 2017-01-24T15:49:43 | 2017-01-24T15:49:43 | 79,478,550 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,469 | py | # -*- coding: utf-8 -*-
db.define_table('course_person',
Field('course_id', requires=IS_IN_SET(['A1', 'A2', 'B1', 'B2', 'B3', 'M1', 'M2', 'M3', 'M4', 'H1', 'G1', 'C1','CS1', 'R1']), unique=True),
Field('person', 'reference auth_user'),
)
db.define_table('subject',
Field('course_id', unique=True),
Field('subject', requires=IS_IN_SET(['Art', 'Biology', 'Maths', 'History', 'Geography', 'Computer science', 'Chemistry', 'Religion'])
)
)
db.define_table('grades',
Field('teacher', 'reference auth_user', writable=False, default= auth.user_id if auth.user else None),
Field('student', 'reference auth_user'),
Field('course_id',requires=IS_IN_DB(db(db.course_person.person == auth.user_id), 'course_person.course_id')),
Field('score', type='decimal(3,0)')
)
# -------------------------------------------------------------------------
# This scaffolding model makes your app work on Google App Engine too
# File is released under public domain and you can use without limitations
# -------------------------------------------------------------------------
if request.global_settings.web2py_version < "2.14.1":
raise HTTP(500, "Requires web2py 2.13.3 or newer")
# -------------------------------------------------------------------------
# if SSL/HTTPS is properly configured and you want all HTTP requests to
# be redirected to HTTPS, uncomment the line below:
# -------------------------------------------------------------------------
# request.requires_https()
# -------------------------------------------------------------------------
# app configuration made easy. Look inside private/appconfig.ini
# -------------------------------------------------------------------------
from gluon.contrib.appconfig import AppConfig
# -------------------------------------------------------------------------
# once in production, remove reload=True to gain full speed
# -------------------------------------------------------------------------
myconf = AppConfig(reload=True)
if not request.env.web2py_runtime_gae:
# ---------------------------------------------------------------------
# if NOT running on Google App Engine use SQLite or other DB
# ---------------------------------------------------------------------
db = DAL(myconf.get('db.uri'),
pool_size=myconf.get('db.pool_size'),
migrate_enabled=myconf.get('db.migrate'),
check_reserved=['all'])
else:
# ---------------------------------------------------------------------
# connect to Google BigTable (optional 'google:datastore://namespace')
# ---------------------------------------------------------------------
db = DAL('google:datastore+ndb')
# ---------------------------------------------------------------------
# store sessions and tickets there
# ---------------------------------------------------------------------
session.connect(request, response, db=db)
# NOTE(review): this section is web2py's default "db.py" model scaffolding
# (gluon framework: response/request/auth/mail globals). It appears spliced
# from a different file than the chemvae training code below -- verify
# whether it belongs here at all.
# ---------------------------------------------------------------------
# or store session in Memcache, Redis, etc.
# from gluon.contrib.memdb import MEMDB
# from google.appengine.api.memcache import Client
# session.connect(request, response, db = MEMDB(Client()))
# ---------------------------------------------------------------------
# -------------------------------------------------------------------------
# by default give a view/generic.extension to all actions from localhost
# none otherwise. a pattern can be 'controller/function.extension'
# -------------------------------------------------------------------------
response.generic_patterns = ['*'] if request.is_local else []
# -------------------------------------------------------------------------
# choose a style for forms
# -------------------------------------------------------------------------
response.formstyle = myconf.get('forms.formstyle')  # or 'bootstrap3_stacked' or 'bootstrap2' or other
response.form_label_separator = myconf.get('forms.separator') or ''
# -------------------------------------------------------------------------
# (optional) optimize handling of static files
# -------------------------------------------------------------------------
# response.optimize_css = 'concat,minify,inline'
# response.optimize_js = 'concat,minify,inline'
# -------------------------------------------------------------------------
# (optional) static assets folder versioning
# -------------------------------------------------------------------------
# response.static_version = '0.0.0'
# -------------------------------------------------------------------------
# Here is sample code if you need for
# - email capabilities
# - authentication (registration, login, logout, ... )
# - authorization (role based authorization)
# - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
# - old style crud actions
# (more options discussed in gluon/tools.py)
# -------------------------------------------------------------------------
from gluon.tools import Auth, Service, PluginManager
# host names must be a list of allowed host names (glob syntax allowed)
auth = Auth(db, host_names=myconf.get('host.names'))
service = Service()
plugins = PluginManager()
# -------------------------------------------------------------------------
# create all tables needed by auth if not custom tables
# -------------------------------------------------------------------------
# NOTE: must run after Auth() is constructed and before any table that
# references auth tables is defined.
auth.define_tables(username=False, signature=False)
# -------------------------------------------------------------------------
# configure email
# -------------------------------------------------------------------------
# 'logging' backend just logs outgoing mail when running locally.
mail = auth.settings.mailer
mail.settings.server = 'logging' if request.is_local else myconf.get('smtp.server')
mail.settings.sender = myconf.get('smtp.sender')
mail.settings.login = myconf.get('smtp.login')
mail.settings.tls = myconf.get('smtp.tls') or False
mail.settings.ssl = myconf.get('smtp.ssl') or False
# -------------------------------------------------------------------------
# configure auth policy
# -------------------------------------------------------------------------
auth.settings.registration_requires_verification = False
auth.settings.registration_requires_approval = False
auth.settings.reset_password_requires_verification = True
# -------------------------------------------------------------------------
# Define your tables below (or better in another model file) for example
#
# >>> db.define_table('mytable', Field('myfield', 'string'))
#
# Fields can be 'string','text','password','integer','double','boolean'
# 'date','time','datetime','blob','upload', 'reference TABLENAME'
# There is an implicit 'id integer autoincrement' field
# Consult manual for more options, validators, etc.
#
# More API examples for controllers:
#
# >>> db.mytable.insert(myfield='value')
# >>> rows = db(db.mytable.myfield == 'value').select(db.mytable.ALL)
# >>> for row in rows: print row.id, row.myfield
# -------------------------------------------------------------------------
# -------------------------------------------------------------------------
# after defining tables, uncomment below to enable auditing
# -------------------------------------------------------------------------
# auth.enable_record_versioning(db)
| [
"noreply@github.com"
] | noreply@github.com |
daf0dae020d433fa831bceb033eab239d48c9455 | c7a849ccc87cd3922c930df74b3e2c693cff9eb0 | /chemvae/train_vae.py | b74184c4f9d1859f147fe174a457d15aa7c4cab7 | [
"Apache-2.0"
] | permissive | dung98pt/chemical_vae-branch | f82990df25d25bc25f35d60be248b5cabcb39a69 | a1d64ad9177902eff8903bf74f6c2cc1251ef333 | refs/heads/master | 2023-04-03T00:17:27.368193 | 2019-11-08T12:23:12 | 2019-11-08T12:23:12 | 219,891,713 | 0 | 0 | Apache-2.0 | 2023-03-24T21:55:11 | 2019-11-06T02:08:15 | Python | UTF-8 | Python | false | false | 12,431 | py | """
This version of autoencoder is able to save weights and load weights for the
encoder and decoder portions of the network
"""
# from gpu_utils import pick_gpu_lowest_memory
# gpu_free_number = str(pick_gpu_lowest_memory())
#
# import os
# os.environ['CUDA_VISIBLE_DEVICES'] = '{}'.format(gpu_free_number)
import argparse
import numpy as np
import tensorflow as tf
# Limit TF GPU memory usage (50% cap, grow-as-needed).
# NOTE(review): this `config` is never passed to a tf.Session /
# K.set_session anywhere in this file chunk, so it has no effect as
# written -- confirm whether a set_session call was lost.
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.5
config.gpu_options.allow_growth = True
import yaml
import time
import os
from keras import backend as K
from keras.models import Model
from keras.optimizers import SGD, Adam, RMSprop
import hyperparameters
import mol_utils as mu
import mol_callbacks as mol_cb
from keras.callbacks import CSVLogger
from models import encoder_model, load_encoder
from models import decoder_model, load_decoder
from models import property_predictor_model, load_property_predictor
from models import variational_layers
from functools import partial
from keras.layers import Lambda
from keras.utils import to_categorical
# NOTE(review): duplicate import -- numpy is already imported above.
import numpy as np
# Character -> class-index vocabulary for ZINC SMILES strings
# (35 symbols; ' ' at index 34 is the right-padding symbol).
DICT = {'5': 29, '=': 22, 'N': 31, 'l': 16, 'H': 18, ']': 3, '@': 21, '6': 1, 'O': 17, 'c': 19, '2': 27, '8': 25, '3': 4, '7': 0, 'I': 15, 'C': 26, 'F': 28, '-': 7, 'P': 24, '/': 9, ')': 13, ' ': 34, '#': 14, 'r': 30, '\\': 33, '1': 20, 'n': 23, '+': 32, '[': 12, 'o': 2, 's': 5, '4': 11, 'S': 8, '(': 6, 'B': 10}
# NOTE(review): this module-level name shadows the builtin str() and looks
# like leftover debug data (it is unused in this file) -- consider removing.
str = "CC(C)(C)c1ccc2occ(CC(=O)Nc3ccccc3F)c2c1"
def one_hot(str, LEN_MAX = 120):
    """One-hot encode a SMILES string as a (LEN_MAX, 35) float32 matrix.

    Args:
        str: SMILES string. (Parameter name kept for backward compatibility
            with existing callers even though it shadows the builtin.)
        LEN_MAX: fixed sequence length; shorter strings are right-padded
            with ' ' (index 34 in DICT). Longer strings are NOT truncated,
            matching the original behavior.

    Returns:
        numpy.ndarray of shape (max(len(str), LEN_MAX), len(DICT)), float32,
        with exactly one 1.0 per row.

    Raises:
        KeyError: if the string contains a character not present in DICT.
    """
    chars = list(str)
    if len(chars) < LEN_MAX:
        chars.extend(' ' * (LEN_MAX - len(chars)))
    indices = [DICT[char] for char in chars]
    # Bug fix: the original called to_categorical(indices) without
    # num_classes, so the matrix width depended on the largest index present
    # in this particular string (e.g. "C"*120 would yield width 27, not 35).
    # Indexing an identity matrix pins the class count to len(DICT) and
    # matches to_categorical's default float32 dtype.
    return np.eye(len(DICT), dtype=np.float32)[indices]
import pandas as pd
def load_data():
    """Load the ZINC-250k SMILES dataset and one-hot encode it.

    Reads '250k_rndm_zinc_drugs_clean_3.csv' from the working directory,
    skips the header row, encodes each SMILES (dropping its trailing
    newline character) and splits the result into train/validation sets.

    Returns:
        (X_train, X_val): numpy arrays of shape (samples, 120, 35).

    NOTE(review): only the first 1/20 of the dataset is used -- presumably
    to shorten training; verify before a full run. Also, X_test is computed
    below but never returned.
    """
    # Bug fix: the original read "def load_data:" (missing parentheses),
    # which is a SyntaxError.
    link1 = '250k_rndm_zinc_drugs_clean_3.csv'
    # The file has a header line; names= forces column labels and the [1:]
    # slice below drops the header row itself.
    df1 = pd.read_csv(link1, delimiter=',', names=['smiles', '1', '2', '3'])
    smiles = list(df1.smiles)[1:]
    X = []
    for smile in smiles:
        try:
            # smile[:-1] drops the trailing '\n' kept in the csv column.
            X.append(one_hot(smile[:-1]))
        except KeyError:
            # Bug fix: was a bare "except:" printing a joke message; only a
            # character missing from DICT can fail here, so catch KeyError
            # and report which SMILES was skipped.
            print("skipping SMILES with character not in DICT: %r" % smile)
    X = np.array(X)
    print(X.shape)
    # Use only the first 1/20 of the data, split 80/20 into train/val.
    # (Renamed from "id"/"idx" to avoid shadowing the builtin id().)
    subset = int(X.shape[0] / 20)
    split = int(subset * 0.8)
    X_train = X[:split, :, :]
    X_val = X[split:subset, :, :]
    X_test = X[subset:subset + 100, :, :]  # computed but not returned
    print(X_train.shape)
    print(X_test.shape)
    return X_train, X_val
def load_models(params):
    """Build (or reload) the encoder/decoder and wire the VAE graph.

    Args:
        params: hyperparameter dict; keys used here include
            'reload_model', 'kl_loss_weight', 'do_tgru', 'do_prop_pred',
            'reg_prop_tasks', 'logit_prop_tasks'.

    Returns:
        Without property prediction:
            (AE_only_model, encoder, decoder, kl_loss_var)
        With property prediction:
            (AE_only_model, AE_PP_model, encoder, decoder,
             property_predictor, kl_loss_var)

    Raises:
        ValueError: if property prediction is requested but neither
            regression nor logit tasks are configured.
    """
    def identity(x):
        # Used only to attach stable names to the output tensors.
        return K.identity(x)

    # KL weight as a Keras variable so callbacks can anneal it during training.
    kl_weight = K.variable(params['kl_loss_weight'])

    # Either restore saved submodels or build fresh ones.
    if params['reload_model'] == True:
        encoder = load_encoder(params)
        decoder = load_decoder(params)
    else:
        encoder = encoder_model(params)
        decoder = decoder_model(params)

    x_in = encoder.inputs[0]
    z_mean, enc_extra = encoder(x_in)
    z_samp, z_mean_log_var = variational_layers(z_mean, enc_extra, kl_weight, params)

    # The terminal GRU decoder additionally consumes the original input.
    decoder_input = [z_samp, x_in] if params['do_tgru'] else z_samp
    x_out = Lambda(identity, name='x_pred')(decoder(decoder_input))

    # NOTE: this list object is shared with AE_only_model and extended below
    # in the property-prediction path, mirroring the original construction
    # order exactly.
    outputs = [x_out, z_mean_log_var]
    ae_model = Model(x_in, outputs)

    if not params['do_prop_pred']:
        return ae_model, encoder, decoder, kl_weight

    if params['reload_model'] == True:
        property_predictor = load_property_predictor(params)
    else:
        property_predictor = property_predictor_model(params)

    has_reg = ('reg_prop_tasks' in params) and (len(params['reg_prop_tasks']) > 0)
    has_logit = ('logit_prop_tasks' in params) and (len(params['logit_prop_tasks']) > 0)
    if has_reg and has_logit:
        reg_pred, logit_pred = property_predictor(z_mean)
        reg_pred = Lambda(identity, name='reg_prop_pred')(reg_pred)
        logit_pred = Lambda(identity, name='logit_prop_pred')(logit_pred)
        outputs.extend([reg_pred, logit_pred])
    elif has_reg:
        # regression only scenario
        reg_pred = Lambda(identity, name='reg_prop_pred')(property_predictor(z_mean))
        outputs.append(reg_pred)
    elif has_logit:
        # logit only scenario
        logit_pred = Lambda(identity, name='logit_prop_pred')(property_predictor(z_mean))
        outputs.append(logit_pred)
    else:
        raise ValueError('no logit tasks or regression tasks specified for property prediction')

    ae_pp_model = Model(x_in, outputs)
    return ae_model, ae_pp_model, encoder, decoder, property_predictor, kl_weight
def kl_loss(truth_dummy, x_mean_log_var_output):
    """Keras loss: KL divergence between q(z|x) and the unit Gaussian prior.

    Args:
        truth_dummy: ignored (Keras losses receive y_true; an all-ones dummy
            target is supplied at fit time).
        x_mean_log_var_output: tensor of shape (batch, 2*hidden_dim) holding
            the latent means concatenated with the latent log-variances.

    Returns:
        Per-sample KL divergence tensor.
    """
    # Split the concatenated tensor back into mean / log-variance halves.
    z_mean, z_log_var = tf.split(x_mean_log_var_output, 2, axis=1)
    # Improvements over the original: removed the debug print executed at
    # graph-construction time, and stopped shadowing the function name with
    # a same-named local variable.
    return -0.5 * K.mean(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var),
                         axis=-1)
def main_no_prop(params):
    """Train the plain autoencoder (no property predictor).

    Loads the ZINC data, builds and compiles the AE model, fits it with the
    KL-annealing callback, and saves encoder/decoder weights to the paths
    given in `params`.

    Args:
        params: hyperparameter dict (see hyperparameters.load_params).

    Raises:
        NotImplementedError: if params['optim'] is not one of
            'adam' / 'rmsprop' / 'sgd'.
    """
    start_time = time.time()
    # Bug fix: load_data is a function and must be called -- the original
    # read "X_train, X_test = load_data" (no parentheses), which would try
    # to unpack the function object itself.
    # NOTE(review): load_data returns (train, validation); the second value
    # serves as the Keras validation set despite the X_test name.
    X_train, X_test = load_data()
    AE_only_model, encoder, decoder, kl_loss_var = load_models(params)
    # compile models
    if params['optim'] == 'adam':
        optim = Adam(lr=params['lr'], beta_1=params['momentum'])
    elif params['optim'] == 'rmsprop':
        optim = RMSprop(lr=params['lr'], rho=params['momentum'])
    elif params['optim'] == 'sgd':
        optim = SGD(lr=params['lr'], momentum=params['momentum'])
    else:
        # Bug fix: "raise NotImplemented(...)" raises a TypeError because
        # NotImplemented is a sentinel value, not an exception class.
        raise NotImplementedError("Please define valid optimizer")
    model_losses = {'x_pred': params['loss'],
                    'z_mean_log_var': kl_loss}
    # vae metrics, callbacks: sigmoid schedule that anneals the KL weight.
    vae_sig_schedule = partial(mol_cb.sigmoid_schedule, slope=params['anneal_sigmod_slope'],
                               start=params['vae_annealer_start'])
    vae_anneal_callback = mol_cb.WeightAnnealer_epoch(
            vae_sig_schedule, kl_loss_var, params['kl_loss_weight'], 'vae')
    csv_clb = CSVLogger(params["history_file"], append=False)
    callbacks = [vae_anneal_callback, csv_clb]

    def vae_anneal_metric(y_true, y_pred):
        # Exposes the current KL weight as a training metric.
        return kl_loss_var

    xent_loss_weight = K.variable(params['xent_loss_weight'])
    # Dummy all-ones targets for the KL output; kl_loss ignores y_true.
    model_train_targets = {'x_pred': X_train,
                           'z_mean_log_var': np.ones((np.shape(X_train)[0], params['hidden_dim'] * 2))}
    model_test_targets = {'x_pred': X_test,
                          'z_mean_log_var': np.ones((np.shape(X_test)[0], params['hidden_dim'] * 2))}
    AE_only_model.compile(loss=model_losses,
                          loss_weights=[xent_loss_weight,
                                        kl_loss_var],
                          optimizer=optim,
                          metrics={'x_pred': ['categorical_accuracy', vae_anneal_metric]}
                          )
    keras_verbose = params['verbose_print']
    # (Removed the original's debug prints of the raw training arrays.)
    AE_only_model.fit(X_train, model_train_targets,
                      batch_size=params['batch_size'],
                      epochs=params['epochs'],
                      initial_epoch=params['prev_epochs'],
                      callbacks=callbacks,
                      verbose=keras_verbose,
                      validation_data=[X_test, model_test_targets]
                      )
    encoder.save(params['encoder_weights_file'])
    decoder.save(params['decoder_weights_file'])
    print('time of run : ', time.time() - start_time)
    print('**FINISHED**')
    print(encoder.summary())
    print(decoder.summary())
    print(AE_only_model.summary())
    return
def main_property_run(params):
    """Train the joint VAE + property-predictor model.

    Builds the combined model, weights the reconstruction / KL / property
    losses per `params`, fits with annealing and optional checkpoint
    callbacks, then saves encoder, decoder and predictor weights.

    Args:
        params: hyperparameter dict (see hyperparameters.load_params).

    Raises:
        NotImplementedError: if params['optim'] is not one of
            'adam' / 'rmsprop' / 'sgd'.
    """
    start_time = time.time()
    # load data
    # NOTE(review): vectorize_data is not imported or defined anywhere in
    # this file -- this call will raise NameError. Confirm the intended
    # import (it likely lives in mol_utils) before using the property path.
    X_train, X_test, Y_train, Y_test = vectorize_data(params)
    # load full models:
    AE_only_model, AE_PP_model, encoder, decoder, property_predictor, kl_loss_var = load_models(params)
    # compile models
    if params['optim'] == 'adam':
        optim = Adam(lr=params['lr'], beta_1=params['momentum'])
    elif params['optim'] == 'rmsprop':
        optim = RMSprop(lr=params['lr'], rho=params['momentum'])
    elif params['optim'] == 'sgd':
        optim = SGD(lr=params['lr'], momentum=params['momentum'])
    else:
        # Bug fix: "raise NotImplemented(...)" raises a TypeError because
        # NotImplemented is a sentinel value, not an exception class.
        raise NotImplementedError("Please define valid optimizer")
    # Dummy all-ones targets for the KL output; kl_loss ignores y_true.
    model_train_targets = {'x_pred': X_train,
                           'z_mean_log_var': np.ones((np.shape(X_train)[0], params['hidden_dim'] * 2))}
    model_test_targets = {'x_pred': X_test,
                          'z_mean_log_var': np.ones((np.shape(X_test)[0], params['hidden_dim'] * 2))}
    model_losses = {'x_pred': params['loss'],
                    'z_mean_log_var': kl_loss}
    # Split total weight between the AE terms and the property-prediction term.
    xent_loss_weight = K.variable(params['xent_loss_weight'])
    ae_loss_weight = 1. - params['prop_pred_loss_weight']
    model_loss_weights = {
        'x_pred': ae_loss_weight * xent_loss_weight,
        'z_mean_log_var': ae_loss_weight * kl_loss_var}
    prop_pred_loss_weight = params['prop_pred_loss_weight']
    # Wire up regression and/or logit property targets, matching the output
    # ordering chosen in load_models.
    if ('reg_prop_tasks' in params) and (len(params['reg_prop_tasks']) > 0):
        model_train_targets['reg_prop_pred'] = Y_train[0]
        model_test_targets['reg_prop_pred'] = Y_test[0]
        model_losses['reg_prop_pred'] = params['reg_prop_pred_loss']
        model_loss_weights['reg_prop_pred'] = prop_pred_loss_weight
    if ('logit_prop_tasks' in params) and (len(params['logit_prop_tasks']) > 0):
        # When both task kinds exist the logit targets come second in Y.
        if ('reg_prop_tasks' in params) and (len(params['reg_prop_tasks']) > 0):
            model_train_targets['logit_prop_pred'] = Y_train[1]
            model_test_targets['logit_prop_pred'] = Y_test[1]
        else:
            model_train_targets['logit_prop_pred'] = Y_train[0]
            model_test_targets['logit_prop_pred'] = Y_test[0]
        model_losses['logit_prop_pred'] = params['logit_prop_pred_loss']
        model_loss_weights['logit_prop_pred'] = prop_pred_loss_weight
    # vae metrics, callbacks: sigmoid schedule that anneals the KL weight.
    vae_sig_schedule = partial(mol_cb.sigmoid_schedule, slope=params['anneal_sigmod_slope'],
                               start=params['vae_annealer_start'])
    vae_anneal_callback = mol_cb.WeightAnnealer_epoch(
            vae_sig_schedule, kl_loss_var, params['kl_loss_weight'], 'vae')
    csv_clb = CSVLogger(params["history_file"], append=False)
    callbacks = [vae_anneal_callback, csv_clb]

    def vae_anneal_metric(y_true, y_pred):
        # Exposes the current KL weight as a training metric.
        return kl_loss_var

    # control verbose output
    keras_verbose = params['verbose_print']
    if 'checkpoint_path' in params.keys():
        callbacks.append(mol_cb.EncoderDecoderCheckpoint(encoder, decoder,
                                                         params=params, prop_pred_model=property_predictor, save_best_only=False))
    AE_PP_model.compile(loss=model_losses,
                        loss_weights=model_loss_weights,
                        optimizer=optim,
                        metrics={'x_pred': ['categorical_accuracy',
                                            vae_anneal_metric]})
    AE_PP_model.fit(X_train, model_train_targets,
                    batch_size=params['batch_size'],
                    epochs=params['epochs'],
                    initial_epoch=params['prev_epochs'],
                    callbacks=callbacks,
                    verbose=keras_verbose,
                    validation_data=[X_test, model_test_targets]
                    )
    encoder.save(params['encoder_weights_file'])
    decoder.save(params['decoder_weights_file'])
    property_predictor.save(params['prop_pred_weights_file'])
    print('time of run : ', time.time() - start_time)
    print('**FINISHED**')
    return
if __name__ == "__main__":
    # Command-line entry point: parse experiment-file / directory options,
    # load hyperparameters and dispatch to the matching training routine.
    cli = argparse.ArgumentParser()
    cli.add_argument('-e', '--exp_file',
                     help='experiment file', default='exp.json')
    cli.add_argument('-d', '--directory',
                     help='exp directory', default='/home/ntd/Downloads/chemical_vae-master/models/zinc')
    opts = vars(cli.parse_args())
    if opts['directory'] is not None:
        # Resolve the experiment file relative to the given directory.
        opts['exp_file'] = os.path.join(opts['directory'], opts['exp_file'])
    params = hyperparameters.load_params(opts['exp_file'])
    print("All params:", params)
    if params['do_prop_pred']:
        main_property_run(params)
    else:
        main_no_prop(params)
| [
"dung98pt@gmail.com"
] | dung98pt@gmail.com |
5224ea480b5920a91b6d02c5fb96e32077e150ff | 0342e079cfd055b1ca26ca2a9963d6daa5260720 | /dev/egocentriccoord.py | 8b75f5a19464c7640d3c78cf2eccbf06100ccef6 | [] | no_license | rltonoli/MScTonoli | 9de864f32831c96146a2f388044b9b4d25beebe9 | eab52c01c45025daa1b2ef11861a2b66ab205638 | refs/heads/master | 2023-04-19T07:33:07.025597 | 2021-05-03T20:15:33 | 2021-05-03T20:15:33 | 228,877,207 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 95,011 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Feb 21 21:44:27 2019
@author: Rodolfo L. Tonoli
"""
import numpy as np
import mathutils
import time
import skeletonmap
class EgocentricCoordinate:
"""
Objects of this class holds the egocentric coordinates of a joint. It contains the joint, its name,
and a list of reference (length = frame) for the coordinates data of that joint for every frame.
"""
egolist = []
def __init__(self, joint, frame):
self.joint = joint
self.name = joint.name
self.egolist.append(self)
self.target = []
self.frame = frame
self.importance = [] #lambda
self.refpoint = [] #x
self.dispvector = [] #v
self.normcoef = [] #C
self.angle = [] #B
self.distroot = [] #path distance to root
self.triangle = [] #triangulo associado a essa coordenada
self.normal = []
self.targets = []
self.tau = []#debbug tau
self.ortho = [] #debbug importance
self.proxi = [] #debbug importance
# def reset(self):
# """
# Clear all the coordinate data of every frame, but not this class instance
# """
# self.framecoord = []
# def addCoordFrame(self, frame):
# """
# Create a CoordFrame object to hold the egocentric coordinate data for a new frame
# """
# coord = CoordFrame(frame)
# self.framecoord.append(coord)
# return coord
# def getCoordFrame(self, framedesired):
# """
# Return the CoordFrame object that holds the data in the frame desired
# """
# if self.framecoord[framedesired].frame == framedesired:
# return self.framecoord[framedesired]
# for coord in self.framecoord:
# if coord.frame == framedesired:
# return coord
def getTarget(self, frame):
# coord = self.getCoordFrame(frame)
# if coord:
return self.importance.dot(self.targets)
# else:
# raise Exception('Egocentric Coordinates unavailable for this frame')
# @classmethod
# def getCoord(cls, jointname):
# for ego in cls.egolist:
# if jointname == ego.name:
# return ego
# print('Egocentric Coordinates not found')
@classmethod
def clean(cls):
cls.egolist = []
# class CoordFrame:
# def __init__(self, frame):
def getVectors(animation, frame):
"""
Get vectors to calculate the kinematic path
:type animation: pyanimation.Animation
:param animation: Animation (skeleton) to get the distance between mapped joints
"""
skmap = animation.getskeletonmap()
lvec_fore = skmap.vecLForearm(frame)
rvec_fore = skmap.vecRForearm(frame)
lvec_arm = skmap.vecLArm(frame)
rvec_arm = skmap.vecRArm(frame)
lvec_clavicle = skmap.vecLClavicle(frame)
rvec_clavicle = skmap.vecRClavicle(frame)
vec_neck = skmap.vecNeck(frame)
vec_spine = skmap.vecSpine(frame)
lvec_femur = skmap.vecLFemur(frame)
rvec_femur = skmap.vecRFemur(frame)
lvec_upleg = skmap.vecLUpleg(frame)
rvec_upleg = skmap.vecRUpleg(frame)
lvec_lowleg = skmap.vecLLowleg(frame)
rvec_lowleg = skmap.vecRLowleg(frame)
return lvec_fore, rvec_fore, lvec_arm, rvec_arm, lvec_clavicle, rvec_clavicle, vec_neck, vec_spine, lvec_femur, rvec_femur, lvec_upleg, rvec_upleg, lvec_lowleg, rvec_lowleg
def getJointsPositions(animation, frame):
skmap = animation.getskeletonmap()
jointlist = skmap.getJointsNoRoot()
positions = []
for joint in jointlist:
if joint:
positions.append(joint.getPosition(frame))
else:
positions.append(None)
#pos_hips, pos_spine, pos_spine1, pos_spine2, pos_spine3, pos_neck, pos_neck1, pos_head, pos_lshoulder,pos_larm, pos_lforearm, pos_lhand, pos_rshoulder, pos_rarm, pos_rforearm, pos_rhand, pos_lupleg, pos_llowleg, pos_lfoot, pos_rupleg, pos_rlowleg, pos_rfoot
#print(positions)
return positions
def getMeshPositions(animation, surface, frame):
mesh = [[triangle[0].getPosition(animation, frame) ,triangle[1].getPosition(animation, frame),triangle[2].getPosition(animation, frame)] for triangle in surface.headmesh+surface.bodymesh]
return mesh
def AdjustExtremityOrientation(animation, surface, ego, sourceanim, frame):
# TODO: NOT WORKING
#O calculo da superficie parece estar OK, então acredito que o erro esteja aqui
lhand, rhand = animation.getskeletonmap().lhand, animation.getskeletonmap().rhand
lfoot, rfoot = animation.getskeletonmap().lfoot, animation.getskeletonmap().rfoot
headmesh = surface.headmesh
bodymesh = surface.bodymesh
start=time.time()
#print('Adjusting extremities orientation')
#for frame in range(animation.frames):
vectors = getVectors(animation, frame)
jointpositions = getJointsPositions(animation, frame)
lvec_fore, rvec_fore, lvec_arm, rvec_arm, lvec_clavicle, rvec_clavicle, vec_neck, vec_spine, lvec_femur, rvec_femur, lvec_upleg, rvec_upleg, lvec_lowleg, rvec_lowleg = vectors
# if np.mod(frame+1,100) == 0:
# print('%i frames done. %s seconds.' % (int((frame+1)/100)*100,time.time()-start))
# start=time.time()
for joint,egoindex in zip([rhand, lhand], range(2)):
#Get the ego coordinates of the srcAnim animation joint
# aux_jointname = skeletonmap.getmatchingjoint(joint.name, sourceanim).name
# ego = EgocentricCoordinate.egolist[egoindex].getCoordFrame(frame)
ego = EgocentricCoordinate.egolist[egoindex]
currentJointSurfaceNormal = extremityNormal(animation, joint, frame)
# if frame==170:
# print('Current Joint Surface Normal:')
# print(currentJointSurfaceNormal)
# print('Components Surface Normal:')
newJointSurfaceNormals = []
for i in range(len(bodymesh)+len(headmesh)):
if i<len(headmesh):
_, componentSurfaceNormal = mathutils.getCentroid(headmesh[i][0].getPosition(animation, frame),headmesh[i][1].getPosition(animation, frame), headmesh[i][2].getPosition(animation, frame))
else:
j = i-len(headmesh)
_, componentSurfaceNormal = mathutils.getCentroid(bodymesh[j][0].getPosition(animation, frame),bodymesh[j][1].getPosition(animation, frame), bodymesh[j][2].getPosition(animation, frame))
#Get the axis of rotation to align the component surface normal
axis = np.cross(componentSurfaceNormal,currentJointSurfaceNormal)
axis_norm = axis/np.linalg.norm(axis)
#Rotate the component surface normal and get a joint surface normal regarding that component
matrix = mathutils.matrixRotation(ego.angle[i]*180/np.pi, axis_norm[0],axis_norm[1],axis_norm[2], shape=3)
newJointSurfaceNormals.append(np.dot(matrix, componentSurfaceNormal))
# if frame==170:
# print(newJointSurfaceNormals[-1])
# for values in DenormEgoLimb(joint, animation, surface, frame, vectors, jointpositions, ego, i+1):
# _, _, _, componentSurfaceNormal = values
# i = i+1
# #Get the axis of rotation to align the component surface normal
# axis = np.cross(componentSurfaceNormal,currentJointSurfaceNormal)
# axis_norm = axis/np.linalg.norm(axis)
# #Rotate the component surface normal and get a joint surface normal regarding that component
# matrix = mathutils.matrixRotation(ego.angle[i]*180/np.pi, axis_norm[0],axis_norm[1],axis_norm[2], shape=3)
# newJointSurfaceNormals.append(np.dot(matrix, componentSurfaceNormal))
if joint == rfoot or joint == lfoot:
#Handle foot contact
componentSurfaceNormal = [0,1,0]
#Get the axis of rotation to align the component surface normal
axis = np.cross(componentSurfaceNormal,currentJointSurfaceNormal)
axis_norm = axis/np.linalg.norm(axis)
#Rotate the component surface normal and get a joint surface normal regarding that component
matrix = mathutils.matrixRotation(ego.angle[-1]*180/np.pi, axis_norm[0],axis_norm[1],axis_norm[2], shape=3)
newJointSurfaceNormals.append(np.dot(matrix, componentSurfaceNormal))
# if frame == 170:
# print('Soma:')
# print((np.asarray(newJointSurfaceNormals)*ego.importance[:,None]).sum(axis=0))
#Get the mean of the new joint surface normals
normals = np.asarray(newJointSurfaceNormals)
importance = ego.importance[:len(normals),None]/ego.importance[:len(normals),None].sum()
newJointSurfaceNormal = (normals*importance).sum(axis=0)
#Get the matrix to rotate the current joint surface normal to the new one
matrix = mathutils.alignVectors(currentJointSurfaceNormal, newJointSurfaceNormal)
#Apply this rotation to the joint:
#Get global rotation matrix
glbRotationMat = mathutils.shape4ToShape3(joint.getGlobalTransform(frame))
#Rotate joint
newGblRotationMat = np.dot(matrix, glbRotationMat)
#Get new local rotation matrix
parentGblRotationMat = mathutils.shape4ToShape3(joint.parent.getGlobalTransform(frame))
newLclRotationMat = np.dot(parentGblRotationMat.T, newGblRotationMat)
#Get new local rotation euler angles
newAngle, warning = mathutils.eulerFromMatrix(newLclRotationMat, joint.order)
#joint.rotation[frame] = newAngle[:]
joint.setRotation(frame, newAngle[:])
def AdjustExtremityOrientation2(animation, sourceanim):
# TODO: NOT WORKING
#O calculo da superficie parece estar OK, então acredito que o erro esteja aqui
lhand, rhand = animation.getskeletonmap().lhand, animation.getskeletonmap().rhand
lfoot, rfoot = animation.getskeletonmap().lfoot, animation.getskeletonmap().rfoot
srclhand, srcrhand = sourceanim.getskeletonmap().lhand, sourceanim.getskeletonmap().rhand
start=time.time()
print('Adjusting extremities orientation')
for frame in range(animation.frames):
if np.mod(frame+1,100) == 0:
print('%i frames done. %s seconds.' % (int((frame+1)/100)*100,time.time()-start))
start=time.time()
for joint, srcjoint in zip([rhand, lhand], [srcrhand, srclhand]):
srcNormal = extremityNormal(sourceanim, srcjoint, frame)
currentNormal = extremityNormal(animation, joint, frame)
matrix = mathutils.alignVectors(currentNormal, srcNormal)
#Apply this rotation to the joint:
#Get global rotation matrix
glbRotationMat = mathutils.shape4ToShape3(joint.getGlobalTransform(frame))
#Rotate joint
newGblRotationMat = np.dot(matrix, glbRotationMat)
#Get new local rotation matrix
parentGblRotationMat = mathutils.shape4ToShape3(joint.parent.getGlobalTransform(frame))
newLclRotationMat = np.dot(parentGblRotationMat.T, newGblRotationMat)
#Get new local rotation euler angles
newAngle, warning = mathutils.eulerFromMatrix(newLclRotationMat, joint.order)
#joint.rotation[frame] = newAngle[:]
joint.setRotation(frame, newAngle[:])
def DenormEgoLimb(joint, animation, surface, frame, vectors, jointpositions, egocoord, index):
"""
Denormalize egocentric coordinates for the Limbs
"""
assert joint is not None
assert animation is not None
assert surface is not None
assert frame is not None
assert vectors is not None
assert index is not None
lvec_fore, rvec_fore, lvec_arm, rvec_arm, lvec_clavicle, rvec_clavicle, vec_neck, vec_spine, lvec_femur, rvec_femur, lvec_upleg, rvec_upleg, lvec_lowleg, rvec_lowleg = vectors
p_hips, p_spine, p_spine1, p_spine2, p_spine3, p_neck, p_neck1, p_head, p_lshoulder,p_larm, p_lforearm, p_lhand, p_rshoulder, p_rarm, p_rforearm, p_rhand, p_lupleg, p_llowleg, p_lfoot, p_rupleg, p_rlowleg, p_rfoot = jointpositions
if joint == animation.getskeletonmap().rhand:
#Right hand in respect to
#LEFT FOREARM LIMB
p0 = p_lforearm
p1 = p_lhand
r = surface.getPoint('foreLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [lvec_arm, lvec_clavicle, rvec_clavicle, rvec_arm, rvec_fore]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT ARM LIMB
index += 1
p0 = p_larm
p1 = p_lforearm
r = surface.getPoint('armLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [lvec_clavicle, rvec_clavicle, rvec_arm, rvec_fore]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT LOW LEG LIMB
index += 1
p0 = p_rlowleg
p1 = p_rfoot
r = surface.getPoint('shinRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [rvec_upleg, rvec_femur, vec_spine, rvec_clavicle, rvec_arm, rvec_fore]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT UP LEG LIMB
index += 1
p0 = p_rupleg
p1 = p_rlowleg
r = surface.getPoint('thightRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [rvec_femur, vec_spine, rvec_clavicle, rvec_arm, rvec_fore]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT LOW LEG LIMB
index += 1
p0 = p_llowleg
p1 = p_lfoot
r = surface.getPoint('shinLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [lvec_upleg, lvec_femur, vec_spine, rvec_clavicle, rvec_arm, rvec_fore]
tau = 0
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT UP LEG LIMB
index += 1
p0 = p_lupleg
p1 = p_llowleg
r = surface.getPoint('thightLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [lvec_femur, vec_spine, rvec_clavicle, rvec_arm, rvec_fore]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
elif joint == animation.getskeletonmap().lhand:
#Left hand in respect to
#RIGHT FOREARM LIMB
p0 = p_rforearm
p1 = p_rhand
r = surface.getPoint('foreRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [rvec_arm, rvec_clavicle, lvec_clavicle, lvec_arm, lvec_fore]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT ARM LIMB
index += 1
p0 = p_rarm
p1 = p_rforearm
r = surface.getPoint('armRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [rvec_clavicle, lvec_clavicle, lvec_arm, lvec_fore]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT LOW LEG LIMB
index += 1
p0 = p_rlowleg
p1 = p_rfoot
r = surface.getPoint('shinRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [rvec_upleg, rvec_femur, vec_spine, lvec_clavicle, lvec_arm, lvec_fore]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT UP LEG LIMB
index += 1
p0 = p_rupleg
p1 = p_rlowleg
r = surface.getPoint('thightRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [rvec_femur, vec_spine, lvec_clavicle, lvec_arm, lvec_fore]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT LOW LEG LIMB
index += 1
p0 = p_llowleg
p1 = p_lfoot
r = surface.getPoint('shinLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [lvec_upleg, lvec_femur, vec_spine, lvec_clavicle, lvec_arm, lvec_fore]
tau = 0
for coef,vector in zip(egocoord.normcoef[index],path):
tau += np.linalg.norm(vector)*coef
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT UP LEG LIMB
index += 1
p0 = p_lupleg
p1 = p_llowleg
r = surface.getPoint('thightLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [lvec_femur, vec_spine, lvec_clavicle, lvec_arm, lvec_fore]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
elif joint == animation.getskeletonmap().rforearm:
#Right elbow in respect to
#LEFT FOREARM LIMB
p0 = p_lforearm
p1 = p_lhand
r = surface.getPoint('foreLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [lvec_arm, lvec_clavicle, rvec_clavicle, rvec_arm]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT ARM LIMB
index += 1
p0 = p_larm
p1 = p_lforearm
r = surface.getPoint('armLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [lvec_clavicle, rvec_clavicle, rvec_arm]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT LOW LEG LIMB
index += 1
p0 = p_rlowleg
p1 = p_rfoot
r = surface.getPoint('shinRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [rvec_upleg, rvec_femur, vec_spine, rvec_clavicle, rvec_arm]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT UP LEG LIMB
index += 1
p0 = p_rupleg
p1 = p_rlowleg
r = surface.getPoint('thightRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [rvec_femur, vec_spine, rvec_clavicle, rvec_arm]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT LOW LEG LIMB
index += 1
p0 = p_llowleg
p1 = p_lfoot
r = surface.getPoint('shinLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [lvec_upleg, lvec_femur, vec_spine, rvec_clavicle, rvec_arm]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT UP LEG LIMB
index += 1
p0 = p_lupleg
p1 = p_llowleg
r = surface.getPoint('thightLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [lvec_femur, vec_spine, rvec_clavicle, rvec_arm]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
elif joint == animation.getskeletonmap().lforearm:
#Left elbow in respect to
#RIGHT FOREARM LIMB
p0 = p_rforearm
p1 = p_rhand
r = surface.getPoint('foreRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [rvec_arm, rvec_clavicle, lvec_clavicle, lvec_arm]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT ARM LIMB
index += 1
p0 = p_rarm
p1 = p_rforearm
r = surface.getPoint('armRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [rvec_clavicle, lvec_clavicle, lvec_arm]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT LOW LEG LIMB
index += 1
p0 = p_rlowleg
p1 = p_rfoot
r = surface.getPoint('shinRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [rvec_upleg, rvec_femur, vec_spine, lvec_clavicle, lvec_arm]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT UP LEG LIMB
index += 1
p0 = p_rupleg
p1 = p_rlowleg
r = surface.getPoint('thightRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [rvec_femur, vec_spine, lvec_clavicle, lvec_arm]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT LOW LEG LIMB
index += 1
p0 = p_llowleg
p1 = p_lfoot
r = surface.getPoint('shinLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [lvec_upleg, lvec_femur, vec_spine, lvec_clavicle, lvec_arm]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT UP LEG LIMB
index += 1
p0 = p_lupleg
p1 = p_llowleg
r = surface.getPoint('thightLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = [lvec_femur, vec_spine, lvec_clavicle, lvec_arm]
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
elif joint == animation.getskeletonmap().rfoot:
#Right foot in respect to
#RIGHT FOREARM LIMB
p0 = p_rforearm
p1 = p_rhand
r = surface.getPoint('foreRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- rvec_arm, - rvec_clavicle, - vec_spine, rvec_femur, rvec_upleg, rvec_lowleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT ARM LIMB
index += 1
p0 = p_rarm
p1 = p_rforearm
r = surface.getPoint('armRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- rvec_clavicle, - vec_spine, rvec_femur, rvec_upleg, rvec_lowleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT FOREARM LIMB
index += 1
p0 = p_lforearm
p1 = p_lhand
r = surface.getPoint('foreLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- lvec_arm, - lvec_clavicle, - vec_spine, rvec_femur, rvec_upleg, rvec_lowleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT ARM LIMB
index += 1
p0 = p_larm
p1 = p_lforearm
r = surface.getPoint('armLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- lvec_clavicle, - vec_spine, rvec_femur, rvec_upleg, rvec_lowleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT LOW LEG LIMB
index += 1
p0 = p_llowleg
p1 = p_lfoot
r = surface.getPoint('shinLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([ - lvec_upleg,- lvec_femur, rvec_femur, rvec_upleg, rvec_lowleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT UP LEG LIMB
index += 1
p0 = p_lupleg
p1 = p_llowleg
r = surface.getPoint('thightRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- lvec_femur, rvec_femur, rvec_lowleg, rvec_upleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
elif joint == animation.getskeletonmap().lfoot:
#Left foot in respect to
#RIGHT FOREARM LIMB
p0 = p_rforearm
p1 = p_rhand
r = surface.getPoint('foreRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- rvec_arm, - rvec_clavicle, - vec_spine, lvec_femur, lvec_upleg, lvec_lowleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT ARM LIMB
index += 1
p0 = p_rarm
p1 = p_rforearm
r = surface.getPoint('armRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- rvec_clavicle, - vec_spine, lvec_femur, lvec_upleg, lvec_lowleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT FOREARM LIMB
index += 1
p0 = p_lforearm
p1 = p_lhand
r = surface.getPoint('foreLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- lvec_arm, - lvec_clavicle, - vec_spine, lvec_femur, lvec_upleg, lvec_lowleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT ARM LIMB
index += 1
p0 = p_larm
p1 = p_lforearm
r = surface.getPoint('armLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- lvec_clavicle, - vec_spine, lvec_femur, lvec_upleg, lvec_lowleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT LOW LEG LIMB
index += 1
p0 = p_llowleg
p1 = p_lfoot
r = surface.getPoint('shinLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([ - lvec_upleg,- lvec_femur, lvec_femur, lvec_upleg, lvec_lowleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT UP LEG LIMB
index += 1
p0 = p_lupleg
p1 = p_llowleg
r = surface.getPoint('thightRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- lvec_femur, lvec_femur, lvec_upleg, lvec_lowleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
elif joint == animation.getskeletonmap().rlowleg:
#Right knee in respect to
#RIGHT FOREARM LIMB
p0 = p_rforearm
p1 = p_rhand
r = surface.getPoint('foreRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- rvec_arm, - rvec_clavicle, - vec_spine, rvec_femur, rvec_upleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT ARM LIMB
index += 1
p0 = p_rarm
p1 = p_rforearm
r = surface.getPoint('armRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- rvec_clavicle, - vec_spine, rvec_femur, rvec_upleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT FOREARM LIMB
index += 1
p0 = p_lforearm
p1 = p_lhand
r = surface.getPoint('foreLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- lvec_arm, - lvec_clavicle, - vec_spine, rvec_femur, rvec_upleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT ARM LIMB
index += 1
p0 = p_larm
p1 = p_lforearm
r = surface.getPoint('armLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- lvec_clavicle, - vec_spine, rvec_femur, rvec_upleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT LOW LEG LIMB
index += 1
p0 = p_llowleg
p1 = p_lfoot
r = surface.getPoint('shinLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([ - lvec_upleg,- lvec_femur, rvec_femur, rvec_upleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT UP LEG LIMB
index += 1
p0 = p_lupleg
p1 = p_llowleg
r = surface.getPoint('thightRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- lvec_femur, rvec_femur, rvec_lowleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
elif joint == animation.getskeletonmap().llowleg:
#Left foot in respect to
#RIGHT FOREARM LIMB
p0 = p_rforearm
p1 = p_rhand
r = surface.getPoint('foreRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- rvec_arm, - rvec_clavicle, - vec_spine, lvec_femur, lvec_upleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#RIGHT ARM LIMB
index += 1
p0 = p_rarm
p1 = p_rforearm
r = surface.getPoint('armRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- rvec_clavicle, - vec_spine, lvec_femur, lvec_upleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT FOREARM LIMB
index += 1
p0 = p_lforearm
p1 = p_lhand
r = surface.getPoint('foreLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- lvec_arm, - lvec_clavicle, - vec_spine, lvec_femur, lvec_upleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT ARM LIMB
index += 1
p0 = p_larm
p1 = p_lforearm
r = surface.getPoint('armLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- lvec_clavicle, - vec_spine, lvec_femur, lvec_upleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT LOW LEG LIMB
index += 1
p0 = p_llowleg
p1 = p_lfoot
r = surface.getPoint('shinLeft').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([ - lvec_upleg,- lvec_femur, lvec_femur, lvec_upleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
#LEFT UP LEG LIMB
index += 1
p0 = p_lupleg
p1 = p_llowleg
r = surface.getPoint('thightRight').radius
de_refpoint, normal = mathutils.capsuleCartesian(egocoord.refpoint[index], p0, p1, r)
path = np.asarray([- lvec_femur, lvec_femur, lvec_upleg])
tau = (np.linalg.norm(path, axis=1)*egocoord.normcoef[index]).sum()
de_displacement= egocoord.dispvector[index]*tau
yield de_displacement, de_refpoint, tau, normal
def extremityNormal(animation, joint, frame):
    """
    Return the surface normal for an extremity joint (hand or foot).

    Based on the T-pose in frame 0, the initial surface normal is computed by:
      - getting the direction of the bone in the first frame (not the joint's
        orientation!);
      - setting a rotation axis equal to the cross product of the Y-axis
        [0, 1, 0] and this direction;
      - rotating the bone direction 90 degrees around this axis.
    The initial normal is cached on the joint as ``joint.initNormal``; for
    frames other than 0 it is rotated by the joint's global rotation relative
    to frame 0 and returned.

    :param animation: animation object exposing ``getskeletonmap()``.
    :param joint: one of the mapped extremity joints (rhand, lhand, rfoot, lfoot).
    :param frame: frame index at which to evaluate the normal.
    :return: the surface normal at *frame* (unit-length at frame 0).
    :raises Exception: if *joint* is not one of the four extremity joints.
    """
    skmap = animation.getskeletonmap()
    try:
        initnormal = joint.initNormal
    except AttributeError:
        # The joint does not have a cached initial normal yet; derive it from
        # the bone direction at the T-pose (frame 0).
        if joint == skmap.rhand:
            child = skmap.rhandmiddle
            if not child:
                print('Right hand middle base not mapped, using bone direction = [-1,0,0]')
                bonedirection = [-1, 0, 0]
        elif joint == skmap.lhand:
            child = skmap.lhandmiddle
            if not child:
                print('Left hand middle base not mapped, using bone direction = [1,0,0]')
                bonedirection = [1, 0, 0]
        elif joint == skmap.rfoot:
            child = skmap.rtoebase
            if not child:
                print('Right toe base not mapped, using bone direction = [0,0,1]')
                bonedirection = [0, 0, 1]
        elif joint == skmap.lfoot:
            child = skmap.ltoebase
            if not child:
                print('Left toe base not mapped, using bone direction = [0,0,1]')
                bonedirection = [0, 0, 1]
        else:
            raise Exception('This is not a extrimity joint.')
        if child:
            # Bone direction from the joint to its mapped child in the T-pose.
            bonedirection = child.getPosition(frame=0) - joint.getPosition(frame=0)
            bonedirection = mathutils.unitVector(bonedirection)
        # Axis orthogonal to the world up vector and the bone; rotating the
        # bone 90 degrees around it yields the initial surface normal.
        # NOTE(review): if the bone is parallel to [0,1,0] the cross product
        # is the zero vector and the rotation is degenerate -- assumed not to
        # occur for a T-pose; confirm for unusual rigs.
        axis = np.cross([0, 1, 0], bonedirection)
        matrix = mathutils.matrixRotation(90, axis[0], axis[1], axis[2], shape=3)
        initnormal = np.dot(matrix, bonedirection)
        initnormal = mathutils.unitVector(initnormal)
        joint.initNormal = initnormal[:]
    if frame == 0:
        return initnormal
    else:
        # Rotation taking the joint from its frame-0 pose to the current frame:
        # R_current * R_initial^T (rotation matrices, so transpose == inverse).
        glbTransformMat = joint.getGlobalTransform(frame)
        glbRotationMat = mathutils.shape4ToShape3(glbTransformMat)
        glbInitTransformMat = joint.getGlobalTransform(frame=0)
        glbInitRotationMat = mathutils.shape4ToShape3(glbInitTransformMat)
        transform = np.dot(glbRotationMat, glbInitRotationMat.T)
        # Rotate the initial surface normal into the current frame.
        currentnormal = np.dot(transform, initnormal)
        return currentnormal
def importanceCalc(dispvector, normal, handthick = 3.5):
    """
    Compute the importance of this triangle's contribution to the joint
    position.

    Importance is the product of *proximity* (inverse of the displacement
    length minus the hand thickness, clamped by epsilon) and *orthogonality*
    (the cosine between the normal direction and the scaled displacement,
    remapped from [-1, 1] to [0, 1] and floored at epsilon).

    :param dispvector: displacement vector from the surface to the joint.
    :param normal: surface normal (not necessarily unit length).
    :param handthick: thickness offset subtracted from the displacement norm.
    :return: (importance, orthogonality, proximity) tuple.
    """
    epsilon = 0.01
    distance = np.linalg.norm(dispvector) - handthick
    # Clamp proximity so that near/penetrating contacts do not blow up.
    proximity = 1 / epsilon if distance <= epsilon else 1 / distance
    cosine = np.dot(normal / np.linalg.norm(normal), dispvector / distance)
    # Remap cosine from [-1, 1] to [0, 1].
    orthogonality = (np.clip(cosine, -1.0, 1.0) + 1) / 2
    # TODO: CHECK
    # TODO: The paper suggests substituting cos(epsilon) here, but that would
    # turn a value near zero into one.
    if orthogonality < epsilon:
        orthogonality = epsilon
    orthogonality = np.abs(orthogonality)
    return orthogonality * proximity, orthogonality, proximity
def importanceCalcLimb(vectors, limbname, dispvector, normal):
    """
    Compute the importance for the limbs (without the surface normal vector).

    Validates *limbname* and delegates the actual computation to
    :func:`importanceCalc`.

    :param vectors: tuple of the 14 bone vectors (kept for backward
        compatibility; not used in the computation -- see NOTE below).
    :param limbname: one of 'rarm', 'larm', 'rfore', 'lfore', 'rlowleg',
        'llowleg', 'rupleg', 'lupleg'.
    :param dispvector: displacement vector from the limb surface to the joint.
    :param normal: surface normal at the collision reference point.
    :return: (importance, orthogonality, proximity), or None for an unknown
        limb name.
    """
    # NOTE: the previous implementation also selected the limb's bone vector
    # from *vectors* and normalized it, but the result was never used (and
    # could emit NaN warnings for zero-length bones). That dead computation
    # was removed; the parameter is retained so callers keep working.
    valid = ('rarm', 'larm', 'rfore', 'lfore',
             'rlowleg', 'llowleg', 'rupleg', 'lupleg')
    if limbname not in valid:
        print('Unknown limb name')
        return None
    importance, orthogonality, proximity = importanceCalc(dispvector, normal)
    return importance, orthogonality, proximity
def pathnormCalc(joint, animation, mesh, frame, refpoint, vectors, jointpositions):
    """
    Compute the kinematic-path normalization. Receives the joint and walks up
    the hierarchy. Kinematic path used: Hand - Elbow - Shoulder - Spine -
    Head or Hips.
    Returns the normalized displacement vector, the vector of cosines between
    the displacement and each path bone, and the normalization factor tau
    (Eray Molla Eq. 5).

    NOTE(review): the *frame* parameter is not used in this function body --
    positions and vectors arrive pre-computed via *vectors*/*jointpositions*.
    """
    # TODO: Handle the ground case properly
    # For now, joints other than the hands get limited handling
    # Eray Molla Fig. 9
    # Get bone vectors (pre-computed by the caller for the current frame)
    lvec_fore, rvec_fore, lvec_arm, rvec_arm, lvec_clavicle, rvec_clavicle, vec_neck, vec_spine, lvec_femur, rvec_femur, lvec_upleg, rvec_upleg, lvec_lowleg, rvec_lowleg = vectors
    # Get pre-computed joint positions (only hips and head are used here)
    pos_hips, _, _, _, _, _, _, pos_head, _, _, _, _, _, _, _, _, _, _, _, _, _, _ = jointpositions
    # Get mapped joints
    lhand, rhand, lforearm, rforearm = animation.getskeletonmap().lhand, animation.getskeletonmap().rhand, animation.getskeletonmap().lforearm, animation.getskeletonmap().rforearm
    lfoot, rfoot, llowleg, rlowleg = animation.getskeletonmap().lfoot, animation.getskeletonmap().rfoot, animation.getskeletonmap().llowleg, animation.getskeletonmap().rlowleg
    # Defines the kinematic path (chain of bone vectors) for each joint.
    # NOTE(review): any other joint leaves kinpath undefined and raises a
    # NameError below -- presumably callers only pass these eight joints.
    if joint == lhand:
        kinpath = np.asarray([lvec_clavicle, lvec_arm, lvec_fore])
    elif joint == rhand:
        kinpath = np.asarray([rvec_clavicle, rvec_arm, rvec_fore])
    elif joint == lforearm:
        kinpath = np.asarray([lvec_clavicle, lvec_arm])
    elif joint == rforearm:
        kinpath = np.asarray([rvec_clavicle, rvec_arm])
    elif joint == lfoot:
        kinpath = np.asarray([lvec_femur, lvec_upleg, lvec_lowleg])
    elif joint == rfoot:
        kinpath = np.asarray([rvec_femur, rvec_upleg, rvec_lowleg])
    elif joint == llowleg:
        kinpath = np.asarray([lvec_femur, lvec_upleg])
    elif joint == rlowleg:
        kinpath = np.asarray([rvec_femur, rvec_upleg])
    # Get vector displacement
    if joint == lhand or joint == rhand or joint == lforearm or joint == rforearm:
        # cos[0] corresponds to the mesh reference bone (neck or spine);
        # cos[1:] correspond to the kinematic-path bones.
        cos = np.empty(len(kinpath)+1)
        # Upper limb
        if mesh == 'head':
            # Displacement from the mesh reference point up through the neck
            # and then down the kinematic path.
            vec_displacement = -(refpoint - pos_head) + vec_neck
            vec_displacement = vec_displacement + kinpath.sum(axis = 0)
            cos[0] = mathutils.cosBetween(vec_displacement, vec_neck)
            tau = np.linalg.norm(vec_neck)*cos[0]
        elif mesh == 'body':
            vec_displacement = -(refpoint - pos_hips) + vec_spine
            vec_displacement = vec_displacement + kinpath.sum(axis = 0)
            cos[0] = mathutils.cosBetween(vec_displacement, vec_spine)
            tau = np.linalg.norm(vec_spine)*cos[0]
        else:
            raise Exception('Upper limb joints only accept meshes from the head and body.')
        # Get tau (Eray Molla Eq 5): sum of each bone length projected onto
        # the displacement direction.
        for i in range(1,len(cos)):
            cos[i] = mathutils.cosBetween(vec_displacement, kinpath[i-1])
            tau = tau + np.linalg.norm(kinpath[i-1])*cos[i]
    else:
        # Lower limbs
        if mesh == 'head':
            # Two extra entries: neck and (negated) spine precede the path.
            cos = np.empty(len(kinpath)+2)
            vec_displacement = -(refpoint - pos_head) + vec_neck - vec_spine
            vec_displacement = vec_displacement + kinpath.sum(axis = 0)
            cos[0] = mathutils.cosBetween(vec_displacement, vec_neck)
            cos[1] = mathutils.cosBetween(vec_displacement, -vec_spine)
            tau = np.linalg.norm(vec_neck)*cos[0] + np.linalg.norm(-vec_spine)*cos[1]
            for i in range(2,len(cos)):
                cos[i] = mathutils.cosBetween(vec_displacement, kinpath[i-2])
                tau = tau + np.linalg.norm(kinpath[i-2])*cos[i]
        elif mesh == 'body':
            # Hips are the path root, so no extra reference-bone entry.
            cos = np.empty(len(kinpath))
            vec_displacement = -(refpoint - pos_hips)
            vec_displacement = vec_displacement + kinpath.sum(axis = 0)
            tau = 0
            for i in range(len(cos)):
                cos[i] = mathutils.cosBetween(vec_displacement, kinpath[i])
                tau = tau + np.linalg.norm(kinpath[i])*cos[i]
        elif mesh == 'ground':
            assert joint == rfoot or joint == lfoot, 'Foot contact should only be randled with the right and left foot'
            # Split the hips position into its ground projection and height.
            hipsGround = np.asarray([pos_hips[0], 0, pos_hips[2]])
            hipsHeight = np.asarray([0, pos_hips[1], 0])
            vec_displacement = -(refpoint - hipsGround) + hipsHeight
            vec_displacement = vec_displacement+ kinpath.sum(axis = 0)
            cos = np.empty(len(kinpath)+1)
            cos[0] = mathutils.cosBetween(vec_displacement, hipsHeight)
            # NOTE(review): unlike the 'head'/'body' branches, tau starts at 0
            # and the hipsHeight term (cos[0]) is not accumulated -- confirm
            # whether this is intentional for ground contact.
            tau = 0
            for i in range(1,len(cos)):
                cos[i] = mathutils.cosBetween(vec_displacement, kinpath[i-1])
                tau = tau + np.linalg.norm(kinpath[i-1])*cos[i]
    return vec_displacement/tau, cos, tau
def pathnormCalcLimb(joint, animation, mesh, frame, vectors, jointpositions, surface):
lvec_fore, rvec_fore, lvec_arm, rvec_arm, lvec_clavicle, rvec_clavicle, vec_neck, vec_spine, lvec_femur, rvec_femur, lvec_upleg, rvec_upleg, lvec_lowleg, rvec_lowleg = vectors
p_hips, p_spine, p_spine1, p_spine2, p_spine3, p_neck, p_neck1, p_head, p_lshoulder,p_larm, p_lforearm, p_lhand, p_rshoulder, p_rarm, p_rforearm, p_rhand, p_lupleg, p_llowleg, p_lfoot, p_rupleg, p_rlowleg, p_rfoot = jointpositions
# TODO: Fazer para cada junta para cada um dos membros
jointPosition = joint.getPosition(frame)
if joint == animation.getskeletonmap().rhand:
#Right hand in respect to
#LEFT FOREARM LIMB
p1 = p_lhand
p0 = p_lforearm
r = surface.getPoint('foreLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_arm, - lvec_clavicle, rvec_clavicle, rvec_arm, rvec_fore])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lfore', normal, cylindric, refpoint
#LEFT ARM LIMB
p1 = p0[:]
p0 = p_larm
r = surface.getPoint('armLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([ - lvec_clavicle, rvec_clavicle, rvec_arm, rvec_fore])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'larm', normal, cylindric, refpoint
#RIGHT LOW LEG LIMB
p1 = p_rfoot
p0 = p_rlowleg
r = surface.getPoint('shinRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_upleg, - rvec_femur, vec_spine, rvec_clavicle, rvec_arm, rvec_fore])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rlowleg', normal, cylindric, refpoint
#RIGHT UP LEG LIMB
p1 = p0[:]
p0 = p_rupleg
r = surface.getPoint('thightRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_femur, vec_spine, rvec_clavicle, rvec_arm, rvec_fore])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rupleg', normal, cylindric, refpoint
#LEFT LOW LEG LIMB
p1 = p_lfoot
p0 = p_llowleg
r = surface.getPoint('shinLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_upleg, - lvec_femur, vec_spine, rvec_clavicle, rvec_arm, rvec_fore])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement,cos, tau, 'llowleg', normal, cylindric, refpoint
#LEFT UP LEG LIMB
p1 = p0[:]
p0 = p_lupleg
r = surface.getPoint('thightLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_femur, vec_spine, rvec_clavicle, rvec_arm, rvec_fore])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lupleg', normal, cylindric, refpoint
elif joint == animation.getskeletonmap().lhand:
#Left hand in respect to
#RIGHT FOREARM LIMB
p1 = p_rhand
p0 = p_rforearm
r = surface.getPoint('foreRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_arm, - rvec_clavicle, lvec_clavicle, lvec_arm, lvec_fore])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rfore', normal, cylindric, refpoint
#RIGHT ARM LIMB
p1 = p0[:]
p0 = p_rarm
r = surface.getPoint('armRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_clavicle, lvec_clavicle, lvec_arm, lvec_fore])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rarm', normal, cylindric, refpoint
#RIGHT LOW LEG LIMB
p1 = p_rfoot
p0 = p_rlowleg
r = surface.getPoint('shinRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_upleg, - rvec_femur, vec_spine, lvec_clavicle, lvec_arm, lvec_fore])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rlowleg', normal, cylindric, refpoint
#RIGHT UP LEG LIMB
p1 = p0[:]
p0 = p_rupleg
r = surface.getPoint('thightRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_femur, vec_spine, lvec_clavicle, lvec_arm, lvec_fore])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rupleg', normal, cylindric, refpoint
#LEFT LOW LEG LIMB
p1 = p_lfoot
p0 = p_llowleg
r = surface.getPoint('shinLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([ - lvec_upleg, - lvec_femur, vec_spine, lvec_clavicle, lvec_arm, lvec_fore])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'llowleg', normal, cylindric, refpoint
#LEFT UP LEG LIMB
p1 = p0[:]
p0 = p_lupleg
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
r = surface.getPoint('thightLeft').radius
path = np.asarray([- lvec_femur, vec_spine, lvec_clavicle, lvec_arm, lvec_fore])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lupleg', normal, cylindric, refpoint
elif joint == animation.getskeletonmap().rforearm:
#Right elbow in respect to
#LEFT FOREARM LIMB
p0 = p_lforearm
p1 = p_lhand
r = surface.getPoint('foreLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_arm, - lvec_clavicle, rvec_clavicle, rvec_arm])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lfore', normal, cylindric, refpoint
#LEFT ARM LIMB
p0 = p_larm
p1 = p_lforearm
r = surface.getPoint('armLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_clavicle, rvec_clavicle, rvec_arm])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'larm', normal, cylindric, refpoint
#RIGHT LOW LEG LIMB
p0 = p_rlowleg
p1 = p_rfoot
r = surface.getPoint('shinRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_upleg, - rvec_femur , vec_spine , rvec_clavicle , rvec_arm])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rlowleg', normal, cylindric, refpoint
#RIGHT UP LEG LIMB
p0 = p_rupleg
p1 = p_rlowleg
r = surface.getPoint('thightRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_femur, vec_spine, rvec_clavicle, rvec_arm])
vec_displacement = -(refpoint -p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rupleg', normal, cylindric, refpoint
#LEFT LOW LEG LIMB
p0 = p_llowleg
p1 = p_lfoot
r = surface.getPoint('shinLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_upleg, - lvec_femur, vec_spine, rvec_clavicle, rvec_arm])
vec_displacement = -(refpoint - p0)+ path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'llowleg', normal, cylindric, refpoint
#LEFT UP LEG LIMB
p0 = p_lupleg
p1 = p_llowleg
r = surface.getPoint('thightLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_femur, vec_spine, rvec_clavicle, rvec_arm])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lupleg', normal, cylindric, refpoint
elif joint == animation.getskeletonmap().lforearm:
#Left elbow in respect to
#RIGHT FOREARM LIMB
p0 = p_rforearm
p1 = p_rhand
r = surface.getPoint('foreRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_arm, - rvec_clavicle, lvec_clavicle, lvec_arm])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement,cos, tau, 'rfore', normal, cylindric, refpoint
#RIGHT ARM LIMB
p0 = p_rarm
p1 = p_rforearm
r = surface.getPoint('armRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_clavicle, lvec_clavicle, lvec_arm])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rarm', normal, cylindric, refpoint
#RIGHT LOW LEG LIMB
p0 = p_rlowleg
p1 = p_rfoot
r = surface.getPoint('shinRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_upleg, - rvec_femur, vec_spine, lvec_clavicle, lvec_arm])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rlowleg', normal, cylindric, refpoint
#RIGHT UP LEG LIMB
p0 = p_rupleg
p1 = p_rlowleg
r = surface.getPoint('thightRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_femur, vec_spine, lvec_clavicle, lvec_arm])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rupleg', normal, cylindric, refpoint
#LEFT LOW LEG LIMB
p0 = p_llowleg
p1 = p_lfoot
r = surface.getPoint('shinLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([ - lvec_upleg,- lvec_femur, vec_spine, lvec_clavicle, lvec_arm])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'llowleg', normal, cylindric, refpoint
#LEFT UP LEG LIMB
p0 = p_lupleg
p1 = p_llowleg
r = surface.getPoint('thightLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_femur, vec_spine, lvec_clavicle, lvec_arm])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lupleg', normal, cylindric, refpoint
elif joint == animation.getskeletonmap().rfoot:
#Right foot in respect to
#RIGHT FOREARM LIMB
p0 = p_rforearm
p1 = p_rhand
r = surface.getPoint('foreRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_arm, - rvec_clavicle, - vec_spine, rvec_femur, rvec_upleg, rvec_lowleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement,cos, tau, 'rfore', normal, cylindric, refpoint
#RIGHT ARM LIMB
p0 = p_rarm
p1 = p_rforearm
r = surface.getPoint('armRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_clavicle, - vec_spine, rvec_femur, rvec_upleg, rvec_lowleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rfore', normal, cylindric, refpoint
#LEFT FOREARM LIMB
p0 = p_lforearm
p1 = p_lhand
r = surface.getPoint('foreLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_arm, - lvec_clavicle, - vec_spine, rvec_femur, rvec_upleg, rvec_lowleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lfore', normal, cylindric, refpoint
#LEFT ARM LIMB
p0 = p_larm
p1 = p_lforearm
r = surface.getPoint('armLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_clavicle, - vec_spine, rvec_femur, rvec_upleg, rvec_lowleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'larm', normal, cylindric, refpoint
#LEFT LOW LEG LIMB
p0 = p_llowleg
p1 = p_lfoot
r = surface.getPoint('shinLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([ - lvec_upleg,- lvec_femur, rvec_femur, rvec_upleg, rvec_lowleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'llowleg', normal, cylindric, refpoint
#LEFT UP LEG LIMB
p0 = p_lupleg
p1 = p_llowleg
r = surface.getPoint('thightLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_femur, rvec_femur, rvec_lowleg, rvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lupleg', normal, cylindric, refpoint
elif joint == animation.getskeletonmap().lfoot:
#Left foot in respect to
#RIGHT FOREARM LIMB
p0 = p_rforearm
p1 = p_rhand
r = surface.getPoint('foreRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_arm, - rvec_clavicle, - vec_spine, lvec_femur, lvec_upleg, lvec_lowleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement,cos, tau, 'rfore', normal, cylindric, refpoint
#RIGHT ARM LIMB
p0 = p_rarm
p1 = p_rforearm
r = surface.getPoint('armRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_clavicle, - vec_spine, lvec_femur, lvec_upleg, lvec_lowleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rfore', normal, cylindric, refpoint
#LEFT FOREARM LIMB
p0 = p_lforearm
p1 = p_lhand
r = surface.getPoint('foreLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_arm, - lvec_clavicle, - vec_spine, lvec_femur, lvec_upleg, lvec_lowleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lfore', normal, cylindric, refpoint
#LEFT ARM LIMB
p0 = p_larm
p1 = p_lforearm
r = surface.getPoint('armLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_clavicle, - vec_spine, lvec_femur, lvec_upleg, lvec_lowleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'larm', normal, cylindric, refpoint
#RIGHT LOW LEG LIMB
p0 = p_rlowleg
p1 = p_rfoot
r = surface.getPoint('shinRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([ - rvec_upleg,- rvec_femur, lvec_femur, lvec_upleg, lvec_lowleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'llowleg', normal, cylindric, refpoint
#RIGHT UP LEG LIMB
p0 = p_rupleg
p1 = p_rlowleg
r = surface.getPoint('thightRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_femur, lvec_femur, lvec_upleg, lvec_lowleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lupleg', normal, cylindric, refpoint
elif joint == animation.getskeletonmap().rlowleg:
#Right knee in respect to
#RIGHT FOREARM LIMB
p0 = p_rforearm
p1 = p_rhand
r = surface.getPoint('foreRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_arm, - rvec_clavicle, - vec_spine, rvec_femur, rvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement,cos, tau, 'rfore', normal, cylindric, refpoint
#RIGHT ARM LIMB
p0 = p_rarm
p1 = p_rforearm
r = surface.getPoint('armRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_clavicle, - vec_spine, rvec_femur, rvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rfore', normal, cylindric, refpoint
#LEFT FOREARM LIMB
p0 = p_lforearm
p1 = p_lhand
r = surface.getPoint('foreLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_arm, - lvec_clavicle, - vec_spine, rvec_femur, rvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lfore', normal, cylindric, refpoint
#LEFT ARM LIMB
p0 = p_larm
p1 = p_lforearm
r = surface.getPoint('armLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_clavicle, - vec_spine, rvec_femur, rvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'larm', normal, cylindric, refpoint
#LEFT LOW LEG LIMB
p0 = p_llowleg
p1 = p_lfoot
r = surface.getPoint('shinLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([ - lvec_upleg,- lvec_femur, rvec_femur, rvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'llowleg', normal, cylindric, refpoint
#LEFT UP LEG LIMB
p0 = p_lupleg
p1 = p_llowleg
r = surface.getPoint('thightLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_femur, rvec_femur, rvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lupleg', normal, cylindric, refpoint
elif joint == animation.getskeletonmap().llowleg:
#Left knee in respect to
#RIGHT FOREARM LIMB
p0 = p_rforearm
p1 = p_rhand
r = surface.getPoint('foreRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_arm, - rvec_clavicle, - vec_spine, lvec_femur, lvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement,cos, tau, 'rfore', normal, cylindric, refpoint
#RIGHT ARM LIMB
p0 = p_rarm
p1 = p_rforearm
r = surface.getPoint('armRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_clavicle, - vec_spine, lvec_femur, lvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'rfore', normal, cylindric, refpoint
#LEFT FOREARM LIMB
p0 = p_lforearm
p1 = p_lhand
r = surface.getPoint('foreLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_arm, - lvec_clavicle, - vec_spine, lvec_femur, lvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lfore', normal, cylindric, refpoint
#LEFT ARM LIMB
p0 = p_larm
p1 = p_lforearm
r = surface.getPoint('armLeft').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- lvec_clavicle, - vec_spine, lvec_femur, lvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'larm', normal, cylindric, refpoint
#LEFT LOW LEG LIMB
p0 = p_rlowleg
p1 = p_rfoot
r = surface.getPoint('shinRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([ - rvec_upleg,- rvec_femur, lvec_femur, lvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'llowleg', normal, cylindric, refpoint
#LEFT UP LEG LIMB
p0 = p_rupleg
p1 = p_rlowleg
r = surface.getPoint('thightRight').radius
cylindric, refpoint, normal = mathutils.capsuleCollision(jointPosition,p0,p1,r)
path = np.asarray([- rvec_femur, lvec_femur, lvec_upleg])
vec_displacement = -(refpoint - p0) + path.sum(axis=0)
cos = np.asarray([mathutils.cosBetween(vec_displacement, path[i]) for i in range(len(path))])
tau = (np.linalg.norm(path, axis = 1)*cos).sum()
yield vec_displacement, cos, tau, 'lupleg', normal, cylindric, refpoint
def GetEgocentricCoordinatesTargets(srcAnim, surfacesrcAnim, tgtAnim, surfacetgtAnim, frame, checkLimbDistanceFlag=True):
    """Encode the source pose at *frame* in egocentric coordinates and
    denormalize them on the target character, producing world-space targets.

    Normalization (Eray Molla et al., egocentric retargeting): each
    end-effector (both hands and both feet) is described relative to every
    surface-mesh triangle and every limb capsule of the source character by
    a reference point, a normalized displacement vector (Eq. 4), a
    path-normalization factor tau (Eq. 5), an importance weight and, for
    extremities, the angle between surface normals (Eq. 3).  Denormalization
    rebuilds those descriptions on the target skeleton/surface and stores
    the resulting positions in each EgocentricCoordinate's ``targets``.

    :param srcAnim: source Animation object.
    :param surfacesrcAnim: Surface of the source character.
    :param tgtAnim: target (avatar) Animation object.
    :param surfacetgtAnim: Surface of the target character.
    :param frame: frame index to process.
    :param checkLimbDistanceFlag: currently unused; kept for caller
        compatibility.
    :returns: ``EgocentricCoordinate.egolist`` with target data filled in.
    """
    headmesh = surfacesrcAnim.headmesh
    bodymesh = surfacesrcAnim.bodymesh
    headmesh_tgtAnim = surfacetgtAnim.headmesh
    bodymesh_tgtAnim = surfacetgtAnim.bodymesh
    ego = None
    #egolist is class-level state: drop coordinates kept from a previous call
    EgocentricCoordinate.clean()
    #Get source skeleton map (only the extremities are encoded below)
    srcAnim_skmap = srcAnim.getskeletonmap()
    lhand, rhand = srcAnim_skmap.lhand, srcAnim_skmap.rhand
    lfoot, rfoot = srcAnim_skmap.lfoot, srcAnim_skmap.rfoot
    #Get target skeleton map (forearm/lowleg joints are kept because the
    #kinpath dispatch below still recognizes them)
    ava_skmap = tgtAnim.getskeletonmap()
    lhand_ava, rhand_ava = ava_skmap.lhand, ava_skmap.rhand
    lforearm_ava, rforearm_ava = ava_skmap.lforearm, ava_skmap.rforearm
    llowleg_ava, rlowleg_ava = ava_skmap.llowleg, ava_skmap.rlowleg
    lfoot_ava, rfoot_ava = ava_skmap.lfoot, ava_skmap.rfoot
    ground_normal = np.asarray([0, 1, 0])
    #Source pose for this frame: limb vectors, joint positions, surface mesh
    vectors = getVectors(srcAnim, frame)
    jointpositions = getJointsPositions(srcAnim, frame)
    mesh = getMeshPositions(srcAnim, surfacesrcAnim, frame)
    #####################################################################
    # Normalization: encode each end-effector joint of the source pose
    #####################################################################
    for joint in [rhand, lhand, rfoot, lfoot]:
        ego = EgocentricCoordinate(joint, frame)
        jointPosition = joint.getPosition(frame)
        #Eray Molla Equation 3: surface normal at the extremity joints
        if joint in (rhand, lhand, rfoot, lfoot):
            jointSurfaceNormal = extremityNormal(srcAnim, joint, frame)
        #Mesh components: one coordinate entry per surface triangle.  Head
        #triangles come first in `mesh`; the kinematic path used by
        #pathnormCalc differs between head and body triangles.
        for i in range(len(bodymesh)+len(headmesh)):
            region = 'head' if i < len(headmesh) else 'body'
            normal, refpoint, dispvector, refpoint_cartesian, _ = mathutils.clampedBarycentric(jointPosition, mesh[i][0], mesh[i][1], mesh[i][2])
            dispvector_norm, normcoef, tau = pathnormCalc(joint, srcAnim, region, frame, refpoint_cartesian, vectors, jointpositions)
            importance, ortho, proxi = importanceCalc(dispvector, normal)
            #Importance weight (and its components) of this reference point
            ego.ortho.append(ortho)
            ego.proxi.append(proxi)
            ego.importance.append(importance)
            #Reference point in barycentric coordinates of the triangle
            ego.refpoint.append(refpoint)
            #Normalized displacement from the reference point to the joint
            ego.dispvector.append(dispvector_norm)
            #Cosines between each bone and the displacement vector (Eq. 4)
            ego.normcoef.append(normcoef)
            #Normalization factor (Eq. 5)
            ego.tau.append(tau)
            ego.normal.append(normal)
            #Eray Molla Equation 3
            if joint in (rhand, lhand, rfoot, lfoot):
                angle, _ = mathutils.angleBetween(normal, jointSurfaceNormal)
                ego.angle.append(angle)
        #Limb components: one coordinate entry per limb capsule
        for values_returned in pathnormCalcLimb(joint, srcAnim, 'limb', frame, vectors, jointpositions, surfacesrcAnim):
            dispvector, normcoef, tau, limbname, normal, refpoint, refpoint_aux = values_returned
            importance, ortho, proxi = importanceCalcLimb(vectors, limbname, dispvector, normal)
            ego.ortho.append(ortho)
            ego.proxi.append(proxi)
            ego.importance.append(importance)
            ego.refpoint.append(refpoint)
            ego.dispvector.append(dispvector/tau)
            ego.normcoef.append(normcoef)
            ego.tau.append(tau)
            ego.normal.append(normal)
            #Eray Molla Equation 3
            if joint in (rhand, lhand, rfoot, lfoot):
                angle, _ = mathutils.angleBetween(normal, jointSurfaceNormal)
                ego.angle.append(angle)
        #Add the vertical ground projection as an extra reference point for
        #the feet (kept as the LAST entry; denormalization relies on that)
        if joint == rfoot or joint == lfoot:
            refpoint = np.asarray([jointPosition[0], 0, jointPosition[2]])
            dispvector_norm, normcoef, tau = pathnormCalc(joint, srcAnim, 'ground', frame, refpoint, vectors, jointpositions)
            #NOTE(review): `dispvector` and `normal` below still hold the
            #values from the last limb iteration, not ground quantities.
            #Using the ground displacement / ground_normal looks intended --
            #confirm before changing, as it alters the stored weights.
            importance, ortho, proxi = importanceCalc(dispvector, ground_normal)
            ego.ortho.append(ortho)
            ego.proxi.append(proxi)
            ego.importance.append(importance)
            ego.refpoint.append(refpoint)
            ego.dispvector.append(dispvector_norm)
            ego.normcoef.append(normcoef)
            ego.tau.append(tau)
            ego.normal.append(normal)
            angle, _ = mathutils.angleBetween(ground_normal, jointSurfaceNormal)
            ego.angle.append(angle)
        #Normalize the importance weights so they sum to one
        sumimp = sum(ego.importance)
        ego.importance = np.asarray(ego.importance)/sumimp
    #####################################################################
    # Denormalization on the target character for the same frame
    #####################################################################
    vectors = getVectors(tgtAnim, frame)
    jointpositions = getJointsPositions(tgtAnim, frame)
    mesh = getMeshPositions(tgtAnim, surfacetgtAnim, frame)
    lvec_fore, rvec_fore, lvec_arm, rvec_arm, lvec_clavicle, rvec_clavicle, vec_neck, vec_spine, lvec_femur, rvec_femur, lvec_upleg, rvec_upleg, lvec_lowleg, rvec_lowleg = vectors
    #egolist order matches the normalization loop above
    for egoindex, joint in enumerate([rhand_ava, lhand_ava, rfoot_ava, lfoot_ava]):
        ego = EgocentricCoordinate.egolist[egoindex]
        vec_displacement = []
        de_refpoint = []
        position = []
        taulist = []
        normallist = []
        #Mesh components
        for i in range(len(bodymesh_tgtAnim)+len(headmesh_tgtAnim)):
            #Rebuild the reference point from the stored barycentric coords
            de_refpoint_aux, normal = mathutils.barycentric2cartesian(ego.refpoint[i], mesh[i][0], mesh[i][1], mesh[i][2])
            if i < len(headmesh_tgtAnim):
                #Head triangles: kinematic chains are rooted at the neck
                if joint == lhand_ava: kinpath = np.asarray([vec_neck, lvec_clavicle, lvec_arm, lvec_fore])
                elif joint == rhand_ava: kinpath = np.asarray([vec_neck, rvec_clavicle, rvec_arm, rvec_fore])
                elif joint == lforearm_ava: kinpath = np.asarray([vec_neck, lvec_clavicle, lvec_arm])
                elif joint == rforearm_ava: kinpath = np.asarray([vec_neck, rvec_clavicle, rvec_arm])
                elif joint == lfoot_ava: kinpath = np.asarray([vec_neck, vec_spine, lvec_femur, lvec_upleg, lvec_lowleg])
                elif joint == rfoot_ava: kinpath = np.asarray([vec_neck, vec_spine, rvec_femur, rvec_upleg, rvec_lowleg])
                elif joint == llowleg_ava: kinpath = np.asarray([vec_neck, vec_spine, lvec_femur, lvec_upleg])
                elif joint == rlowleg_ava: kinpath = np.asarray([vec_neck, vec_spine, rvec_femur, rvec_upleg])
            else:
                #Body triangles: kinematic chains are rooted at the spine
                if joint == lhand_ava: kinpath = np.asarray([vec_spine, lvec_clavicle, lvec_arm, lvec_fore])
                elif joint == rhand_ava: kinpath = np.asarray([vec_spine, rvec_clavicle, rvec_arm, rvec_fore])
                elif joint == lforearm_ava: kinpath = np.asarray([vec_spine, lvec_clavicle, lvec_arm])
                elif joint == rforearm_ava: kinpath = np.asarray([vec_spine, rvec_clavicle, rvec_arm])
                elif joint == lfoot_ava: kinpath = np.asarray([lvec_femur, lvec_upleg, lvec_lowleg])
                elif joint == rfoot_ava: kinpath = np.asarray([rvec_femur, rvec_upleg, rvec_lowleg])
                elif joint == llowleg_ava: kinpath = np.asarray([lvec_femur, lvec_upleg])
                elif joint == rlowleg_ava: kinpath = np.asarray([rvec_femur, rvec_upleg])
            #Denormalize: scale the stored unit displacement by target tau
            tau = (np.linalg.norm(kinpath, axis=1)*ego.normcoef[i]).sum()
            vec_displacement_aux = ego.dispvector[i]*tau
            taulist.append(tau)
            vec_displacement.append(vec_displacement_aux)
            de_refpoint.append(de_refpoint_aux)
            position.append(vec_displacement_aux+de_refpoint_aux)
            normallist.append(normal)
        #Limb components (stored reference-point indices continue after the
        #mesh entries, hence the i+1 offset)
        for values_returned in DenormEgoLimb(joint, tgtAnim, surfacetgtAnim, frame, vectors, jointpositions, ego, i+1):
            vec_displacement_aux, de_refpoint_aux, tau, normal = values_returned
            taulist.append(tau)
            vec_displacement.append(vec_displacement_aux)
            de_refpoint.append(de_refpoint_aux)
            position.append(vec_displacement_aux+de_refpoint_aux)
            normallist.append(normal)
        #Ground projection entry for the feet (last stored coordinate)
        if joint == rfoot_ava or joint == lfoot_ava:
            jointPosition = joint.getPosition(frame)
            hipsPosition = tgtAnim.getskeletonmap().hips.getPosition(frame)
            hipsGround = np.asarray([hipsPosition[0], 0, hipsPosition[2]])
            hipsHeight = np.asarray([0, hipsPosition[1], 0])
            de_refpoint_aux = np.asarray([jointPosition[0], 0, jointPosition[2]])
            #BUGFIX: this used to compare against the *source* joint
            #(`joint == rfoot`), which is never equal to a target joint when
            #the skeletons differ, so the right foot always fell through to
            #the left-leg kinematic chain.
            if joint == rfoot_ava:
                kinpath = np.asarray([-de_refpoint_aux, -hipsGround, hipsHeight, rvec_femur, rvec_upleg, rvec_lowleg])
            else:
                kinpath = np.asarray([-de_refpoint_aux, -hipsGround, hipsHeight, lvec_femur, lvec_upleg, lvec_lowleg])
            vec_displacement_aux = kinpath.sum(axis=0)
            cos = np.empty(len(kinpath))
            tau = 0
            for seg in range(len(cos)):
                cos[seg] = mathutils.cosBetween(vec_displacement_aux, kinpath[seg])
                tau = tau + np.linalg.norm(kinpath[seg])*cos[seg]
            #Stored (normalized) ground displacement scaled by target tau
            vec_displacement_aux = ego.dispvector[-1]*tau
            taulist.append(tau)
            vec_displacement.append(vec_displacement_aux)
            de_refpoint.append(de_refpoint_aux)
            position.append(vec_displacement_aux+de_refpoint_aux)
            normallist.append([0, 1, 0])
        ego.tgt_dispvector = np.asarray(vec_displacement)
        ego.tgt_tau = np.asarray(taulist)
        ego.tgt_refpoint = np.asarray(de_refpoint)
        ego.targets = np.asarray(position)
        ego.tgt_normal = np.asarray(normallist)
    #Class-level list: valid even if the loops above never executed
    #(previously returned `ego.egolist`, which crashed when ego was None)
    return EgocentricCoordinate.egolist
| [
"rltonoli@gmail.com"
] | rltonoli@gmail.com |
37761b569d22615d0f0e51e0a0b27f66188a80ce | aff732682d12192e163e18e57c4dbc832c81ffe7 | /week0/TwentyFortyEight_test.py | 5b758d3572a59a96ddadad5c8f85dad90eb1405d | [] | no_license | EarlMatthews/principlescomputing | 4ad0d0736bb1fe4468d60a56adfa4b5ec58f1d39 | 9ebf70815b512e79fa1ef8f7aafbbfee82632196 | refs/heads/master | 2021-01-22T01:10:24.816886 | 2014-07-19T15:30:03 | 2014-07-19T15:30:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,472 | py | '''
A simple test for twentyfortyeight
'''
import poc_simpletest
from TwentyFortyEight import TwentyFortyEight
from TwentyFortyEight import DOWN, LEFT, RIGHT, UP
def merge_test(suite):
    '''
    Exercise the merge helper on a few representative rows.
    '''
    from TwentyFortyEight import merge
    cases = [
        ([2, 0, 2, 4], [4, 4, 0, 0]),
        ([0, 0, 2, 2], [4, 0, 0, 0]),
        ([2, 2, 0, 0], [4, 0, 0, 0]),
        ([2, 2, 2, 2], [4, 4, 0, 0]),
        ([8, 16, 16, 8], [8, 32, 8, 0]),
    ]
    for index, (line, expected) in enumerate(cases, start=1):
        suite.run_test(str(merge(line)), str(expected), "merge %d" % index)
def initial_test(suite):
    """
    Check the initial tile indices computed for each movement direction.
    """
    game = TwentyFortyEight(4, 4)
    expected = [[(0, 0), (0, 1), (0, 2), (0, 3)],
                [(3, 0), (3, 1), (3, 2), (3, 3)],
                [(0, 0), (1, 0), (2, 0), (3, 0)],
                [(0, 3), (1, 3), (2, 3), (3, 3)]]
    suite.run_test(str(game.get_direction()), str(expected), "initial 1")
def move_test(suite):
    """
    Exercise move() in every direction on a square board, plus reset().
    """
    game = TwentyFortyEight(4, 4)
    #First move and reset are checked individually
    game.set_grid([[2, 4, 2, 4], [0, 2, 16, 2], [4, 16, 2, 4], [2, 4, 2, 4]])
    game.move(DOWN)
    suite.run_test(str(game),
                   str([[0, 4, 0, 0], [2, 2, 2, 4], [4, 16, 16, 2], [2, 4, 4, 8]]),
                   "Move 1")
    game.reset()
    suite.run_test(str(game),
                   str([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]),
                   "move 2")
    #Remaining directions are table-driven: (direction, start, expected, label)
    scenarios = [
        (UP, [[8, 4, 0, 0], [2, 4, 2, 0], [4, 0, 4, 0], [2, 4, 2, 0]],
         [[8, 8, 2, 0], [2, 4, 4, 0], [4, 0, 2, 0], [2, 0, 0, 0]], "Move 3"),
        (LEFT, [[8, 8, 2, 0], [2, 4, 4, 0], [4, 0, 2, 0], [2, 0, 0, 2]],
         [[16, 2, 0, 0], [2, 8, 0, 0], [4, 2, 0, 0], [4, 0, 0, 0]], "Move 4"),
        (RIGHT, [[16, 2, 0, 0], [2, 8, 0, 0], [4, 2, 0, 2], [4, 0, 0, 0]],
         [[0, 0, 16, 2], [0, 0, 2, 8], [0, 0, 4, 4], [0, 0, 0, 4]], "Move 4"),
    ]
    for direction, start_grid, expected, label in scenarios:
        game.set_grid(start_grid)
        game.move(direction)
        suite.run_test(str(game), str(expected), label)
def move_rectangle_test(suite):
    """
    Exercise move() on a non-square (4x5) board.
    """
    game = TwentyFortyEight(4, 5)
    checks = [
        (UP,
         [[0, 2, 4, 2, 4], [2, 2, 4, 0, 0], [2, 4, 0, 0, 0], [2, 2, 2, 0, 4]],
         [[4, 4, 8, 2, 8], [2, 4, 2, 0, 0], [0, 2, 0, 0, 0], [0, 0, 0, 0, 0]],
         "Move rectange 1"),
        (LEFT,
         [[4, 4, 8, 2, 8], [2, 4, 2, 0, 0], [0, 2, 0, 0, 0], [0, 0, 0, 0, 0]],
         [[8, 8, 2, 8, 0], [2, 4, 2, 0, 0], [2, 0, 0, 0, 0], [0, 0, 0, 0, 0]],
         "Move rectange 2"),
    ]
    for direction, start_grid, expected, label in checks:
        game.set_grid(start_grid)
        game.move(direction)
        suite.run_test(str(game), str(expected), label)
    #Smoke check only: a saturated checkerboard must not crash on UP
    game.set_grid([[8, 16, 8, 16, 8],
                   [16, 8, 16, 8, 16],
                   [8, 16, 8, 16, 8],
                   [16, 8, 16, 8, 16]])
    game.move(UP)
def new_tile_test():
    """
    Spawn two tiles on an empty board and print the board after each spawn.

    Tile placement is random, so there are no assertions; the output is for
    visual inspection only.
    """
    game = TwentyFortyEight(4, 4)
    game.reset()
    game.new_tile()
    # print() with parentheses behaves identically under Python 2 and also
    # keeps the module importable under Python 3 (the bare `print game`
    # statement is a SyntaxError there).
    print(game)
    game.new_tile()
    print(game)
def run_test():
    """
    Drive the informal test suite and print a summary.
    """
    suite = poc_simpletest.TestSuite()
    #Order matters only for the report numbering; kept as originally written
    for check in (merge_test, initial_test, move_test, move_rectangle_test):
        check(suite)
    suite.report_results()
    new_tile_test()
# Run the informal tests only when executed as a script.
if __name__ == '__main__':
    run_test()
| [
"honestmanxin@gmail.com"
] | honestmanxin@gmail.com |
47d989387223d5588151d939827027b00f77b308 | 42683813d6fcb6df11d24e851d411633ab200a67 | /regression/__init__.py | 27bcac306ec8a347d6571a77521ca9c03efc5f76 | [
"MIT"
] | permissive | sahitpj/machine-learning | aa1ac76ee31615872e0a9ae1c9c41e0be59a5423 | 2ce5a337ec432daff64a216df6847ef834bcb8d7 | refs/heads/master | 2020-04-16T10:30:42.873896 | 2019-04-22T14:47:54 | 2019-04-22T14:47:54 | 165,506,786 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 466 | py | from .linear_np import normalEquationRegression, gradientDescentRegression, gradientDescentAutogradRegression
from .linear_torch import TorchNormalEquationRegression, TorchGradientDescentRegression, TorchGradientDescentAutogradRegression
from .cordinate import coordinateDescent
from .lasso import coordinateDescentLASSO, coordinateDescentLASSOAutoGrad
from .ridge import normalEquationRidgeRegression, TorchridgeRegression
from .sgd import stochasticGradientDescent | [
"jayakrishna.sahit@iitgn.ac.in"
] | jayakrishna.sahit@iitgn.ac.in |
3aad9a6ae36c97d0c6944d6c0ef7981fcbd7a0ee | 5a9cad0e55708a25aa77296fba867cd06bf80a20 | /day7/handy_haversacks_part2.py | d84c36a405f6852e4c124859b3013e317d073435 | [] | no_license | PlaybackSwede/advent-of-code-2020 | d10420eff54fe390e88fdaa72764b555c36d7d4b | 3c805715e0f3677ca55424ef709d82f8139a6f09 | refs/heads/master | 2023-02-13T17:06:50.951304 | 2020-12-19T22:51:39 | 2020-12-19T22:51:39 | 320,923,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,144 | py | import functools
#Define global index
bag_tree_index = {}
def recursive_bags_in_bag(bag_key):
bag_nbr_pairs = bag_tree_index[bag_key].items()
if len(bag_nbr_pairs) == 0:
return 1
nbr_bags = 0
for key, nbr in bag_nbr_pairs:
if len(bag_tree_index[key]) == 0:
nbr_bags += recursive_bags_in_bag(key)*nbr
else:
nbr_bags += nbr + recursive_bags_in_bag(key)*nbr
return nbr_bags
file = open('input.txt', 'r')
lines = file.readlines()
i = 0
for line in lines:
words = line.strip().split("bags contain")
color_key = words[0].strip()
bags_str = words[1].strip()
if not bag_tree_index.get(color_key):
bag_tree_index[color_key] = {}
if bags_str == "no other bags.":
continue
for bag_str in bags_str.split(", "):
color_bags = bag_str.strip().strip('.').strip('bag').strip('bags').split(' ')
bag_nbr = int(color_bags[0])
bag_color_key = color_bags[1] + ' ' + color_bags[2]
bag_tree_index[color_key][bag_color_key] = bag_nbr
print(recursive_bags_in_bag('shiny gold'))
| [
"pontus.ovhagen@tidal.com"
] | pontus.ovhagen@tidal.com |
a4430652ef1ea303385485cb2b86a2b526ff541a | d4e05a65f18865b47573359ea6865fe840e09c58 | /1225133-95.py | e8dd64d29edfc278f4a1df433e7c116b1debe534 | [] | no_license | ZlatanTheGreat/HelloZlatan | c5ed4bb60f1b521f73085982d948d51e538a7123 | f139a1365cfd6d18be70a75b93678ba74cbb4a34 | refs/heads/master | 2021-05-11T17:22:06.903054 | 2018-12-19T18:04:30 | 2018-12-19T18:04:30 | 117,795,044 | 0 | 0 | null | 2018-10-07T20:46:11 | 2018-01-17T06:33:43 | null | UTF-8 | Python | false | false | 427 | py | import math
class Circle:
def __init__(self, radius):
Circle.radius = radius
@classmethod
def circumference(cls, radius):
circumference = (2*radius) * math.pi
print(f"Circumference = {round(circumference)}")
@classmethod
def area(cls, radius):
area = math.pi * (radius**2)
print(f'Area = {round(area)}')
Circle.circumference(10)
Circle.area(10) | [
"noreply@github.com"
] | noreply@github.com |
272f213f76b5bb604a5b11e9b98f8b174098e41b | 4112399d77c8cd8d699d5053017a55e27250268c | /food_picker/migrations/0001_initial.py | b87e4e3413359cbd115fd7771be5cc74ba09cec2 | [] | no_license | Bencabe/food_picker | 8d76be7b32cdbe09b69e3de5cfb63d7d998d389c | 923d5c0bbcc4df791cf06dab7fe9ea0a3366a204 | refs/heads/main | 2023-03-31T01:53:28.711706 | 2021-03-31T14:49:11 | 2021-03-31T14:49:11 | 339,167,139 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,543 | py | # Generated by Django 3.1.1 on 2021-02-15 18:02
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Ingredient',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50, unique=True)),
('calories_per_unit', models.DecimalField(decimal_places=2, default=0, max_digits=5)),
('protein_per_unit', models.DecimalField(decimal_places=2, default=0, max_digits=5)),
('carbs_per_unit', models.DecimalField(decimal_places=2, default=0, max_digits=5)),
('fat_per_unit', models.DecimalField(decimal_places=2, default=0, max_digits=5)),
('is_staple', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='Meal',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50, unique=True)),
('instructions', models.JSONField()),
('star_rating', models.IntegerField()),
('minutes', models.IntegerField()),
('ingredients', models.ManyToManyField(related_name='meal_ingredient', to='food_picker.Ingredient')),
],
),
]
| [
"bencabe93@gmail.com"
] | bencabe93@gmail.com |
592b00cfa6ac75662cbf56700da1692c4c8168b9 | 844e548c362184da0def9a0fe736c8c68b5d4893 | /venv/bin/wheel | ddbcc54161b3adc60b5a780ea889c92d702041a8 | [] | no_license | atallini/admin_facilito | 23604e0d758d6847134cb81b549234122b266ac9 | 88a1342ed4e969dc42e8e82517b8291f1280d848 | refs/heads/master | 2021-09-03T11:22:01.737512 | 2018-01-01T18:38:39 | 2018-01-01T18:38:39 | 115,933,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 241 | #!/home/anibal/admin_facilito/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from wheel.tool import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"anibal.tallini@gmail.com"
] | anibal.tallini@gmail.com | |
bb48285834ee29beb7a898493b7d407dafdf7dd6 | 8c7a187ebfe858ff3f840602585d166b29fce576 | /appstore/regulate_underscores.py | db0232fa39df3b96f78c3dc29fa2e15e90914bc1 | [] | no_license | ohannes/pythonScripts | b756faa2e6d5314cb04c7afc0ca07f69027f59b2 | 5249b2735d8b2a9a2c6ad8a1ae625cb47f50d0b5 | refs/heads/master | 2020-04-06T04:20:29.565042 | 2015-07-19T17:40:39 | 2015-07-19T17:40:39 | 34,119,366 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 821 | py | import sys
sys.path.append(os.environ["ohannes"])
from ohannes import *
input_file = getStrArg(1, 1)
output_file = input_file + ".regulated"
lines = getFileLines(input_file)
ftw = open(output_file, write_mode)
for line in lines:
sharp_found = False
equal_found = False
line_regulated = False
if not "=>" in line or not "#" in line or not "_" in line:
ftw.write(line)
continue
index = 0
while True:
if index == len(line) - 1:
ftw.write(line[index])
break
if line[index] == "#":
sharp_found = True
if line[index] == "=" and line[index+1] == ">":
equal_found = True
if line[index] == "_" and (not sharp_found) and equal_found and (not line_regulated):
ftw.write(line[index+1].upper())
index += 1
line_regulated = True
else:
ftw.write(line[index])
index += 1
ftw.close()
| [
"yasinyildiza@gmail.com"
] | yasinyildiza@gmail.com |
2e77842e863422f2ffdaefdc8d6d8126892ba1d3 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03347/s144374882.py | 8ce3352dfe431d952e676130950485ebdc55dc2e | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 751 | py | import sys,queue,math,copy,itertools,bisect,collections,heapq
def main():
sys.setrecursionlimit(10**7)
INF = 10**18
MOD = 10**9 + 7
LI = lambda : [int(x) for x in sys.stdin.readline().split()]
NI = lambda : int(sys.stdin.readline())
SI = lambda : sys.stdin.readline().rstrip()
N = NI()
A = [NI() for _ in range(N)]
ans = 0
cnt = 0
for i in range(N-1,-1,-1):
if cnt == 0:
ans += A[i]
cnt = A[i]
elif A[i] < cnt -1:
print(-1)
return
elif A[i] >= cnt:
ans += A[i]
cnt = A[i]
else:
cnt -= 1
if cnt > 0:
print(-1)
else:
print(ans)
if __name__ == '__main__':
main() | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
1e9a39eb7181f21d6cbbf89028fda4374c0c2886 | 75f13a7149741707cd827ed419a10a74aec355a7 | /CS 2043 - Unix Tools & Scripting/Project 4/ereader.py | d0658b58a06b10425b01d0751bd0b663aef5989a | [] | no_license | ava9/Class-Projects | 0a7a1d4c6f4c19bc4c7112b1120124a2bfe31781 | 9be7e6d9054e0d6d2eef21794222255617db1536 | refs/heads/master | 2020-12-24T15:31:27.747939 | 2015-04-01T03:46:14 | 2015-04-01T03:46:14 | 31,793,758 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,067 | py | #! /usr/bin/python
#display use case for user
print('Example case : python ereader.py [-n #someNumber] (n: next page, p: previous page, q: quit)')
print('User controls n: next page, p: previous page, q: quit (case sensative)')
import sys
import os
import hashlib
import re
import termios
import contextlib
#starting directory
startDirectory = os.getcwd()
#set current directory to home
os.chdir(os.path.expanduser("~"))
#key listener setup - taken from http://stackoverflow.com/questions/11918999/key-listeners-in-python (as mentioned in piazza post)
@contextlib.contextmanager
def raw_mode(file):
old_attrs = termios.tcgetattr(file.fileno())
new_attrs = old_attrs[:]
new_attrs[3] = new_attrs[3] & ~(termios.ECHO | termios.ICANON)
try:
termios.tcsetattr(file.fileno(), termios.TCSADRAIN, new_attrs)
yield
finally:
termios.tcsetattr(file.fileno(), termios.TCSADRAIN, old_attrs)
#main method
def main():
#first part: open correct line number and display file
#if ereader.py -n flag is given
if len(sys.argv) >= 3:
numLines = int(sys.argv[2])
inputFile = sys.argv[3]
#no flag is given
else:
inputFile = sys.argv[1]
numLines = 40
#compute md5 hash
fileHash = hashlib.md5(inputFile).hexdigest()
#first line of file
startLine = 0;
# (md5 hash in .reader_rc)? 1 : 0
exists = 0;
#open ~/.reader_rc if it exists
if os.path.isfile('.reader_rc'):
startFile = file('.reader_rc','r')
rcFile = startFile.readlines()
#searuserInputrcFile for file hash
for a in rcFile:
if re.search(fileHash, a):
#if found, startLine = rcFile line number
startLine = int(re.split(',', a, maxsplit = 1) [1])
exists = 1;
#close file (improve efficieny)
startFile.close()
#create.reader_rc
else:
open('.reader_rc', 'w+').close()
#hash not found
if exists == 0:
#add hash to .reader_rc
add = '\n'+ fileHash +','+ str(startLine)
with open('.reader_rc','a') as f:
f.write(add)
f.close()
#find text to display
display = open(startDirectory + "/" + inputFile,'r')
displayLines = display.readlines()
displayLinesTotal = len(displayLines)
display.close()
#display text
for a in range(startLine, (startLine + numLines), 1):
print(displayLines[a])
#second part: key listener to change text displayed (process user input)
# key listener
with raw_mode(sys.stdin):
try:
while True:
#find text to display
display = open(startDirectory + "/" + inputFile,'r')
displayLines = display.readlines()
display.close()
userInput= sys.stdin.read(1)
# if 'q' is pressed, quit
if userInput== 'q':
break
reader.close()
# if 'n' is pressed, next page
if userInput== 'n':
if (startLine + numLines) >= displayLinesTotal:
startLine = displayLinesTotal
else:
startLine = startLine + numLines;
#display text
for a in range((startLine), (startLine + numLines), 1):
print(displayLines[a])
#update .reader_rc
currentFile = file('.reader_rc','r')
rcFile = currentFile.readlines()
currentFile = file('.reader_rc','w')
for a in rcFile:
if re.search(fileHash, a):
currentFile.write(fileHash +','+ str(startLine) +'\n')
else:
currentFile.write(a)
# if 'p' is pressed, previous page
if userInput== 'p':
if (startLine - numLines) < 0:
startLine = 0;
else:
startLine = startLine - numLines
#display text
for a in range((startLine), (startLine + numLines), 1):
print(displayLines[a])
#update .reader_rc
currentFile = file('.reader_rc','r')
rcFile = currentFile.readlines()
currentFile = file('.reader_rc','w')
for a in rcFile:
if re.search(fileHash, a):
currentFile.write(fileHash +','+ str(startLine) +'\n')
else:
currentFile.write(a)
except (KeyboardInterrupt, EOFError):
pass
if __name__ == '__main__':
main()
| [
"spyguy101@gmail.com"
] | spyguy101@gmail.com |
e0406bccdd58cced9e2cf9f4510da9f8da2321cb | 1cde75aa1ae01e54484fd8df596ee1975b0a7a2d | /abstract_service/models.py | 0202fd04fd2b02f2095d2ce4cedf3aeff2117d7f | [] | no_license | dkeye/qr_service | 3ede8ff98d05c0decdf77d2e37483f7e6f86b93c | b9e994a8923b262d97508bed3d1b48de7722ae0f | refs/heads/master | 2023-08-04T19:29:05.664502 | 2021-09-21T18:46:26 | 2021-09-21T18:46:26 | 400,872,291 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | py | from sqlalchemy import Boolean, Column, Integer, String
from .database import Base
class Codes(Base):
__tablename__ = "codes"
id = Column(Integer, primary_key=True, index=True)
code = Column(String, unique=True, index=True)
is_activated = Column(Boolean, default=False)
| [
"ldifmo@gmail.com"
] | ldifmo@gmail.com |
7098f4dd04eee66744539b132a94f353fab0fbdd | 3373b2bbe6303dcee3ae7f7f3e715ce674878a7b | /packages/hyperk/wcsim_dev.py | 7187f72a10ce70577639daba6aa414e563e94670 | [
"MIT"
] | permissive | pgjones/nusoft | f3515a6e2fc90622638cde0b8712ba6fcea2aa8e | 442c7bca2f921892ecf9eb3ff6821e2a9da7b156 | refs/heads/master | 2020-09-12T21:44:54.453633 | 2014-10-03T20:22:09 | 2014-10-03T20:22:09 | 17,223,474 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,401 | py | #!/usr/bin/env python
#
# WCSimDev
#
# The HyperK WCSim development version
#
# Author P G Jones - 2014-06-20 <p.g.jones@qmul.ac.uk> : New file.
####################################################################################################
import nusoft.package.local as local_package
import os
import nusoft.envfile
class WCSimDev(local_package.LocalPackage):
""" The WCSimDev installation package.
:param _root: version of ROOT this is dependent on
:param _geant4: version of Geant4 this is dependent on
"""
def __init__(self, system, repository):
""" Initialise this wcsim installation package.
:param system: class that manages system commands
:type system: :class:`nusoft.system.System` instance
:param repository: local name of the repository the package is from
"""
super(WCSimDev, self).__init__("wcsim-dev", system, repository)
self._root = "root_v5.34.10"
self._geant4 = "geant4.9.4.p04"
self._clhep = "clhep-2.1.0.1"
def get_dependencies(self):
""" Return a list of dependency names
:returns: list of dependency package names
:rtype: list
"""
return ["make", "g++", "gcc", "ld", "python", "python-dev", self._root, self._geant4,
self._clhep]
def _download(self):
""" Git clone the wcsim repository file."""
self._system.git_clone("ssh://git@poset.ph.qmul.ac.uk/hk-WCSim", self.get_install_path())
def _install(self):
""" Write an environment file and install wcsim."""
# Now write the environment file
self.write_env_file()
commands = ["source " + os.path.join(self._system.get_install_path(), "env_wcsim-dev.sh"),
"cd " + self.get_install_path(),
"make rootcint",
"make "]
self._system.execute_commands(commands)
def write_env_file(self):
""" Write an environment file for this package."""
env_file = nusoft.envfile.EnvFile("#wcsim environment\n")
env_file.add_source(os.path.join(self._dependencies[self._root].get_install_path(), "bin"), "thisroot")
env_file.add_source(os.path.join(self._dependencies[self._geant4].get_install_path(),
"share/geant4-9.4.4/config"),
"geant4-9.4.4")
env_file.add_environment("CLHEP_BASE_DIR", self._dependencies[self._clhep].get_install_path())
env_file.add_environment("G4WORKDIR", os.path.join(self.get_install_path(), "exe"))
env_file.write(self._system.get_install_path(), "env_wcsim-dev")
def _update(self):
""" Update the git repository."""
if not self._system.git_update(self.get_install_path()):
raise Exception("Cannot update, repository has changes")
self._install() # Now reinstall (compile)
def _remove(self):
""" Remove the install directory."""
self._system.remove(self.get_install_path())
def _is_installed(self):
""" Check if root is installed by looking for the root executable in the bin directory.
:return: True if installed
"""
sys = os.uname()[0]
return False
# The versions of WCSimDev that can be installed (only one, WCSimDev)
# [Although potentially more if the user wants].
versions = [WCSimDev]
| [
"p.g.jones@qmul.ac.uk"
] | p.g.jones@qmul.ac.uk |
9b937c35f42ccd5bb2c64e7139a9e0d690ea887e | 232494ea6abe85c8751681ab19a6482b09baeb47 | /Scripts/dramarama/admin.py | 192408d2819fc93a2e1c6720372841b37fb6cb2f | [] | no_license | mjkcool/DramaRama-dev | 4cd9cda5767a724c7727021bbf4c641aa05b4ac9 | b9392c3401042c321b42e6865d66884992833af2 | refs/heads/master | 2023-03-24T08:24:34.374970 | 2021-03-25T00:58:17 | 2021-03-25T00:58:17 | 320,464,801 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 162 | py | from django.contrib import admin
from .models import Drama, Survey, R_Survey
admin.site.register(Drama)
admin.site.register(Survey)
admin.site.register(R_Survey) | [
"mjkimdelta@gmail.com"
] | mjkimdelta@gmail.com |
5287b4ef9ff7aa1f39aee1ce5d077e70afdb737e | cd0d86966c57e6883e7c2e000802a67122d0192a | /src/agents/random_agent.py | 3e25f4118e3d27d785903b0cf2265a916dd801e6 | [
"MIT"
] | permissive | Mithrandir2k18/seminar-paper-learning-via-competition | ff365677846680b249270bb6e70c99fb0db54d70 | e081f6ba89b8c3c50da218060dcd1ec0107e2dc0 | refs/heads/master | 2023-04-05T14:47:09.859776 | 2021-03-31T04:47:52 | 2021-03-31T04:47:52 | 353,218,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 578 | py | from typing import List
from environments.environment_abc import Action, State
from agents.agent_abc import Agent
import random
class RandomAgent(Agent):
def __init__(self,
player_id: int = -1,
agent_name: str = "RandomAgent"):
self.agent_name = agent_name
self.player_id = player_id
def get_action_choice(self,
reward: float,
current_state: State,
possible_actions: List[Action]) -> Action:
return random.choice(possible_actions)
| [
"alexander.zincke@gmail.com"
] | alexander.zincke@gmail.com |
6b9b0d35589596bc43d6d9a17bcfb43d86e17886 | 117e2fab53a39e14d4aa1c8c60d146c942118ac6 | /dev_support/settings.py | c12140cea7a5914d4b2237f490fb55bfbd730d6d | [] | no_license | j1210030/case-portal | 6e46ae3adf9c7d0905de27c78a4aba830d8eccb3 | 257ca1bdc7760db60a4c9102e59920574ec3975d | refs/heads/master | 2020-03-27T15:14:25.504719 | 2018-08-30T06:53:10 | 2018-08-30T06:53:10 | 146,705,839 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,762 | py | """
Django settings for dev_support project.
Generated by 'django-admin startproject' using Django 1.11.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
import time
import socket
import sys
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
#from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
SETTINGS_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_PATH = os.path.join(SETTINGS_DIR, os.pardir ,'dev_support')
#PROJECT_PATH = os.path.abspath(BASE_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'nvwd^)y1$rfck&ekqp07a%8^h6q^=^l9cng1r9$14-%v3ihe#@'
HOSTNAME = socket.gethostname()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_PATH = os.path.join(PROJECT_PATH, 'templates')
TEMPLATE_DIRS = (
#join(BASE_DIR, 'templates'),
(TEMPLATE_PATH),
##'/srv/www/goragaku/goragaku/templates'
)
ALLOWED_HOSTS = ['35.196.214.31','35.190.153.225', 'localhost', '127.0.0.1']
UPLOAD_PATH = '/srv/www/gcase_tok/dev_support' #os.path.join(PROJECT_PATH, 'upload')
MEDIA_ROOT = "%s/upload/" % PROJECT_PATH
MEDIA_URL = '/upload/'
LOGIN_URL = '/user/login/'
LOGIN_EXEMPT_URLS = (
r'^user/login/$',
r'^user/logout/$',
r'^admin/$'
)
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'gcase',
'django.contrib.humanize',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'dev_support.login_required_middleware.LoginRequiredMiddleware'
]
#TEMPLATE_CONTEXT_PROCESSORS = TEMPLATE_CONTEXT_PROCESSORS + (
# 'django.template.context_processors.debug',
# 'django.template.context_processors.request',
# 'django.contrib.auth.context_processors.auth',
#'django.contrib.messages.context_processors.messages',
#)
ROOT_URLCONF = 'dev_support.urls'
DATE_INPUT_FORMATS = ('%d/%m/%Y')
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [(TEMPLATE_PATH),],
'OPTIONS': {
'debug':DEBUG,
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.media'
],
},
},
]
WSGI_APPLICATION = 'dev_support.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
#'USER': 'school_db_user',
#'PASSWORD': 'nU6E7RE3',
#'HOST': '54.248.218.27',
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'gcase',
'USER': 'gcase_tok_user',
'PASSWORD': 'nU6E7RE3',
'HOST': 'localhost',
'PORT': '3306',
}
}
FILE_UPLOAD_HANDLERS = ("django_excel.ExcelMemoryFileUploadHandler",
"django_excel.TemporaryExcelFileUploadHandler")
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Tokyo'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_PATH = os.path.join(PROJECT_PATH,'gcase/static')
#STATIC_ROOT = '/Users/suhasg/Devel/python.proj/dev_support/gcase/static'
STATIC_ROOT = '/srv/www/gcase_tok/dev_support/gcase/static'
STATIC_URL = '/static/'
STATICFILES_DIRS = (('%s/gcase/assets' % PROJECT_PATH),)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
U_LOGFILE_SIZE = 1 * 1024 * 1024
U_LOGFILE_COUNT = 2
U_LOGFILE_APP1 = 'gcase'
#log_file_dir = '/Users/suhasg/Devel/python.proj/dev_support/logs/' #os.path.join(os.path.dirname(PROJECT_PATH),'logs')
log_file_dir = os.path.join(os.path.dirname(PROJECT_PATH),'logs/')
if not os.path.exists(log_file_dir):
os.makedirs(log_file_dir)
log_file = log_file_dir + "gcase.log"
sql_log_file = log_file_dir + "gcase_sql.log"
console_log_file = log_file_dir + "gcase_console.log"
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format' : "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s",
'datefmt' : "%d/%b/%Y %H:%M:%S"
},
},
'handlers': {
'null': {
'level':'DEBUG',
'class':'logging.NullHandler',
},
'logfile': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': log_file, #"/logs/admin_%d.log",
#'filename': PROJECT_PATH + "/logs/admin.log",
'maxBytes': U_LOGFILE_SIZE,
'backupCount': U_LOGFILE_COUNT,
'formatter': 'standard',
},
'logfile4sql': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': sql_log_file,
'maxBytes': U_LOGFILE_SIZE,
'backupCount': U_LOGFILE_COUNT,
'formatter': 'standard',
},
'console':{
'level':'INFO',
'class':'logging.StreamHandler',
'formatter': 'standard'
},
#'console':{
# 'level':'INFO',
# 'class':'logging.handlers.RotatingFileHandler',
# 'filename': console_log_file,
#'maxBytes': U_LOGFILE_SIZE,
#'backupCount': U_LOGFILE_COUNT,
#'formatter': 'standard',
#},
},
'loggers': {
'django': {
'handlers':['console'],
'propagate': True,
'level':'WARN',
},
'django.db.backends': {
'handlers': ['logfile4sql'],
'level': 'DEBUG',
'propagate': False,
},
'gcase': {
'handlers': ['console', 'logfile'],
'level': 'DEBUG',
},
}
}
from .conf.constants import *
| [
"noreply@github.com"
] | noreply@github.com |
fdf34b26ad998c9cef7cdd3a4ce2b6e71b2497c1 | 8e466a28c04cbc682a3b5ab24918beeb09a8ee7f | /deeplearn/migrations/0002_auto_20190926_1159.py | a33d7a32ddecffe6c74fa8f026b914499b42bbd1 | [] | no_license | vaibhav1202/VehicleClassification | eefe95d939b68ba4f80cec40860f73d47a88b409 | db966edfd5ab5a5b6d3d9117aa286af375b6d2e5 | refs/heads/master | 2020-08-02T05:11:18.438230 | 2019-09-27T06:00:43 | 2019-09-27T06:00:43 | 211,244,957 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 459 | py | # Generated by Django 2.2.4 on 2019-09-26 11:59
import deeplearn.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('deeplearn', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='deep',
name='img',
field=models.ImageField(null=True, upload_to='images\\', validators=[deeplearn.models.validate_img]),
),
]
| [
"agrawalvaibhav12@gmail.com"
] | agrawalvaibhav12@gmail.com |
fe53f6f0a9858f39c58fbbdfc21e6f2f9aa5bc41 | 546d0ce70b8f5b479a493e799846082bbe686bb5 | /symmetric-ciphers/substitution-cipher.py | a1b45f491c9f65e75d5813b729c4bd66212e4426 | [
"MIT"
] | permissive | ninobaldo/cryptographyI | f19ba27971bdd2258d65b5847cb87c7ce3c1d8e8 | 9828cab1b4c69d8ed2a42dc0867f11f4ff3bc8d6 | refs/heads/master | 2021-01-19T16:57:38.379041 | 2014-09-11T19:39:33 | 2014-09-11T19:39:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | def encrypt(plaintext, key):
CIPHER = list(key)
ALPHABETIC = list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
ciphertext = ''
for i, l in enumerate(plaintext):
index = ALPHABETIC.index(l);
ciphertext += CIPHER[index];
print plaintext, ' ---> ', ciphertext
def decrypt(ciphertext, key):
CIPHER = list(key)
ALPHABETIC = list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
plaintext = ''
for i, l in enumerate(ciphertext):
index = CIPHER.index(l);
plaintext += ALPHABETIC[index];
print ciphertext, ' ---> ', plaintext
encrypt('HELLOWORLD', "ZEBRASCDFGHIJKLMNOPQTUVWXY")
decrypt('DAIILVLOIR', "ZEBRASCDFGHIJKLMNOPQTUVWXY") | [
"ninobaldo@gmail.com"
] | ninobaldo@gmail.com |
17ea9fd1916009532f486f0223fe33c3a6f09caa | aabed3688686d034dff01b7153b7ec8a6af42d4c | /python_fundamentals-master/01_python_fundamentals/01_01_run_it.py | 1f671f1f74435f1ac45b9cb2368a4050b85caffb | [] | no_license | jorien-witjas/python-labs | da83a69d0103e6ac7f7605173fd46b54b23cb91a | c79ff093a7ae61ad50760adae30de972a7470e47 | refs/heads/master | 2023-06-16T20:55:03.896614 | 2021-07-06T14:47:37 | 2021-07-06T14:47:37 | 363,151,987 | 0 | 0 | null | 2021-07-06T14:47:38 | 2021-04-30T13:37:33 | Python | UTF-8 | Python | false | false | 613 | py | '''
1 - Write and execute a script that prints "hello world" to the console.
2 - Using the interpreter, print "hello world!" to the console.
3 - Explore the interpreter.
- Execute lines with syntax error and see what the response is.
* What happens if you leave out a quotation or parentheses?
* How helpful are the error messages?
- Use the help() function to explore what you can do with the interpreter.
For example execute help('print').
press q to exit.
- Use the interpreter to perform simple math.
- Calculate how many seconds are in a year.
'''
print("Hello world") | [
"jorienwitjas@MacBook-Pro-van-JH.local"
] | jorienwitjas@MacBook-Pro-van-JH.local |
f3b7f0280900dbb82a53d5f1149b6cf9d643ef65 | 0951918d92e64464bf56a059f743d4986a2977fb | /dusk/ssm.py | fb3273c0b967d09c9c674bb315962afc1f1e65dc | [] | no_license | raids/dusk | 5150d2c5c536b1b1262fdef81200f63410cf66ff | f6ab0a17b16906a68a8021ba1b9825ff487b5095 | refs/heads/master | 2021-01-19T23:39:52.539923 | 2017-04-23T15:52:10 | 2017-04-23T15:52:10 | 89,001,389 | 1 | 0 | null | 2017-04-23T15:50:16 | 2017-04-21T16:04:18 | Python | UTF-8 | Python | false | false | 250 | py | # -*- coding: utf-8 -*-
"""
ssm run the EC2 Systems Manager on the target instance.
"""
import contextlib
def run(instance_id):
# Run SSM
pass
@contextlib.contextmanager
def ssm_doc():
# Create then delete the SSM document
pass
| [
"jroutley@gmail.com"
] | jroutley@gmail.com |
303766a3257634ef8790974fe507abbdabd88b1b | 2853e675fe8e83925e6318310e5239da0bbb3f73 | /runMES/MQTT/mq_qry_lot_record_srv.py | 997d576c5db27f3059bee1c1ea8700d1f9fd7960 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | runMES/runMES_1908.2 | 60f6272ad543609fd67411f760e4712b628fd19d | acae4bb86acf62206d6e26b82144c68677a893f9 | refs/heads/master | 2020-07-26T23:56:09.393975 | 2019-10-01T09:09:16 | 2019-10-01T09:09:16 | 208,802,769 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,650 | py | import paho.mqtt.client as mqtt
import logging
import ast
import time
from runMES import trans
import threading
from MQTT import log_EAP_IF
mylog=logging.getLogger('EAP')
subscribe_topic="runMES/qry_lot_record_srv"
srv_name='mq_qry_lot_record_srv'
def synchronized(func):
func.__lock__=threading.Lock()
def synced_func(*args,**kws):
with func.__lock__:
return func(*args,**kws)
return synced_func
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client,userdata,flags,rc):
# print("Connected with result code "+str(rc))
#log_EAP.to_debug({'MQTT':srv_name,'STATUS':'on_connect','RC':rc,'CLIENT':client,'USERDATA':userdata,'FLAGS':flags})
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
try:
client.subscribe(subscribe_topic)
except Exception as e:
mylog.exception(e)
mylog.error({'MQTT':srv_name,'STATUS':'subscribe','ERR':e})
# The callback for when a PUBLISH message is received from the server.
def on_message(client,userdata,msg):
# print(msg.topic+" "+str(msg.payload))
try:
mylog.info({'MQTT':srv_name,'STATUS':'on-message','TOPIC':msg.topic,'MSG':msg.payload})
payload=bytes.decode(msg.payload)
#payload=msg.payload.decode('utf8')
log_EAP_IF.to_debug({'MQTT':'mq_qry_lot_record_srv-on_message','payload bytes decode':payload})
d=ast.literal_eval(payload)
#log_EAP.to_debug({'d':d})
tid=d['TID_TXT']
rtn=d['RTN_TXT']
step=d['STEP_TXT']
op=d['OP_TXT']
log_EAP_IF.to_debug({'MQTT':srv_name,'STATUS':'on-message','TID':tid,'RTN':rtn,'STEP':step,'OP':op})
# qry_lot_record(step_txt,op_txt)
reply=trans.qry_lot_record(step,op)
msg={'TID_TXT':tid,'RTN_TXT':rtn,'RPY_TXT':reply}
mylog.info({'MQTT':srv_name,'STATUS':'tns reply','msg':msg})
client.publish(rtn,str(msg))
time.sleep(0.1)
except Exception as e:
mylog.exception(e)
mylog.error({'MQTT':'mq_qry_lot_record_srv','ERR':e})
@synchronized
def main():
log_EAP_IF.to_info({'MQTT':srv_name,'STATUS':'active'})
try:
client=mqtt.Client(client_id=srv_name)
client.on_connect=on_connect
client.on_message=on_message
client.connect("localhost",1883,60)
# Blocking call that processes network traffic, dispatches callbacks and
# handles reconnecting.
# Other loop*() functions are available that give a threaded interface and a
# manual interface.
client.loop_forever()
except Exception as e:
mylog.exception(e)
mylog.error({'MQTT':srv_name,'STATUS':'loop','ERR':e})
if __name__=='__main__':
main()
| [
"joshua.chin@steptech.io"
] | joshua.chin@steptech.io |
1df816ad8cfee83dae7e3cb566b5599ecf231cfc | c663559b30712907ccf7dd8acb078718e8579bbc | /week5/listsum.py | 480fb72e1c4b7735bbfa46187bc18fd7e30672c6 | [] | no_license | Mly-T/MOOC_Data_structure_and_algorithm | 33aaf66dc6ccf4a2c6c6578e8b5f6453f2edd2d3 | cdca8340190136f577d02fc7237e70ffd3249517 | refs/heads/master | 2021-05-23T15:51:42.779577 | 2020-04-17T03:22:16 | 2020-04-17T03:22:16 | 253,366,334 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 157 | py | def listsum(numList):
if len(numList) == 1:
return numList[0]
else:
return numList[0] + listsum(numList[1:])
print(listsum([1,2,3])) | [
"lin_he_01@163.com"
] | lin_he_01@163.com |
7bc3b3a1834c1f657039fcff98ce912abb2f8078 | 301d74cbe63ecff72a942e04ff77c1165dd02f9f | /tests/test_crawler_process.py | 5c793a1c282985813ab0e760de8a5f64cfdaf4b0 | [
"BSD-3-Clause"
] | permissive | wusir2001/galaxy | dd10864e48c4c303fa3562b7f590d4bd45b3b52e | cb66633132a8c1affd8f103c466589d392409361 | refs/heads/master | 2020-05-24T20:04:25.090846 | 2018-06-12T13:37:59 | 2018-06-12T13:37:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,733 | py | # -*- coding: utf-8 -*-
from mock import MagicMock
from scrapy import signals
from twisted.internet.defer import Deferred
from twisted.trial import unittest
from rest.core import CrawlManager, GalaxyCrawlerProcess
from .spiders import MetaSpider
from .utils import get_settings
class CralwerProcessTestCase(unittest.TestCase):
    """Unit tests for GalaxyCrawlerProcess signal wiring.

    (Class name typo is kept for backward compatibility with test runners
    that select tests by name.)
    """
    def _mock_method(self, obj, method):
        # Replace *method* on *obj* with a zero-arg MagicMock so the test can
        # count invocations; fail fast if the attribute does not exist.
        msg = "can't mock, class {} doesn't have method {}".format(
            obj.__class__.__name__, method)
        assert hasattr(obj, method), msg
        setattr(obj, method, MagicMock(spec=lambda: None))
    def test_signals(self):
        """Need to be sure that all signals are bind to appropriate handlers
        right after crawler is created.
        """
        crawl_manager = CrawlManager('test', {'url': 'http://localhost'})
        # (scrapy signal name, CrawlManager handler name) pairs under test.
        signals_and_handlers = [
            ('item_scraped', 'get_item'),
            ('item_dropped', 'collect_dropped'),
            ('spider_idle', 'spider_idle'),
            ('spider_error', 'handle_spider_error'),
            ('request_scheduled', 'handle_scheduling'),
        ]
        for _, handler in signals_and_handlers:
            self._mock_method(crawl_manager, handler)
        settings = get_settings()
        crawler_process = GalaxyCrawlerProcess(settings, crawl_manager)
        dfd = crawler_process.crawl(MetaSpider)
        self.assertIsInstance(dfd, Deferred)
        crawler = crawl_manager.crawler
        # Fire each signal once and check exactly one handler call resulted.
        for signal, handler in signals_and_handlers:
            crawler.signals.send_catch_log(
                signal=getattr(signals, signal), spider=crawler.spider)
            handler_mock = getattr(crawl_manager, handler)
            # assertEqual replaces the deprecated assertEquals alias.
            self.assertEqual(handler_mock.call_count, 1)
| [
"markhuyong@gmail.com"
] | markhuyong@gmail.com |
68d8c35e5fbad07bc4f7755a167a5ce85247e30e | 3ce592352627591346ea33ea0c2665ad879414e2 | /References/search/3-3.soduku.py | 8c6629c01cce7fc88632fdba5af2f6689c63a47c | [
"MIT"
] | permissive | royqh1979/python_libs_usage | 113df732ef106f4a5faae1343493756fd703c8c0 | 57546d5648d8a6b7aca7d7ff9481aa7cd4d8f511 | refs/heads/master | 2021-04-16T18:14:43.835482 | 2021-01-11T03:55:25 | 2021-01-11T03:55:25 | 249,374,754 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,139 | py | """
数独求解
使用简化的启发式回溯搜索
使用递归实现
每次优先尝试填写可行数字最少的格子
"""
import copy
from dataclasses import dataclass, field
from typing import Set

import numpy as np
from easygraphics import *
FONT_WIDTH = 30
BOARD_TOP = 10
BOARD_LEFT = 10
SQUARE_WIDTH = 50
SPEED = 100
# The board: declared 10x10 for convenience; only [1][1]-[9][9] is used.
board = np.zeros((10, 10),dtype="int32")
# Sets of digits already used per row / column / 3x3 block (index 0 unused).
cols = [set() for i in range(10)] # digits used in each column
rows = [set() for i in range(10)] # digits used in each row
blks = [set() for i in range(10)] # digits used in each 3x3 block
# ---- drawing helpers ----
def draw_number_at(i, j, number, color):
    """
    Draw a number at cell(i,j) with the specified color
    :param i: the row
    :param j: the column
    :param number: the number
    :param color: the color
    """
    left = BOARD_LEFT + (j - 1) * SQUARE_WIDTH
    top = BOARD_TOP + (i - 1) * SQUARE_WIDTH
    set_color(color)
    if number != 0:
        draw_rect_text(left + 5, top + 5, FONT_WIDTH, FONT_WIDTH, number)
    else:
        # 0 means "empty": blank the cell by painting a white rectangle.
        set_color(Color.WHITE)
        fill_rect(left+1, top+1, left + SQUARE_WIDTH-2, top + SQUARE_WIDTH-2)
def draw_board():
    """Redraw the full grid, every digit currently on the board (in red),
    and the heavy borders of the nine 3x3 blocks."""
    clear_device()
    for i in range(1, 10):
        for j in range(1, 10):
            left = BOARD_LEFT + (j - 1) * SQUARE_WIDTH
            top = BOARD_TOP + (i - 1) * SQUARE_WIDTH
            set_color(Color.LIGHT_GRAY)
            rect(left, top, left + SQUARE_WIDTH, top + SQUARE_WIDTH)
            draw_number_at(i, j, board[i][j], Color.RED)
    # Draw the heavy borders of the nine 3x3 blocks.
    set_color(Color.BLACK)
    for i in range(1, 4):
        for j in range(1, 4):
            left = BOARD_LEFT + (j - 1) * 3 * SQUARE_WIDTH
            top = BOARD_TOP + (i - 1) * 3 * SQUARE_WIDTH
            rect(left, top, left + 3 * SQUARE_WIDTH, top + 3 * SQUARE_WIDTH)
def init():
    """Open the 800x600 drawing window and set global rendering defaults."""
    init_graph(800, 600)
    set_color(Color.BLACK)
    set_background_color(Color.WHITE)
    set_line_width(2)
    set_fill_color(Color.WHITE)
    set_render_mode(RenderMode.RENDER_MANUAL)
    set_font_size(FONT_WIDTH)
DATA_FILE = "10soduku.board"
# 候选格子, canPut[n]=1表示该格可以放数字n,否则不行
@dataclass()
class CandiateSquare:
    """A candidate cell: coordinates plus the digits it may still take.

    ``possibles`` was originally a plain class attribute, i.e. one set object
    shared by every instance; it is now a proper per-instance field.
    """
    x: int = 0
    y: int = 0
    possibles: Set[int] = field(default_factory=set)
def which_block(i, j):
    """
    Return the index (1-9) of the 3x3 block containing cell (i, j).
    Blocks are numbered left to right, top to bottom.
    :param i: row of the cell (1-9)
    :param j: column of the cell (1-9)
    :return: block index in 1..9
    """
    band = (i - 1) // 3    # which horizontal band of rows (0-2)
    stack = (j - 1) // 3   # which vertical stack of columns (0-2)
    return band * 3 + stack + 1
def tag(i, j, number):
    """
    Mark *number* as used in the row, column and 3x3 block of cell (i, j).
    :param i: row of the cell
    :param j: column of the cell
    :param number: digit written into the cell
    """
    rows[i].add(number)
    cols[j].add(number)
    block = which_block(i,j)
    blks[block].add(number)
def untag(i, j, number):
    """
    Remove the "used" mark for *number* from the row, column and 3x3 block
    of cell (i, j).  (Raises KeyError if the digit was never tagged.)
    :param i: row of the cell
    :param j: column of the cell
    :param number: digit that was written in the cell
    """
    rows[i].remove(number)
    cols[j].remove(number)
    block = which_block(i,j)
    blks[block].remove(number)
def fill(i, j, number):
    """
    Write *number* into cell (i, j) and update the bookkeeping sets.
    :param i: row of the cell
    :param j: column of the cell
    :param number: digit to write
    """
    board[i][j] = number
    tag(i, j, number)
def unfill(i, j):
    """
    Clear cell (i, j) (set it back to 0) and update the bookkeeping sets.
    :param i: row of the cell
    :param j: column of the cell
    """
    number = board[i][j]
    untag(i, j, number)
    board[i][j] = 0
def load_board(boardFile):
    """
    Read the initial sudoku layout from a data file.

    Each line has the form "row,col,value"; malformed lines are skipped.
    On IOError an error message is drawn on screen instead of raising.
    NOTE(review): this rebinds the module-level *board* (a numpy array at
    start-up) to a plain nested list -- confirm that is intentional.
    :param boardFile: path of the data file
    """
    global board
    try:
        with open(boardFile, mode="r") as file:
            board = [ [0]*10 for i in range(10)]
            for line in file:
                line = line.strip()
                numbers = line.split(',')
                if len(numbers) != 3:
                    continue
                i, j, k = int(numbers[0]), int(numbers[1]), int(numbers[2])
                board[i][j] = k
    except IOError :
        clear_device()
        draw_rect_text(10, 500, 700, 50, f"无法打开文件{boardFile}")
def count_unsolved():
    """Return how many cells on the board still need a digit (value 0)."""
    return sum(
        1
        for row in range(1, 10)
        for col in range(1, 10)
        if board[row][col] == 0
    )
def can_fill(i, j, number):
    """
    Return True when *number* may legally be written into cell (i, j),
    i.e. it is not already used in the cell's row, column or 3x3 block.
    :param i: row of the cell
    :param j: column of the cell
    :param number: digit to test
    """
    used = (number in rows[i]
            or number in cols[j]
            or number in blks[which_block(i, j)])
    return not used
def calculatePossible(i, j):
    """
    Return the set of digits (1-9) that can legally go into cell (i, j).
    :param i: row of the cell
    :param j: column of the cell
    """
    return {digit for digit in range(1, 10) if can_fill(i, j, digit)}
def findSureSquareByBlock():
    """
    Elimination rule 1: for every digit, look at each 3x3 block; if the digit
    has exactly one legal cell in a block, that placement is forced.
    Works on deep copies of the bookkeeping sets so it can also propagate
    "pointing" information (digit confined to one row/column of a block)
    without touching the real solver state.
    :return: (number, row, col) of a forced placement, or (None, None, None).
    """
    for number in range(1,10):
        in_rows = copy.deepcopy(rows)
        in_cols = copy.deepcopy(cols)
        in_blks = copy.deepcopy(blks)
        while True:
            # print(in_rows)
            # print(in_cols)
            # print(in_blks)
            found_one_row = False # digit *number* confined to one row of some block
            found_one_col = False # digit *number* confined to one column of some block
            for block in range(1,10):
                if number not in in_blks[block]:
                    start_row = ((block-1) // 3 ) * 3 + 1
                    start_col = (block-1) % 3 * 3 +1
                    # sanity check: the computed corner must map back to *block*
                    if block != which_block(start_row,start_col):
                        print(number,block,start_row,start_col,which_block(start_row,start_col))
                    can_rows = [] # rows of this block where *number* could go
                    can_cols = [] # columns of this block where *number* could go
                    for i in range(3):
                        for j in range(3):
                            row=start_row+i
                            col=start_col+j
                            if (board[row][col]==0) and (number not in in_rows[row]) and (number not in in_cols[col]):
                                if row not in can_rows:
                                    can_rows.append(row)
                                if col not in can_cols:
                                    can_cols.append(col)
                    # print(number,block,can_rows,can_cols)
                    if len(can_rows)==1 and len(can_cols)==1: # unique cell: forced placement
                        row=can_rows[0]
                        col=can_cols[0]
                        return number,row,col
                    # NOTE(review): when the digit is confined to a single row
                    # (or column), the block is marked as resolved in the local
                    # copies to drive further propagation -- confirm intent.
                    if len(can_rows)==1:
                        found_one_row = True
                        row = can_rows[0]
                        in_blks[block].add(number)
                        in_rows[row].add(number)
                    if len(can_cols)==1:
                        found_one_col = True
                        col = can_cols[0]
                        in_blks[block].add(number)
                        in_cols[col].add(number)
            if not found_one_row and not found_one_col:
                break
    return None,None,None
def findSureSquareByRow():
    """
    Elimination rule 2: for every digit, check each row; if the digit has
    exactly one legal cell in that row, that placement is forced.
    :return: (number, row, col) of a forced placement, or (None, None, None).
    """
    for number in range(1, 10):
        for row in range(1,10):
            if number not in rows[row]:
                can_cols = []
                for j in range(1,10):
                    block = which_block(row,j)
                    if number not in cols[j] and number not in blks[block] and board[row][j]==0:
                        can_cols.append(j)
                if len(can_cols)==1: # the digit fits in exactly one column of this row
                    col=can_cols[0]
                    return number,row,col
    return None, None, None
def findSureSquareByCol():
    """
    Elimination rule 3: for every digit, check each column; if the digit has
    exactly one legal cell in that column, that placement is forced.
    :return: (number, row, col) of a forced placement, or (None, None, None).
    """
    for number in range(1, 10):
        for col in range(1, 10):
            if number not in cols[col]:
                can_rows = []
                for i in range(1, 10):
                    block = which_block(i, col)
                    if number not in rows[i] and number not in blks[block] and board[i][col]==0:
                        can_rows.append(i)
                if len(can_rows) == 1: # the digit fits in exactly one row of this column
                    row=can_rows[0]
                    return number,row,col
    return None,None,None
def solve(unsolved):
    """
    Recursively fill the board; *unsolved* is the number of empty cells left.
    Applies the three deterministic elimination rules first, then falls back
    to backtracking on the cell with the fewest candidate digits.  Every
    placement is animated on screen and undone on backtrack.
    :return: True once the board is complete, False if this branch fails.
    """
    if unsolved == 0:
        return True
    # throttle drawing for the on-screen animation
    delay_fps(SPEED)
    number,row,col=findSureSquareByBlock()
    if number is not None:
        # set_fill_color("white")
        # fill_rect(500,10,800,80)
        # draw_text(500, 40, f"规则1 {row},{col}只能填{number} {board[row][col]}")
        # pause()
        fill(row, col, number)
        draw_number_at(row, col, number, Color.BLACK)
        if solve(unsolved - 1):
            return True
        unfill(row, col)
        draw_number_at(row, col, 0, Color.BLACK)
        return False
    number,row,col=findSureSquareByRow()
    if number is not None:
        # set_fill_color("white")
        # fill_rect(500,10,800,80)
        # draw_text(500, 40, f"规则2: {row},{col}只能填{number} {board[row][col]}")
        # pause()
        fill(row, col, number)
        draw_number_at(row, col, number, Color.BLACK)
        if solve(unsolved - 1):
            return True
        unfill(row, col)
        draw_number_at(row, col, 0, Color.BLACK)
        return False
    number,row,col=findSureSquareByCol()
    if number is not None:
        # set_fill_color("white")
        # fill_rect(500,10,800,80)
        # draw_text(500, 40, f"规则3: {row},{col}只能填{number} {board[row][col]}")
        # pause()
        fill(row, col, number)
        draw_number_at(row, col, number, Color.BLACK)
        if solve(unsolved - 1):
            return True
        unfill(row, col)
        draw_number_at(row, col, 0, Color.BLACK)
        return False
    # pick the empty cell with the fewest candidate digits
    possibles,c = findMinPossibles1()
    # refine with naked-pair elimination when the choice is not yet forced
    if len(c.possibles)!=1:
        # fill_rect(500,10,800,80)
        # draw_text(500, 40, f"规则4 {c.x},{c.y}只能填{c.possibles}")
        # pause()
    # else:
        possibles,c = findMinPossibles2(possibles,c)
    # # 尝试填写该格子
    # if len(c.possibles)==1:
    #     fill_rect(500,10,800,80)
    #     draw_text(500, 40, f"规则5 {c.x},{c.y}只能填{c.possibles}")
    #     pause()
    # else:
    #     fill_rect(500, 10, 800, 80)
    #     draw_text(500, 40, f"{c.x},{c.y}只能填{c.possibles}")
    #     pause()
    # when a real guess is required, show the candidates and wait for a key
    if len(c.possibles) > 1:
        fill_rect(500, 10, 800, 80)
        draw_text(500, 40, f"{c.x},{c.y}只能填{c.possibles}")
        pause()
    # backtracking: try every remaining candidate for the chosen cell
    for v in c.possibles:
        fill(c.x, c.y, v)
        draw_number_at(c.x, c.y, v, Color.BLACK)
        if solve(unsolved - 1):
            return True
        unfill(c.x, c.y)
        draw_number_at(c.x, c.y, 0, Color.BLACK)
    return False
def findMinPossibles1():
    """
    Find the empty cell with the fewest legal digits.
    :return: (possibles, c) where *possibles* is a 10x10 grid of candidate
        sets (None for filled cells) and *c* the chosen CandiateSquare.
        Returns (None, c) early as soon as a cell with fewer than two
        candidates is seen (forced move or dead end).
    """
    c = CandiateSquare()
    min_possible_count = 10
    possibles = [[None for i in range(10)] for j in range(10)]
    for i in range(1, 10):
        for j in range(1, 10):
            if board[i][j] == 0:
                possibles[i][j] = calculatePossible(i, j)
                if len(possibles[i][j]) < min_possible_count:
                    min_possible_count = len(possibles[i][j])
                    c.x = i
                    c.y = j
                    c.possibles = possibles[i][j]
                if len(c.possibles)<2:
                    return None,c
    return possibles,c
def findMinPossibles2(possibles,c):
    """
    Naked-pair elimination: when two cells in the same row can only take the
    same two digits, no other cell in that row may take those digits.
    NOTE(review): despite the row/column wording in the original comment,
    only the row of *c* is scanned here -- confirm whether columns were
    intended as well.
    :param possibles: 10x10 grid of candidate sets from findMinPossibles1
    :param c: the currently chosen candidate cell
    :return: the (possibly refined) (possibles, c) pair; returns early when a
        cell drops below two candidates.
    """
    if len(c.possibles)==2:
        while True:
            found = False
            row = c.x
            col = c.y
            for i in range(10):
                if i!=col and possibles[row][col] == possibles[row][i]:
                    for j in range(10):
                        if j !=i and j!=col and possibles[row][j] is not None:
                            possibles[row][j].difference_update(possibles[row][i])
                            found = True
                            if len(possibles[row][j])<2:
                                c.x=row
                                c.y=j
                                c.possibles = possibles[row][j]
                                return possibles,c
            if not found:
                break
    return possibles,c
def main():
    """Entry point: load the puzzle, render it, and solve it interactively.

    Loads the board from DATA_FILE, registers every pre-filled digit in the
    row/column/block bookkeeping sets, then runs the recursive solver while
    animating progress on screen.
    """
    init()
    load_board(DATA_FILE)
    draw_board()
    draw_rect_text(10, 550, 700, 50, "按任意键开始...")
    pause()
    fill_rect(10, 550, 710, 600)
    draw_rect_text(10, 550, 700, 50, "正在穷举...")
    # Register every pre-filled digit in the row/column/block sets.
    for i in range(1, 10):
        for j in range(1, 10):
            if board[i][j] != 0:
                tag(i, j, board[i][j])
    # NOTE: the original code had a second loop here that called
    # tag(i, j, 0) for every EMPTY cell, polluting the bookkeeping sets
    # with the digit 0.  It was inert (can_fill only tests 1-9) but wrong,
    # so it has been removed.
    solve(count_unsolved())
    fill_rect(10, 550, 710, 600)
    draw_rect_text(10, 550, 700, 50, "找到答案了!按任意键退出...")
    pause()
    close_graph()
easy_run(main) | [
"royqh1979@gmail.com"
] | royqh1979@gmail.com |
9e1bb9384cfbb09cb003f26a31ea6e708b58fbf8 | 8f860a338ccd64d40f2e64e921fedb1951b4468f | /file_utils.py | eece77b453ed337237db6c48c7ce0849b67d170b | [] | no_license | alexyang06/zp.buildout | 1d544963d857c98123823a02db3db43d51eb7886 | e81abe5bf1c38a40445831e993bedafb2adeb125 | refs/heads/master | 2021-06-18T13:23:46.136928 | 2016-12-23T05:06:26 | 2016-12-23T05:10:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 210 | py | import os
def get_all_files(path):
    """Return the names (not paths) of the regular files directly inside *path*.

    Sub-directories and other non-file entries are excluded; ordering follows
    os.listdir.
    """
    return [entry for entry in os.listdir(path)
            if os.path.isfile(os.path.join(path, entry))]
| [
"noreply@github.com"
] | noreply@github.com |
87442be8bbbe2a7b6035dd33438de37687bad316 | 6cd4c07bedc38cb289dd565f414d2a3baa71c8c3 | /configs/wfcos/wfcos_hrnet_coco.py | 8b9c817f48dc30bd8755c6a8b03580c2994e2a08 | [
"Apache-2.0"
] | permissive | tuggeluk/mmdetection | 4516cc0f70d8f8ff244e1eb4d8b6589d83c67c8a | 669a535c944628a3ab43330cae5c77b643e13a4b | refs/heads/master | 2021-07-01T00:51:23.272205 | 2021-06-18T08:12:55 | 2021-06-18T08:12:55 | 196,585,169 | 2 | 2 | Apache-2.0 | 2020-10-06T13:46:25 | 2019-07-12T13:35:50 | Python | UTF-8 | Python | false | false | 4,876 | py | # model settings
model = dict(
type='WFCOS',
pretrained='open-mmlab://msra/hrnetv2_w32',
backbone=dict(
type='HRNet',
extra=dict(
stage1=dict(
num_modules=1,
num_branches=1,
block='BOTTLENECK',
num_blocks=(4, ),
num_channels=(64, )),
stage2=dict(
num_modules=1,
num_branches=2,
block='BASIC',
num_blocks=(4, 4),
num_channels=(32, 64)),
stage3=dict(
num_modules=4,
num_branches=3,
block='BASIC',
num_blocks=(4, 4, 4),
num_channels=(32, 64, 128)),
stage4=dict(
num_modules=3,
num_branches=4,
block='BASIC',
num_blocks=(4, 4, 4, 4),
num_channels=(32, 64, 128, 256)))),
neck=dict(
type='HRFPN',
in_channels=[32, 64, 128, 256],
out_channels=256,
stride=2,
num_outs=5),
bbox_head=dict(
type='WFCOSHead',
num_classes=81,
in_channels=256,
max_energy=20,
stacked_convs=4,
feat_channels=256,
strides=[8, 16, 32, 64, 128],
loss_cls=dict(
type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
alpha=0.25,
loss_weight=1.),
loss_bbox=dict(
type='IoULoss',
loss_weight=1.0
),
loss_energy=dict(
type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
loss_weight=1.
),
split_convs=False,
r=500.
))
# training and testing settings
train_cfg = dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.4,
min_pos_iou=0,
ignore_iof_thr=-1),
allowed_border=-1,
pos_weight=-1,
debug=False)
test_cfg = dict(
nms_pre=1000,
min_bbox_size=0,
score_thr=0.3,
nms=dict(type='nms', iou_thr=0.2),
max_per_img=1000)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(
type='Resize',
img_scale=[(1333, 640), (1333, 800)],
multiscale_mode='value',
keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=4,
workers_per_gpu=4,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'images/train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'images/val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'images/val2017/',
pipeline=test_pipeline))
# optimizer
optimizer = dict(
type='SGD',
lr=0.01,
momentum=0.9,
weight_decay=0.0001,
paramwise_options=dict(bias_lr_mult=2., bias_decay_mult=0.))
optimizer_config = dict(
grad_clip=dict(
max_norm=2.
))
# learning policy
lr_config = dict(
policy='step',
warmup='constant',
warmup_iters=500,
warmup_ratio=1.0/3,
step=[16, 22])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=10,
hooks=[
dict(type='TextLoggerHook'),
dict(type='WandbLoggerHook')
])
# yapf:enable
# runtime settings
total_epochs = 40
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/wfcos_hrnet_coco'
load_from = None
# load_from = work_dir + '/epoch_4.pth'
resume_from = None
# resume_from = work_dir + '/epoch_4.pth'
workflow = [('train', 1)]
# wandb settings
wandb_cfg = dict(
entity='warp-net',
project='fcos-wfcos-baseline',
dryrun=False
)
| [
"y_satyawan@hotmail.com"
] | y_satyawan@hotmail.com |
d23b135a21dfd7c7b0427aa7f631fddf890451ee | 191b068186efaaee07358d0721d3027e81eef914 | /util.py | 6fd3e6cd3100930b0bcae9f47b2c7403d5cff7a7 | [] | no_license | kujing/git_cilog | 60eabbe2c56452b0e1045393b31995c5f41d5933 | acb864ef47d2d7bf9c2a55c900ea1a74f5c19c72 | refs/heads/master | 2021-01-10T21:39:14.533018 | 2018-10-24T11:30:47 | 2018-10-24T11:30:47 | 34,507,302 | 0 | 1 | null | 2015-06-27T07:09:02 | 2015-04-24T08:31:37 | Python | UTF-8 | Python | false | false | 2,615 | py | #!/usr/bin/env python3
#coding: utf-8
import os
import platform
import subprocess
import sys
import time
import re
ON_LINUX = (platform.system() == 'Linux')
conf = {
'max_domains': 10,
'max_ext_length': 10,
'style': 'gitstats.css',
'max_authors': 20,
'authors_top': 5,
'commit_begin': '',
'commit_end': 'HEAD',
'linear_linestats': 1,
'project_name': '',
'processes': 8,
'start_date': ''
}
class Util():
    """Stateless helpers for shelling out to git and parsing its output."""

    @staticmethod
    def getpipeoutput(cmds, quiet = False):
        """Run *cmds* as a shell pipeline and return its decoded stdout.

        :param cmds: list of shell command strings; each command after the
            first reads the previous command's stdout on its stdin.
        :param quiet: when True, suppress the progress line on the terminal.
        :return: stdout of the last command as text, trailing newline removed.

        (The dead exectime_external timing bookkeeping was removed.)
        """
        if not quiet and ON_LINUX and os.isatty(1):
            print('~~~~~~~~ ' + ' | '.join(cmds))
            sys.stdout.flush()
        p = subprocess.Popen(cmds[0], stdout=subprocess.PIPE, shell=True)
        processes = [p]
        for x in cmds[1:]:
            p = subprocess.Popen(x, stdin=p.stdout, stdout=subprocess.PIPE, shell=True)
            processes.append(p)
        output = p.communicate()[0]
        for p in processes:
            p.wait()
        if not quiet and ON_LINUX and os.isatty(1):
            print("")
        # communicate() returns bytes on Python 3: decode before stripping.
        # (The original str-typed rstrip('\n') raised TypeError on bytes.)
        return output.decode('utf-8', errors='replace').rstrip('\n')

    @staticmethod
    def getlogrange(defaultrange = 'HEAD', end_only = True):
        """Build the git-log revision range, honouring conf['start_date']."""
        commit_range = Util.getcommitrange(defaultrange, end_only)
        if len(conf['start_date']) > 0:
            return '--since=%s %s' % (conf['start_date'], commit_range)
        return commit_range

    @staticmethod
    def getcommitrange(defaultrange = 'HEAD', end_only = False):
        """Return the commit range from conf['commit_begin'/'commit_end'],
        falling back to *defaultrange* when no end commit is configured."""
        if len(conf['commit_end']) > 0:
            if end_only or len(conf['commit_begin']) == 0:
                return conf['commit_end']
            return '%s..%s' % (conf['commit_begin'], conf['commit_end'])
        return defaultrange

    @staticmethod
    def getstatsummarycounts(line):
        """Parse an 'N files changed, I insertions(+), D deletions(-)' summary.

        Always returns a 3-element list [files, insertions, deletions].
        Missing counts are padded with the integer 0 while parsed counts stay
        strings; callers are expected to int() each element.
        """
        numbers = re.findall(r'\d+', line)
        if len(numbers) == 1:
            # neither insertions nor deletions: probably "0 files changed"
            numbers.append(0)
            numbers.append(0)
        elif len(numbers) == 2 and line.find('(+)') != -1:
            numbers.append(0)    # only insertions were printed on this line
        elif len(numbers) == 2 and line.find('(-)') != -1:
            numbers.insert(1, 0)  # only deletions were printed on this line
        return numbers
"jingliangliang@foxmail.com"
] | jingliangliang@foxmail.com |
3ae2079875387f561dad5fbc4ea251ed85ed9d12 | fcef3602a044a82b75eb1bdee87a5eb347a56769 | /recolo/tests/test_coordinate_solver.py | d18af8c84528da0a59395aaf2880b71ea511ddb3 | [
"MIT"
] | permissive | PolymerGuy/recolo | 5cb9c6b01d7eeb4108710606341518aa13efc1d1 | 05b14f0834fa675579eabdf43fac046259df19bb | refs/heads/master | 2023-04-12T00:17:50.150126 | 2022-03-11T12:42:44 | 2022-03-11T12:42:44 | 343,329,602 | 4 | 1 | MIT | 2022-03-05T08:04:49 | 2021-03-01T07:39:40 | Python | UTF-8 | Python | false | false | 3,877 | py | from unittest import TestCase
from recolo.artificial_grid_deformation import find_coords_in_undef_conf, interpolated_disp_field
import numpy as np
def rms_diff(array1, array2):
    """Root-mean-square difference between two arrays, ignoring NaN entries."""
    squared_error = (array1 - array2) ** 2.
    mean_squared_error = np.nanmean(squared_error)
    return np.sqrt(mean_squared_error)
def biharmonic_disp_field(x, y, amp_scale=0.5):
    """Smooth analytical displacement field used as ground truth in tests.

    Returns the (u, v) displacement components at coordinates (x, y),
    each a sum of a cosine in x and a sine in y scaled by *amp_scale*.
    """
    u = (amp_scale * 0.4 * np.cos(np.pi * x / 30)
         + amp_scale * 0.5 * np.sin(np.pi * y / 40))
    v = (amp_scale * 0.6 * np.cos(np.pi * x / 50)
         + amp_scale * 0.7 * np.sin(np.pi * y / 60))
    return u, v
class TestFindCoordinatesInUndefConf(TestCase):
    """Tests for inverting x = X + u(X): recover undeformed coords X from x."""
    # As X is needed for other calculations, check that we can determine X from x = X + u(X)
    def test_analytical_disp_field(self):
        """Inversion against the closed-form displacement field."""
        tol = 1e-5
        dx = 3.5
        dy = 2.7
        xs, ys = np.meshgrid(np.arange(0, 80, dx), np.arange(0, 100, dy))
        Xs, Ys = find_coords_in_undef_conf(xs, ys, biharmonic_disp_field, tol=1e-9)
        u_X, u_Y = biharmonic_disp_field(Xs, Ys)
        # residuals of x = X + u(X) should vanish everywhere
        errors_x = xs - Xs - u_X
        errors_y = ys - Ys - u_Y
        peak_error_x = np.max(np.abs(errors_x))
        peak_error_y = np.max(np.abs(errors_y))
        if peak_error_x > tol or peak_error_y > tol:
            self.fail("Maximum error is %f and %f" % (peak_error_x, peak_error_y))
    def test_interpolated_disp_field(self):
        """Inversion against a spline-interpolated displacement field."""
        tol = 1e-5
        dx = 3.5
        dy = 2.7
        xs, ys = np.meshgrid(np.arange(0, 80, dx), np.arange(0, 100, dy))
        # Make an approximated displacement field
        u_x, u_y = biharmonic_disp_field(xs, ys)
        disp_func_interp = interpolated_disp_field(u_x, u_y, dx=2, dy=4, order=3)
        X, Y = find_coords_in_undef_conf(xs, ys, disp_func_interp, tol=1e-9)
        u_X, u_Y = disp_func_interp(X, Y)
        errors_x = xs - X - u_X
        errors_y = ys - Y - u_Y
        peak_error_x = np.max(np.abs(errors_x))
        peak_error_y = np.max(np.abs(errors_y))
        if peak_error_x > tol or peak_error_y > tol:
            self.fail("Maximum error is %f and %f" % (peak_error_x, peak_error_y))
    def test_compare_interpolated_and_analytical(self):
        """Interpolated and analytical inversions should agree in the mean."""
        # As there will always be minor error at the edges, we look at the mean error for the whole field
        tol = 1.e-3
        dx = 3.5
        dy = 2.7
        xs, ys = np.meshgrid(np.arange(0, 80, dx), np.arange(0, 100, dy))
        # Make an approximated displacement field0
        u_x, u_y = biharmonic_disp_field(xs, ys)
        disp_func_interp = interpolated_disp_field(u_x, u_y, dx=dx, dy=dy, order=3, mode="nearest")
        X_interp, Y_interp = find_coords_in_undef_conf(xs, ys, disp_func_interp, tol=1e-9)
        X, Y = find_coords_in_undef_conf(xs, ys, biharmonic_disp_field, tol=1e-9)
        rms_diff_X = rms_diff(X_interp, X)
        rms_diff_Y = rms_diff(Y_interp, Y)
        if rms_diff_X > tol or rms_diff_Y > tol:
            self.fail("RMS error is %f and %f" % (rms_diff_X, rms_diff_Y))
    def test_check_grid_sampling_independency(self):
        """Result should be nearly independent of the u_x/u_y sampling grid."""
        # Ensure that the sampling of u_x and u_y does not have a large impact on the final results
        tol = 1.e-3
        dxs = [0.1,0.5,1.0,3.2]
        for i,dx in enumerate(dxs):
            dy = dx + 0.12
            xs, ys = np.meshgrid(np.arange(0, 80, dx), np.arange(0, 100, dy))
            # Make an approximated displacement field0
            u_x, u_y = biharmonic_disp_field(xs, ys)
            disp_func_interp = interpolated_disp_field(u_x, u_y, dx=dx, dy=dy, order=3, mode="nearest")
            X_interp, Y_interp = find_coords_in_undef_conf(xs, ys, disp_func_interp, tol=1e-9)
            X, Y = find_coords_in_undef_conf(xs, ys, biharmonic_disp_field, tol=1e-9)
            rms_diff_X = rms_diff(X_interp, X)
            rms_diff_Y = rms_diff(Y_interp, Y)
            if rms_diff_X > tol or rms_diff_Y > tol:
                self.fail("RMS error is %f and %f for dx=%f and dy=%f" % (rms_diff_X, rms_diff_Y,dx,dy))
"sindre.n.olufsen@ntnu.no"
] | sindre.n.olufsen@ntnu.no |
7b0b902873182939175a307b409a94677e9c5ceb | f2b18dad16af5785267cf17e907b217622383f95 | /application.py | 8345815413695aaa3cd062c4be5b1deb66b0840b | [] | no_license | dxz6160/sensetime_project | cfb8e1a5dbe9e8e896b70d4df9b9302c2968fc55 | 66e84b5c2af37c051074d8329dceef2c68b94105 | refs/heads/master | 2022-12-30T18:21:56.917165 | 2020-10-20T08:36:19 | 2020-10-20T08:36:19 | 286,803,599 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 594 | py | import tornado.web
from views import sensetime
import config
import os
class Application(tornado.web.Application):
    """Tornado application wiring URL routes to their handlers."""
    def __init__(self):
        # Route table: API endpoints first, then a catch-all static-file
        # handler serving the front-end (index.html by default).
        handlers = [
            (r'/home', sensetime.HomeHandler),
            (r'/post_pic', sensetime.PicHandler),
            (r'/post_video', sensetime.VideoHandler),
            (r'/play_video', sensetime.PVideoHandler),
            (r'/(.*)$', tornado.web.StaticFileHandler,{"path": os.path.join(config.BASE_DIRS, "static/html"), "default_filename": "index.html"})
        ]
        super(Application, self).__init__(handlers, **config.settings)
super(Application, self).__init__(handlers, **config.settings) | [
"1957769588@qq.com"
] | 1957769588@qq.com |
85a2de9a877a4f50a4bde42d98b1125658915379 | 38cbe2cb35157d0feb7fc879405eafb8622404af | /string_reverse.py | dd251421d6ef28c8d196b0b3b2e56b854ab92b4b | [] | no_license | Ankita-githubFW/python_basics_files | 0258709415b668f32bfa42d0263956febe81408a | fb4f70b9cfdebbd65f94a7ce204a891bcf67597a | refs/heads/master | 2023-08-11T16:13:39.288451 | 2021-10-07T10:00:49 | 2021-10-07T10:00:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | s = 'NOOR BASHA WELCOME TO CODING'
# Reverse the string with an extended slice (step -1).
result = s[::-1]
# Two equivalent ways to print the result: f-string and str.format.
print(f"The reversed string is {result}")
print("The reversed string is {}".format(result))
| [
"noreply@github.com"
] | noreply@github.com |
0ece535aff6154c0f5fee45c1a1815526bd3cc9b | b8ae3ca933727784afb031938a799401ff0c545c | /etl_fact/etl_fact_draw/transform.py | 27192872698faa95654dfd77465e1ac32734df20 | [] | no_license | zhangguo7/data_warehouse | 9a772e4b3bbbecd9ffc276b8491a55e36577819b | 6af7d9d574df7f0e1fd7317cf855fd341d153988 | refs/heads/master | 2021-01-20T00:56:22.056135 | 2017-07-07T05:46:11 | 2017-07-07T05:46:11 | 89,213,214 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,704 | py | # -*- coding:utf-8 -*-
"""
绘图事实表的数据转换模块
定义了一个class Transform
用于完成对fact_draw表的数据转换与清理
内部构建了一个 transform_main 方法
以及其他内部调用方法:
行业提取 _filter_ind1_ind2
行业并入 _merge_ind_draw
转换经营状态 _deal_operatingState
增加日期和时间键 _trans_DT
拼接电话和手机 _concat_tel
删除多余的变量 _del_unnecessary_vars
重命名 _rename
"""
import re
import sys
sys.path.append('../../tools')
import numpy as np
import pandas as pd
from tool_funcs import other2int,angle2half
class Transform(object):
    """Transform and clean source records for the draw fact table (fact_draw).

    Public entry point: :meth:`transform_main`.  Internal steps:
      * _filter_ind1_ind2       split industries into level 1 / level 2
      * _merge_ind_draw         join both industry levels onto the draw rows
      * _deal_operatingState    expand raw operating-state codes into flags
      * _trans_DT               derive date / time surrogate keys
      * _trans_deco             map decoration codes to labels
      * _concat_tel             merge mobile and landline numbers
      * _split_zbh / _doorplate_selfnum   separate self-numbers (自编号)
      * _trans_has_licence      map the business-licence flag to a label
      * _concat_companyaddress  build the full company address

    NOTE: the deprecated ``DataFrame.ix`` indexer (removed in pandas 1.0)
    has been replaced with ``.loc`` throughout; semantics are unchanged
    because every original use was boolean-mask based.
    """
    def _filter_ind1_ind2(self, df_industry):
        """Split the industry table into level-1 (industryPid == '0') and
        level-2 (everything else) frames."""
        df_industry1 = df_industry.loc[df_industry['industryPid'] == '0', :]
        df_industry2 = df_industry.loc[df_industry['industryPid'] != '0', :]
        return df_industry1, df_industry2
    def _merge_ind_draw(self, df_draw, df_industry1, df_industry2):
        """Inner-join the level-1 and level-2 industry columns onto the draw
        rows (draw.guid == industry.attachId)."""
        df_draw = pd.merge(df_draw, df_industry1,
                           left_on='guid', right_on='attachId')
        merge_ind = pd.merge(df_draw, df_industry2,
                             left_on='guid', right_on='attachId')
        return merge_ind
    def _deal_operatingState(self, df):
        """Expand the three raw operating-state code columns into flag columns.

        :param df: frame with operatingState, operatingState1, operatingState2
        :return: the same frame with drawSublease / drawEmpty / drawRecruit /
                 drawRenovation / drawWarehouse / drawClose / drawNormal added
        """
        add_vars = ['drawSublease', 'drawEmpty', 'drawRecruit', 'drawRenovation',
                    'drawWarehouse', 'drawClose', 'drawNormal']
        for var in add_vars:
            df[var] = None
        # sublease / transfer
        df.loc[df['operatingState'].apply(lambda x: '5' in str(x)), 'drawSublease'] = '转租、转让'
        df.loc[df['operatingState2'].apply(lambda x: '1' in str(x)), 'drawSublease'] = '转租、转让'
        # renovation
        df.loc[df['operatingState'].apply(lambda x: '6' in str(x)), 'drawRenovation'] = '装修'
        df.loc[df['operatingState1'].apply(lambda x: '6' in str(x)), 'drawRenovation'] = '装修'
        df.loc[df['operatingState2'].apply(lambda x: '3' in str(x)), 'drawRenovation'] = '装修'
        # warehouse
        df.loc[df['operatingState'].apply(lambda x: '3' in str(x)), 'drawWarehouse'] = '仓库'
        df.loc[df['operatingState1'].apply(lambda x: '3' in str(x)), 'drawWarehouse'] = '仓库'
        df.loc[df['operatingState1'].apply(lambda x: '5' in str(x)), 'drawWarehouse'] = '仓库'
        # vacant
        df.loc[df['operatingState'].apply(lambda x: '4' in str(x)), 'drawEmpty'] = '空置'
        df.loc[df['operatingState1'].apply(lambda x: '4' in str(x)), 'drawEmpty'] = '空置'
        # recruiting
        df.loc[df['operatingState1'].apply(lambda x: '4' in str(x)), 'drawRecruit'] = '招聘'
        # closed
        df.loc[df['operatingState'].apply(lambda x: '2' in str(x)), 'drawClose'] = '关门'
        df.loc[df['operatingState1'].apply(lambda x: '2' in str(x)), 'drawClose'] = '关门'
        # normal operation
        df.loc[df['operatingState'].apply(lambda x: '1' in str(x)), 'drawNormal'] = '正常'
        df.loc[df['operatingState1'].apply(lambda x: '1' in str(x)), 'drawNormal'] = '正常'
        return df
    def _trans_DT(self, df):
        """Add date (YYYYMMDD int) and time (HHMMSS int) surrogate keys derived
        from the receiveDate and inputDate timestamp strings."""
        def _extract_datekey(x):
            # 'YYYY-MM-DD ...' -> YYYYMMDD
            return int(str(x)[:10].replace('-', ''))
        def _extract_timekey(x):
            # '... HH:MM:SS' -> integer HHMMSS (leading zeros drop out)
            return int(str(x)[11:19].replace(':', ''))
        df['receiveDateKey'] = df['receiveDate'].apply(_extract_datekey)
        df['receiveTimeKey'] = df['receiveDate'].apply(_extract_timekey)
        df['inputDateKey'] = df['inputDate'].apply(_extract_datekey)
        df['inputTimeKey'] = df['inputDate'].apply(_extract_timekey)
        return df
    def _trans_deco(self, df):
        """Map decoration codes 1-4 to their labels; other values become None."""
        def mapping(x):
            deco_dict = {
                1: '无装修',
                2: '简单装修',
                3: '精装修',
                4: '无法观测'
            }
            return deco_dict.get(x)
        df['decorateDescrption'] = df['decorateDescrption'].apply(mapping)
        return df
    def _concat_tel(self, df):
        """Combine sampleMobile and sampleTel into drawTel; join with a comma
        when both are present, and normalise '|' separators to commas."""
        df['drawTel'] = np.where(
            (df['sampleMobile'] != '') & (df['sampleTel'] != ''),
            df['sampleMobile'] + ',' + df['sampleTel'],
            df['sampleMobile'] + df['sampleTel']
        )
        df['drawTel'] = df['drawTel'].apply(lambda x: x.replace('|', ','))
        return df
    def _split_zbh(self, doorplate_lst):
        """Split '自编号N号' self-number suffixes out of raw doorplate strings.

        :return: (cleaned doorplates, extracted self-numbers); entries are
            None when no self-number is found / the doorplate is 'None'.
        """
        new_dp_lst = []
        selfnum_lst = []
        for dp in doorplate_lst:
            try:
                zbh = re.search(r'自编号*\d+号*', dp).group()
                new_dp = dp.replace(zbh, '').replace('|', '').replace('#', '号')
            except (AttributeError, TypeError):
                # no match (search returned None) or dp is not a string
                zbh = None
                new_dp = dp if dp != 'None' else None
            new_dp_lst.append(new_dp)
            selfnum_lst.append(zbh)
        return new_dp_lst, selfnum_lst
    def _doorplate_selfnum(self, df):
        """Separate the self-number (自编号) out of the doorPlate column,
        filling selfNum only where it is currently empty."""
        new_dp_lst, selfnum_lst = self._split_zbh(df['doorPlate'])
        df['doorPlate'] = pd.Series(new_dp_lst).apply(angle2half)
        tmp_zbh = pd.Series(selfnum_lst)
        df['selfNum'] = np.where((df['selfNum'] == '') | (df['selfNum'].isnull()),
                                 tmp_zbh, df['selfNum'])
        return df
    def _trans_has_licence(self, df):
        """Map the licence flag (1 / other) to its display label."""
        df['isBusinessLicence'] = df['isBusinessLicence'].\
            apply(lambda x: '悬挂' if x == 1 else '未悬挂')
        return df
    def _concat_companyaddress(self, df):
        """Build drawCompanyAddress = province + city + district + market name,
        after patching known district-id gaps and dropping redundant
        municipality-level city names."""
        def split_grandParentName(x):
            # keep only the part before ':' when present
            x = str(x)
            if x.find(':') != -1:
                return x.split(':')[0]
            return x
        df['grandParentName'] = df['grandParentName'].apply(split_grandParentName)
        # patch district ids that are missing in the source data
        df.loc[df['cityName'] == '东莞市', 'districtId'] = '441900'
        df.loc[(df['cityName'] == '台州市') & (df['districtName'] == ''), 'districtId'] = '331003'
        city_lst = [
            '东莞市', '中山市',
            '北京市辖区', '北京的县',
            '重庆市辖区', '重庆的县',
            '上海市辖区', '上海市的县',
            '天津市辖区', '天津市的县'
        ]
        df['cityName'] = df['cityName'].apply(lambda x: '' if x in city_lst else x)
        df['drawCompanyAddress'] = df['provinceName'] + df['cityName'] + \
                                   df['districtName'] + df['grandParentName']
        return df
    def transform_main(self, df_industry, df_draw):
        """Run the full transformation pipeline and return the cleaned frame
        with the final fact_draw column names."""
        # attach level-1 / level-2 industries
        df_ind1, df_ind2 = self._filter_ind1_ind2(df_industry)
        merge_ind = self._merge_ind_draw(df_draw, df_ind1, df_ind2)
        # expand operating-state codes
        df = self._deal_operatingState(merge_ind)
        # date / time surrogate keys
        df = self._trans_DT(df)
        # phone numbers
        df = self._concat_tel(df)
        # full company address
        df = self._concat_companyaddress(df)
        # self-numbers out of doorplates
        df = self._doorplate_selfnum(df)
        # decoration labels
        df = self._trans_deco(df)
        # business-licence label
        df = self._trans_has_licence(df)
        df['sampleName'] = df['sampleName'].apply(lambda x: str(x)[:50])
        df['districtId'] = df['districtId'].apply(other2int)
        # rename and select the output columns
        clean_dict = {
            'drawGuid': df['guid'],
            'marketGuid': df['grandParentId'],
            'drawZoneGuid': df['zoneGuid'],
            'divisionKey': df['districtId'],
            'drawMateAddress': df['mateAddress'],
            'drawDoorPlate': df['doorPlate'],
            'drawSelfNum': df['selfNum'],
            'drawCompanyName': df['sampleName'],
            'drawLatitude': df['bdLatitude'],
            'drawLongitude': df['bdlongitude'],
            'drawPhotoCount': df['photoCount'],
            'drawShopCount': df['shopCount'],
            'drawDecorate': df['decorateDescrption'],
            'drawHagLicence': df['isBusinessLicence'],
            'drawIndustryNo_1': df['industryId_x'],
            'drawIndustryName_1': df['industryName_x'],
            'drawindustryNo_2': df['industryId_y'],
            'drawIndustryName_2': df['industryName_y'],
            'drawSublease': df['drawSublease'],
            'drawEmpty': df['drawEmpty'],
            'drawRecruit': df['drawRecruit'],
            'drawRenovation': df['drawRenovation'],
            'drawWarehouse': df['drawWarehouse'],
            'drawClose': df['drawClose'],
            'drawNormal': df['drawNormal'],
            'receiveDateKey': df['receiveDateKey'],
            'receiveTimeKey': df['receiveTimeKey'],
            'inputDateKey': df['inputDateKey'],
            'inputTimeKey': df['inputTimeKey'],
            'drawTel': df['drawTel'],
            'drawCompanyAddress': df['drawCompanyAddress']
        }
        return pd.DataFrame(clean_dict)
"zhangguo7@aliyun.com"
] | zhangguo7@aliyun.com |
1a2b977db98452df079d888fe65e02cf758fd88d | 203e1581f9838c7e253befd9965ad087263d8127 | /dashboard/myutils/__init__.py | 3f0611c2e422ac50fb42140c2b12ba398e609c13 | [] | no_license | magnito2/clownbot | a52a9ffbb79831ba8e96adae52651b384aa29303 | f54d26a1714a6215a3da492853e085064d9938f4 | refs/heads/master | 2022-12-10T18:34:47.811637 | 2020-01-30T13:54:39 | 2020-01-30T13:54:39 | 233,408,628 | 3 | 4 | null | 2022-12-08T05:25:35 | 2020-01-12T14:51:54 | JavaScript | UTF-8 | Python | false | false | 3,688 | py | import requests
from flask_security.signals import password_reset, reset_password_instructions_sent
from flask_security.utils import config_value, get_token_status, hash_data, hash_password, \
url_for_security, verify_hash
from flask_security.recoverable import generate_reset_password_token, send_password_reset_notice
from flask import current_app as app
from werkzeug.local import LocalProxy
from flask_mail import Message
# Convenient references
_security = LocalProxy(lambda: app.extensions['security'])
_datastore = LocalProxy(lambda: _security.datastore)
def get_binance_symbols():
    """Fetch the list of tradable symbol names from the Binance exchange.

    Returns:
        dict: ``{'error': False, 'result': [name, ...]}`` on success, or
        ``{'error': True, 'message': str}`` describing the failure.
    """
    try:
        # A timeout keeps the dashboard from hanging forever when Binance
        # is unreachable (the original call had none).
        exc_info = requests.get("https://api.binance.com/api/v1/exchangeInfo", timeout=10)
        symbols_info = exc_info.json()['symbols']
        symbol_names = [symbol['symbol'] for symbol in symbols_info]
        return {'error': False, 'result': symbol_names}
    except Exception as e:
        # Best-effort API: report the failure in-band instead of raising so
        # callers can render an error message.
        return {'error': True, 'message': str(e)}
def get_bittrex_symbols():
    """Fetch the list of market names from the Bittrex exchange.

    Returns:
        dict: ``{'error': False, 'result': [name, ...]}`` on success, or
        ``{'error': True, 'message': str}`` describing the failure.
    """
    try:
        # A timeout keeps the dashboard from hanging forever when Bittrex
        # is unreachable (the original call had none).
        markets_resp = requests.get('https://api.bittrex.com/api/v1.1/public/getmarkets', timeout=10)
        markets_resp_json = markets_resp.json()
        # The Bittrex v1.1 envelope carries its own success flag.
        if not markets_resp_json['success']:
            return {'error': True, 'message': markets_resp_json['message']}
        markets = markets_resp_json['result']
        symbols = [market['MarketName'] for market in markets]
        return {'error': False, 'result': symbols}
    except Exception as e:
        # Best-effort API: report the failure in-band instead of raising.
        return {'error': True, 'message': str(e)}
def send_reset_password_instructions(user):
    """Sends the reset password instructions email for the specified user.

    Mirrors :func:`flask_security.recoverable.send_reset_password_instructions`
    but points the reset link at the SPA frontend instead of the default
    Flask-Security endpoint.

    :param user: The user to send the instructions to
    """
    token = generate_reset_password_token(user)
    reset_link = frontend_url('reset-password', token=token)
    # (Removed a leftover debug print that dumped the security extension
    # object to stdout on every reset request.)
    if config_value('SEND_PASSWORD_RESET_EMAIL'):
        send_mail(config_value('EMAIL_SUBJECT_PASSWORD_RESET'),
                  user.email, 'reset_instructions',
                  user=user, reset_link=reset_link)
    # Notify any Flask-Security signal listeners that instructions went out.
    reset_password_instructions_sent.send(
        app._get_current_object(), user=user, token=token
    )
def frontend_url(resource, token):
    """Build an absolute frontend link: ``<FRONTEND_URL>/<resource>/<token>``."""
    base = app.config['FRONTEND_URL']
    return "/".join([base, resource, token])
def update_password(user, password):
    """Update the specified user's password.

    Hashes *password*, persists the user, then sends the "your password was
    reset" notice and fires the ``password_reset`` signal.

    :param user: The user to update_password
    :param password: The unhashed new password
    """
    hashed = hash_password(password)
    user.password = hashed
    _datastore.put(user)
    _datastore.commit()
    # Tell the user and any signal listeners about the change.
    send_password_reset_notice(user)
    password_reset.send(app._get_current_object(), user=user)
def send_mail(subject, recipient, template, **context):
    """Send an email via the Flask-Mail extension.

    :param subject: Email subject
    :param recipient: Email recipient
    :param template: The name of the email template
    :param context: The context to render the template with
    """
    context.setdefault('security', _security)
    context.update(_security._run_ctx_processor('mail'))

    # ``str()`` always yields a plain ``str`` (it unwraps a LocalProxy), so
    # the original ``isinstance(sender, LocalProxy)`` check after it could
    # never be true; that dead branch has been removed.
    sender = str(_security.email_sender)

    msg = Message(subject,
                  sender=sender,
                  recipients=[recipient])

    ctx = ('security/email', template)
    if config_value('EMAIL_PLAINTEXT'):
        msg.body = _security.render_template('%s/%s.txt' % ctx, **context)
    if config_value('EMAIL_HTML'):
        msg.html = _security.render_template('%s/%s.html' % ctx, **context)

    # Prefer an application-registered async send task (e.g. Celery) when
    # present; otherwise send synchronously through Flask-Mail.
    if _security._send_mail_task:
        _security._send_mail_task(msg)
        return

    mail = app.extensions.get('mail')
    mail.send(msg)
| [
"magnusotwani@gmail.com"
] | magnusotwani@gmail.com |
30139ce10e0c04c0fc06066ee31e202cfe7665db | 36e51e49d7b56d66ea6eeff437309623bb409bdf | /part9/if_elif_else.py | f759ee1ce2403195322b11286560276f40ce35ab | [] | no_license | dhananjayharel/mark_trego_python_beginners | 2b236e22efeabc7ad848d110081dda487c8bf21b | 2c4d046c21da8b69a643be62dbbf684489caef76 | refs/heads/master | 2020-09-09T11:20:45.609867 | 2019-12-14T14:25:58 | 2019-12-14T14:25:58 | 221,433,398 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | x = 3
y = 7
z = 10

# Demonstrates an if/elif/else chain: only the first true branch runs.
# (x is assigned earlier in the file.)
if x < y and x > z:
    message = ('something here was the case',)
elif x < z:
    message = (x, 'is less than', z)
elif y < z:
    message = (y, 'is less than', z)
else:
    message = ('nothing was the case',)

print(*message)
| [
"harel.dhananjay@gmail.com"
] | harel.dhananjay@gmail.com |
90197241a1030e09b3e3a1801be2cd12244e42d1 | 8158e8633883139f8de93d5fdbf7a85acc6b91f3 | /Original Code/fat32Test.py | b759cb6fdc924b466dadcbf0075690ebb2b207bc | [] | no_license | kyungsook/Forensics_Visualization | cf9c647a0b7622fb76eba35e693ab4876583f9e2 | cb2d1155a70824e1ba1453819c3e2eb0b5bde23c | refs/heads/master | 2021-02-15T22:19:22.127896 | 2020-10-08T09:01:02 | 2020-10-08T09:01:02 | 244,938,004 | 1 | 0 | null | 2020-06-06T09:37:42 | 2020-03-04T15:39:11 | HTML | UTF-8 | Python | false | false | 7,183 | py | import sys
import struct
class FAT32:
    """Minimal FAT32 filesystem reader for forensic inspection.

    Parses the Volume Boot Record of a raw FAT32 image, follows FAT cluster
    chains, and walks directory entries, collecting them into the lists
    below.

    NOTE(review): ``dir_list`` / ``file_list`` / ``reg_list`` are *class*
    attributes, so they are shared by every FAT32 instance until
    ``renew_list`` rebinds them on an instance — confirm this is intended.
    """
    # FAT entry value marking the end of a cluster chain.
    END_CLUSTER = 0x0fffffff
    # Entries whose attribute byte marks them as directory-like.
    dir_list=[]
    # All other (file) entries.
    file_list=[]
    # Subset of file entries whose content starts with the 'regf' signature.
    reg_list=[]
    def __init__(self, filename):
        """Open *filename* (a raw FAT32 image) and parse its boot record."""
        self.filename = filename
        self.fd = open(filename, "rb")
        self.read_vbr()
    def read_vbr(self): # read the first sector (the VBR)
        """Read sector 0 and cache the BPB geometry fields as attributes."""
        self.fd.seek(0)
        vbr = self.fd.read(512)
        self.bps = struct.unpack_from("<H", vbr, 11)[0] #byte per sector
        self.spc = struct.unpack_from("<B", vbr, 13)[0] #sector per cluster
        self.reserved_sectors = struct.unpack_from("<H", vbr, 14)[0]
        self.number_of_fats = struct.unpack_from("<B", vbr, 16)[0]
        self.sectors = struct.unpack_from("<I", vbr, 32)[0]
        self.fat_size = struct.unpack_from("<I", vbr, 36)[0]
        self.root_cluster = struct.unpack_from("<I", vbr, 44)[0]
        # First sector of the data region (clusters are numbered from 2).
        self.first_data_sector = self.fat_size * self.number_of_fats + self.reserved_sectors
    def read_byte(self, offset, count=1):
        """Return *count* raw bytes starting at absolute byte *offset*."""
        self.fd.seek(offset)
        return self.fd.read(count)
    def read_sector(self, offset, count=1):
        """Return *count* sectors starting at sector index *offset*."""
        self.fd.seek(offset * self.bps)
        return self.fd.read(self.bps * count)
    def read_cluster(self, cluster, count=1):
        """Return *count* clusters starting at cluster number *cluster* (>= 2)."""
        if cluster < 2:
            raise Exception("Can't read under cluster 2")
        # Cluster numbering starts at 2; translate to a data-region index.
        real_cluster = cluster - 2
        return self.read_sector(self.first_data_sector + real_cluster * self.spc, count * self.spc)
    def seek(self, offset, whence=0):
        # Thin pass-through to the underlying file object's seek().
        self.fd.seek(offset, whence)
    def read_clusters(self, fats):
        """Concatenate and return the content of every cluster number in *fats*."""
        data = bytes(0)
        for i in fats:
            data += self.read_cluster(i)
        return data
    def to_decode(self, data, encoding):
        """Decode *data* with *encoding*; empty input maps to ''."""
        if len(data) == 0:
            return ""
        return data.decode(encoding)
    def to_utf_16_le(self, data):
        return self.to_decode(data, 'utf-16-le')
    def to_euc_kr(self, data):
        return self.to_decode(data, 'euc-kr')
    def filter_unused_lfn(self, data):
        """Strip trailing 0xFFFF / 0x0000 padding units from an LFN name chunk.

        Scans backwards two bytes (one UTF-16 unit) at a time and drops the
        padding tail, stopping at the first real character.
        """
        length = len(data)
        for i in range(len(data), 0, -2):
            if data[i - 2:i] == b'\xff\xff' or data[i - 2:i] == b'\x00\x00':
                length = i - 2
            else:
                break
        return data[:length]
    def parse_dir_entry_lfn(self, data, lfn):
        """Parse one Long File Name entry, prepending its text to *lfn*."""
        # An LFN entry stores the name in three UTF-16LE fragments.
        name1 = self.to_utf_16_le(self.filter_unused_lfn(data[1:11]))
        name2 = self.to_utf_16_le(self.filter_unused_lfn(data[14:26]))
        name3 = self.to_utf_16_le(self.filter_unused_lfn(data[28:32]))
        return {'name': name1 + name2 + name3 + lfn}
    def parse_dir_entry(self, data, lfn):
        """Parse a 32-byte short (8.3) directory entry into a dict.

        *lfn* is the long name accumulated from preceding LFN entries; when
        non-empty it is stored under the ``'name'`` key in addition to the
        short name under ``'sname'``.
        """
        attr = data[11]
        # NOTE(review): is_LFN is computed but never used here — the caller
        # (get_files) already routes LFN entries elsewhere.
        is_LFN = attr & 0x0F == 0x0F
        if data[0]==0xE5 :
            # 0xE5 marks a deleted entry; its first name byte is lost, so
            # substitute '!' for it.
            # NOTE(review): data[2:7] skips bytes 1 and 7 of the name field;
            # data[1:8] looks intended — confirm before changing.
            name='!'
            name=name+self.to_euc_kr(data[2:7]).rstrip()
        else :
            name = self.to_euc_kr(data[0:8]).rstrip()
        ext = self.to_euc_kr(data[8:11]).rstrip()
        if len(ext) > 0:
            name = name + "." + ext
        create_time = struct.unpack_from("<H", data, 14)[0]
        create_date = struct.unpack_from("<H", data, 16)[0]
        lad = struct.unpack_from("<H", data, 18)[0] #last access date
        highcluster = struct.unpack_from("<H", data, 20)[0]
        write_time = struct.unpack_from("<H", data, 22)[0]
        write_date = struct.unpack_from("<H", data, 24)[0]
        lowcluster = struct.unpack_from("<H", data, 26)[0]
        # The start cluster is split across two 16-bit fields.
        cluster = highcluster << 16 | lowcluster
        size = struct.unpack_from("<I", data, 28)[0]
        # Sniff the real file type from the first bytes of the file content
        # (magic-number detection), independent of the claimed extension.
        db_ext_byte = self.get_real_ext(cluster)
        real_ext_byte = db_ext_byte[0:8]
        real_ext_high = real_ext_byte[0:4]
        real_ext=''
        if real_ext_high == b'PK\x03\x04':
            real_ext = 'ZIP/PPTX/XLSX/DOCX'
        elif real_ext_high == b'\xFF\xD8\xFF\xE0':
            real_ext = 'JPG'
        elif real_ext_byte == b'\x89\x50\x4E\x47\x0D\x0A\x1A\x0A':
            real_ext = 'PNG'
        elif real_ext_high == b'\x25\x50\x44\x46':
            real_ext = 'PDF'
        elif real_ext_byte == b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1':
            real_ext = 'HWP'
        elif db_ext_byte == b'\x53\x51\x4C\x69\x74\x65\x20\x66\x6F\x72\x6D\x61\x74\x20\x33\x00':
            real_ext = 'SQLite'
        elif real_ext_high == b'regf':
            real_ext = 'registry hive file'
        entry = {'sname': name, 'attr': attr, 'cluster': cluster, 'size': size, 'ext': ext, 'real_ext': real_ext,
                 'create_time': create_time, 'create_date': create_date, 'lad': lad, 'write_time': write_time, 'write_date': write_date }
        if len(lfn) > 0:
            entry['name'] = lfn
        if data[0] == 0xE5:
            entry['del']='deleted'
        return entry
    def get_real_ext(self, cluster):
        """Return the first 16 bytes of *cluster*'s content for magic sniffing."""
        # NOTE(review): hard-codes 512 bytes/sector instead of self.bps —
        # confirm the images always use 512-byte sectors.
        real_ext = self.read_byte(((cluster-2)* self.spc + self.first_data_sector)*512, 16)
        return real_ext
    def get_content(self, cluster): # follow the linked FAT chain and read all of the data
        fats = self.get_fats_by_start_cluster(cluster)
        return self.read_clusters(fats)
    def get_files(self, cluster):
        """Walk the directory starting at *cluster*, filing every entry found."""
        fats = self.get_fats_by_start_cluster(cluster)
        data = self.read_clusters(fats)
        lfn = ""
        for i in range(0, len(data), 32):
            entry_data = data[i:i + 32] # pull out one 32-byte entry at a time
            c = struct.unpack("<QQQQ", entry_data)
            # A fully zeroed entry marks the end of the directory listing.
            if c[0] == 0 and c[1] == 0 and c[2] == 0 and c[3] == 0:
                break
            attr = entry_data[11]
            is_LFN = attr & 0x0F == 0x0F #true if equal, false otherwise
            if not is_LFN: # when is_LFN is false: a short entry — flush accumulated LFN
                entry = self.parse_dir_entry(entry_data, lfn.strip())
                lfn = ""
                self.define_dir(entry)
            else:
                # LFN entries precede their short entry; accumulate the name.
                entry = self.parse_dir_entry_lfn(entry_data, lfn)
                lfn = entry['name']
    def get_fats_by_start_cluster(self, cluster, fat=1):
        # To get fat chain, it uses fat.
        """Follow the FAT chain from *cluster* and return every cluster number.

        *fat* selects which FAT copy to read (1-based).
        """
        base_sector = self.reserved_sectors + self.fat_size * (fat - 1)
        # Each FAT32 entry is 4 bytes wide.
        fats_per_sector = self.bps / 4
        fats = []
        next_cluster = cluster
        while next_cluster != self.END_CLUSTER:
            fats.append(next_cluster)
            # Locate the sector and in-sector index of the next FAT entry.
            sector, idx = divmod(next_cluster, fats_per_sector)
            sector = int(sector)
            idx = int(idx)
            data = self.read_sector(base_sector + sector)
            next_cluster = struct.unpack_from("<I", data, idx * 4)[0]
        return fats
    def define_dir(self,entry):
        """Route *entry* into dir_list, file_list and/or reg_list by attribute."""
        # Attribute byte values 8/16/22 are treated as directory-like here.
        if entry['attr'] == 8 or entry['attr'] == 16 or entry['attr'] == 22:
            entry['ext']='Directory'
            self.dir_list.append(entry)
        elif entry['real_ext'] == 'registry hive file':
            # Registry hives go in both the file list and the registry list.
            self.file_list.append(entry)
            self.reg_list.append(entry)
        else:
            self.file_list.append(entry)
    def renew_list(self):
        """Reset dir_list and file_list (as *instance* attributes) before a rescan."""
        self.dir_list=[]
        self.file_list=[]
if __name__ == '__main__':
    # Parse the FAT32 image named on the command line and dump the
    # directory entries found in its root directory.
    print("Fat32")
    volume = FAT32(sys.argv[1])
    volume.get_files(volume.root_cluster)
    print(volume.dir_list)
    print(volume.root_cluster)
| [
"lovablekks@naver.com"
] | lovablekks@naver.com |
4eb90f5b339b74d2768d5d7d268b6d412f8d1798 | f1330ad06f86455a6b7ae61f5617f78a4647cb18 | /dailyfresh/utils/encryption.py | 15dbce8ded86218c2b569f416821693b607c3a3b | [] | no_license | pythonchuang/dailyfresh | 0d37c2a4db53527b9e2e2b7988be1bc8427440d7 | 64720733c14d845a89624169c263e4e8902df68b | refs/heads/dev | 2021-07-03T13:34:53.967419 | 2017-09-24T10:14:39 | 2017-09-24T10:14:39 | 104,314,992 | 0 | 1 | null | 2017-09-24T10:14:40 | 2017-09-21T07:20:43 | HTML | UTF-8 | Python | false | false | 192 | py | import hashlib
class Encrytion(object):
    """Hashing helpers.

    NOTE(review): the class name is misspelled ("Encrytion"); it is kept
    as-is so existing callers keep working.
    """

    @staticmethod
    def sha1(text):
        """Return the hex-encoded SHA-1 digest of *text* (a str).

        The original definition lacked ``@staticmethod``: calling it on an
        *instance* would have passed the instance as ``text`` and crashed.
        Class-level calls (``Encrytion.sha1(...)``) behave exactly as before.
        """
        return hashlib.sha1(text.encode('utf-8')).hexdigest()
if __name__ == '__main__':
    # Quick manual check: hash a sample string and show the digest.
    sample = 'aaskdjfh'
    print(Encrytion.sha1(sample))
| [
"786355997@qq.com"
] | 786355997@qq.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.