repo_name stringlengths 6 97 | path stringlengths 3 341 | text stringlengths 8 1.02M |
|---|---|---|
kianpu34593/base4gpaw | BASIC/optimizer.py | <filename>BASIC/optimizer.py
import os
from ase.eos import EquationOfState
from ase.optimize import BFGS
from gpaw import GPAW
from ase.io.trajectory import Trajectory
from ase.io import read,write
from fractions import Fraction
import numpy as np
from ase.dft.bee import BEEFEnsemble
from ase.parallel import parprint,world,barrier
def optimize_bulk(atoms,step=0.05,fmax=0.01,location='',extname=''):
    """Optimize a bulk cell via an EOS volume scan followed by a BFGS relaxation.

    The cell is isotropically scaled to five factors in [1-2*step, 1+2*step],
    the resulting energies are fitted to a Birch-Murnaghan equation of state,
    the cell is rescaled to the fitted equilibrium volume, and the ions are
    then relaxed with BFGS until the maximum force is below ``fmax``.

    Parameters
    ----------
    atoms : ase.Atoms
        Bulk structure with a GPAW calculator already attached.
    step : float
        Scan half-width per point of the linear scaling factor.
    fmax : float
        Force convergence criterion (eV/Ang) for the BFGS run.
    location : str
        Output directory; assumed to already exist, including its
        ``eos_fit`` subdirectory — TODO confirm callers create them.
    extname : str
        Extra suffix appended to every output file name.
    """
    cell=atoms.get_cell()
    name=atoms.get_chemical_formula(mode='hill')
    vol=atoms.get_volume()
    volumes=[]
    energies=[]
    # Five-point isotropic volume scan around the input cell.
    for x in np.linspace(1-2*step,1+2*step,5):
        atoms.set_cell(cell*x,scale_atoms=True)
        # Redirect the GPAW text log for each scan point.
        atoms.calc.set(txt=location+'/'+'eos_fit'+'/'+name+'_'+str(np.round(x,decimals=2))+'-'+str(extname)+'.txt')
        energies.append(atoms.get_potential_energy())
        volumes.append(atoms.get_volume())
    eos=EquationOfState(volumes,energies,eos='birchmurnaghan')
    v0=eos.fit()[0]  # fitted equilibrium volume
    # Linear scale factor mapping the original volume onto v0.
    x0=(v0/vol)**Fraction('1/3')
    atoms.set_cell(x0*cell,scale_atoms=True)
    file_name=location+'/'+name+'-'+str(extname)
    atoms.calc.set(txt=file_name+'.txt')
    dyn=BFGS(atoms=atoms,trajectory=file_name+'.traj',
             logfile=file_name+'.log') ## TO-DO: add maxstep control
    dyn.run(fmax=fmax)
    # Persist the converged calculator state for later restarts/analysis.
    atoms.calc.write(file_name+'.gpw')
    ## TO-DO: add ensemble energies to file
# def relax(atoms, name, fmax=0.01, maxstep=0.04):
# gpwname=name+'/'+'slab'
# atoms.calc.set(txt=gpwname+'.txt')
# atoms.calc.attache(atoms.calc.write, 10, 'interm.gpaw')
# dyn=BFGS(atoms=atoms,trajectory=gpwname+'.traj',
# logfile = gpwname+'.log',maxstep=maxstep)
# dyn.run(fmax=fmax)
# atoms.calc.write(gpwname+'.gpw')
# # TO-DO: add ensemble energies to file
def relax(atoms, name, fmax=0.01, maxstep=0.04):
    """BFGS-relax a slab with checkpointing and restart support.

    Output files are written under ``<name>/slab.*``.  If a previous (partial)
    trajectory exists, it is folded into ``slab_history.traj`` and replayed to
    rebuild the BFGS Hessian before continuing.

    Parameters
    ----------
    atoms : ase.Atoms
        Slab with a GPAW calculator attached.
    name : str
        Directory prefix for all output files.
    fmax : float
        Force convergence criterion (eV/Ang).
    maxstep : float
        Maximum BFGS step length (Ang).
    """
    slab_name=name+'/'+'slab'
    slab_hist_name=slab_name+'_history'
    atoms.calc.set(txt=slab_name+'.txt')
    # Drop observers left over from a previous run before re-attaching,
    # so the checkpoint writer is not attached twice.
    atoms.calc.__dict__['observers']=[]
    # Checkpoint the calculator state every 10 SCF iterations.
    atoms.calc.attach(atoms.calc.write, 10, slab_name+"_interm.gpw")
    def _check_file_exists(filename):
        """Check if file exists and is not empty"""
        if os.path.isfile(filename):
            return os.path.getsize(filename) > 0
        else:
            return False
    # check if it is a restart
    barrier()  # sync MPI ranks before probing the filesystem
    if _check_file_exists(slab_name+".traj"):
        latest = read(slab_name+".traj", index=":")
        # check if already restarted previously and extend history if needed
        if not (_check_file_exists(slab_hist_name+'.traj')):
            barrier()
            # NOTE(review): write() is called on all ranks here — presumably
            # ase.io handles the parallel write; confirm under MPI.
            write(slab_hist_name+".traj",latest)
        else:
            hist = read(slab_hist_name+'.traj', index=":")
            hist.extend(latest)
            write(slab_hist_name+'.traj',hist)
    dyn=BFGS(atoms=atoms,trajectory=slab_name+'.traj',
             logfile = slab_name+'.log',maxstep=maxstep)
    # if history exists, read in hessian
    if _check_file_exists(slab_hist_name+".traj"):
        dyn.replay_trajectory(slab_hist_name+".traj")
    # optimize
    dyn.run(fmax=fmax)
    atoms.calc.write(slab_name+'.gpw') |
kianpu34593/base4gpaw | tutorial/Cu_bulk.py | <filename>tutorial/Cu_bulk.py
# Tutorial script: automatic convergence of bulk Cu (grid spacing / k-points)
# using actgpaw's bulk_autoconv workflow.
from gpaw import GPAW,Mixer,Davidson
from ase.calculators.calculator import kptdensity2monkhorstpack as kdens2mp
from actgpaw import bulk_autoconv as bulk_ac
# input the material
element = 'Cu_mp-30' #the name should be the same as the cif file
element_atom = bulk_ac.bulk_builder(element) #cif --> ase atom
# initialize the calculator
## convert k density to kpts based on cell size
kpts = kdens2mp(element_atom)
calc=GPAW(xc = 'PBE',
          h = 0.16,              # real-space grid spacing (Ang)
          kpts = kpts,
          spinpol = False,
          maxiter = 333,         # SCF iteration cap
          mixer = Mixer(0.05,5,50),
          eigensolver = Davidson(3),
          occupations = {'name':'fermi-dirac','width':0.1})
#call bulk_auto_conv module
bulk_ac.bulk_auto_conv(element, #input material
                       calc, #initial calculator
                       rela_tol=10*10**(-3), #convergence criteria
                       temp_print=True, #print out the convergence process
                       ) |
kianpu34593/base4gpaw | BASIC/tests/test_actgpaw.py | """
Unit and regression test for the actgpaw package.
"""
# Import package, test suite, and other packages as needed
import actgpaw
import pytest
import sys
def test_actgpaw_imported():
"""Sample test, will always pass so long as import statement worked"""
assert "actgpaw" in sys.modules
|
kianpu34593/base4gpaw | BASIC/__init__.py | """
BASIC
A Python package for Bulk Adsorption Surface energy calculation with automatIc Convergence
"""
# Add imports here
# Re-export the package's utility helpers at the top level.
from .utils import *
# Handle versioneer: expose the generated version metadata, then delete the
# helper names so they do not leak into the public package namespace.
from ._version import get_versions
versions = get_versions()
__version__ = versions['version']
__git_revision__ = versions['full-revisionid']
del get_versions, versions
|
kianpu34593/base4gpaw | tutorial/Cu_surf.py | from ase.db import connect
from gpaw import GPAW, Mixer, MixerDif, Davidson, PoissonSolver
from actgpaw import surf_autoconv as surf_ac
# Tutorial script: slab-thickness convergence for Cu(111), reusing the
# converged settings stored for the optimized bulk.
# read the optimized conventional cell in the database
element = "Cu_mp-30"
element_bulk = connect("final_database/bulk.db").get(name=element)
h = element_bulk.h        # converged grid spacing (Ang)
xc = element_bulk.xc      # exchange-correlation functional
sw = element_bulk.sw      # Fermi-Dirac smearing width (eV)
spin = element_bulk.spin  # whether the bulk run was spin-polarized
## all settings but kpts (due to structure dependence)
# miller index of interest
struc = "111"
# set up the calculator
calc = GPAW(
    xc=xc,
    h=h,
    symmetry={"point_group": False},
    eigensolver=Davidson(3),
    mixer=Mixer(beta=0.05, nmaxold=5, weight=50),
    spinpol=spin,
    maxiter=500,
    occupations={"name": "fermi-dirac", "width": sw},
    # dipole-correction Poisson solver for asymmetric slab fields
    poissonsolver={'dipolelayer': 'xy'},
)
# call surf_auto_conv module
surf_ac.surf_auto_conv(
    element, #input material
    struc, #miller index of interest
    calc, #calculator
    generator="import", #import the slab model
    pbc_all=False, #periodic boundary condition true for all directions
    init_layer=4, #initial slab layers
    interval=2, #interval between layers
    fix_layer=2, #number of fixed layers
    fix_option='bottom', #constrain the bottom layers
    vac=10, #vacuum size (Ang)
    rela_tol=5, #convergence criteria (%)
    temp_print=True, #print out the convergence process
)
|
kianpu34593/base4gpaw | BASIC/surface.py | from pymatgen.core.surface import SlabGenerator, generate_all_slabs
from pymatgen.io.ase import AseAtomsAdaptor
from ase.db import connect
from ase.build import surface
from pymatgen.io.cif import CifWriter
import numpy as np
from collections import Counter
from itertools import chain
from pymatgen.analysis.adsorption import plot_slab
from matplotlib import pyplot as plt
from ase.visualize.plot import plot_atoms
import os
from ase.io import read
def sym_all_slab(element,max_ind,layers,vacuum_layer):
    """List, per Miller index, how many symmetric terminations exist.

    Pulls the optimized bulk for *element* from ``final_database/bulk.db``,
    generates every symmetric slab up to *max_ind*, and prints one line per
    Miller index with the number of distinct shifts found.
    """
    atoms_bulk = connect('final_database/bulk.db').get_atoms(name=element)
    structure_bulk = AseAtomsAdaptor.get_structure(atoms_bulk)
    all_slabs = generate_all_slabs(structure_bulk, max_ind, layers, vacuum_layer,
                                   lll_reduce=True, center_slab=True,
                                   symmetrize=True, in_unit_planes=True)
    print('Miller Index'+'\t'+'Num of Different Shift(s)')
    # Count terminations per Miller index in generation order.
    shift_counts = Counter(s.miller_index for s in all_slabs)
    for miller_index, num_shifts in shift_counts.items():
        print(str(miller_index)+'\t'+str(num_shifts))
def surf_creator(element,ind,layers,vacuum_layer,option='pymatgen',max_ind=1,unit=True,order=0,save=False,plot=True):
    """Build candidate slabs for *element*, print/plot them, optionally save one.

    Three generators are supported via ``option``:
      * ``'pymatgen'``     — SlabGenerator for a single Miller index *ind*,
                             keeping only symmetric terminations.
      * ``'pymatgen_all'`` — generate_all_slabs up to max(ind), filtered to
                             slabs whose Miller index equals *ind*.
      * ``'ase'``          — ase.build.surface for *ind* (single slab).

    For each candidate the layer count, cell angles and lengths are printed;
    with ``plot=True`` the slabs are drawn; with ``save=True`` the slab at
    position ``order`` is written via surf_saver().

    NOTE(review): the parameter ``layers`` is reused as a loop variable below
    (it is overwritten with the counted layer number of each slab).
    """
    bulk_ase=connect('final_database/bulk.db').get_atoms(name=element)
    bulk_pym=AseAtomsAdaptor.get_structure(bulk_ase)
    if option=='pymatgen':
        slabgen = SlabGenerator(bulk_pym, ind, layers, vacuum_layer,
                                center_slab=True,lll_reduce=True,in_unit_planes=unit)
        #slabs=slabgen.get_slabs()
        #slabs_symmetric=[slab for slab in slabs if slab.is_symmetric()]
        slabs_symmetric=slabgen.get_slabs(symmetrize=True)
        if len(slabs_symmetric) == 0:
            print('No symmetric slab found!')
        else:
            print('No.'+'\t'+'Layers'+'\t'+'Angles'+'\t\t\t\tCell Length')
            if plot:
                fig=plt.figure(figsize=(8,8))
            layers_ls=[]
            for n,slab in enumerate(slabs_symmetric):
                #temp save for analysis
                # Round-trip through a temporary CIF so the slab is analyzed
                # exactly as ASE would read it back.
                os.makedirs(element+'/raw_surf',exist_ok=True)
                surf_location=element+'/raw_surf/'+str(ind)+'_temp'+'.cif'
                CifWriter(slab).write_file(surf_location)
                slab_ase=read(surf_location)
                #slab_ase=AseAtomsAdaptor.get_atoms(slab)
                angles=np.round(slab_ase.get_cell_lengths_and_angles()[3:],decimals=4)
                cell_length=np.round(slab_ase.get_cell_lengths_and_angles()[:3],decimals=4)
                # Layers = number of distinct z-coordinates (rounded to 4 dp).
                layers=len(np.unique(np.round(slab_ase.positions[:,2],decimals=4)))
                print(str(n)+'\t'+str(layers)+'\t'+str(angles)+'\t'+str(cell_length))
                layers_ls.append(layers)
                if plot:
                    ax=fig.add_subplot(np.ceil(len(slabs_symmetric)/2),2,n+1)
                    plot_slab(slab,ax,adsorption_sites=False,decay=0.25,window=1)
                    ax.set_title('{}: No. {}'.format(slab.miller_index,n),{'fontsize':20})
                    ax.set_xticks([])
                    ax.set_yticks([])
            # Clean up the temporary CIF used for analysis.
            if os.path.isfile(surf_location):
                os.remove(surf_location)
            if save:
                slab_to_save=slabs_symmetric[order]
                surf_saver(element,slab_to_save,ind,layers_ls[order])
    elif option=='pymatgen_all':
        max_ind=max(ind)
        slabgenall=generate_all_slabs(bulk_pym,max_ind,layers,vacuum_layer,
                                      lll_reduce=True,center_slab=True,
                                      symmetrize=True,in_unit_planes=True)
        # Keep only slabs with the requested Miller index.
        slab_RM=[]
        for slab in slabgenall:
            if slab.miller_index == ind:
                slab_RM.append(slab)
        print('No.'+'\t'+'Layers'+'\t'+'Angles'+'\t\t\t\tCell Length')
        if plot:
            fig=plt.figure(figsize=(8,8))
        layers_ls=[]
        # Default so the cleanup below is safe when slab_RM is empty.
        surf_location='.'
        for n,slab in enumerate(slab_RM):
            #temp save for analysis
            os.makedirs(element+'/raw_surf',exist_ok=True)
            surf_location=element+'/raw_surf/'+str(ind)+'_temp'+'.cif'
            CifWriter(slab).write_file(surf_location)
            slab_ase=read(surf_location)
            #slab_ase=AseAtomsAdaptor.get_atoms(slab)
            angles=np.round(slab_ase.get_cell_lengths_and_angles()[3:],decimals=4)
            cell_length=np.round(slab_ase.get_cell_lengths_and_angles()[:3],decimals=4)
            layers=len(np.unique(np.round(slab_ase.positions[:,2],decimals=4)))
            print(str(n)+'\t'+str(layers)+'\t'+str(angles)+'\t'+str(cell_length))
            layers_ls.append(layers)
            if plot:
                ax=fig.add_subplot(np.ceil(len(slab_RM)/2),2,n+1)
                plot_slab(slab,ax,adsorption_sites=False,decay=0.25,window=1)
                ax.set_title('{}: No. {}'.format(slab.miller_index,n),{'fontsize':20})
                ax.set_xticks([])
                ax.set_yticks([])
        if os.path.isfile(surf_location):
            os.remove(surf_location)
        if save:
            slab_to_save=slab_RM[order]
            surf_saver(element,slab_RM[order],ind,layers_ls[order])
    elif option=='ase':
        slab_ase=surface(bulk_ase,ind,layers=layers,vacuum=vacuum_layer)
        print('No.'+'\t'+'Layers'+'\t'+'Angles'+'\t\t\t\tCell Length')
        angles=np.round(slab_ase.get_cell_lengths_and_angles()[3:],decimals=4)
        cell_length=np.round(slab_ase.get_cell_lengths_and_angles()[:3],decimals=4)
        print(str(0)+'\t'+str(len(np.unique(np.round(slab_ase.positions[:,2],decimals=4))))+'\t'+str(angles)+'\t'+str(cell_length))
        if plot:
            fig=plt.figure(figsize=(8,8))
            ax=fig.add_subplot(111)
            plot_atoms(slab_ase,ax=ax)
            ax.set_title('ASE created: {}'.format(str(ind)),{'fontsize':20})
            ax.set_xticks([])
            ax.set_yticks([])
        if save:
            slab_struc=AseAtomsAdaptor.get_structure(slab_ase)
            layers=len(np.unique(np.round(slab_ase.positions[:,2],decimals=4)))
            surf_saver(element,slab_struc,ind,layers)
def surf_saver(element,slab_to_save,ind,layers):
    """Write a slab to ``<element>/raw_surf/<ind>_<layers>.cif``.

    Warns when the raw_surf directory already exists, and refuses to
    overwrite an existing CIF file (warning instead of writing).
    """
    target_dir = element+'/raw_surf'
    if os.path.isdir(target_dir):
        print('WARNING: '+target_dir+' already exists!')
    os.makedirs(target_dir,exist_ok=True)
    cif_path = target_dir+'/'+str(ind)+'_'+str(layers)+'.cif'
    if not os.path.isfile(cif_path):
        CifWriter(slab_to_save).write_file(cif_path)
        print('Raw surface saving complete!')
    else:
        print('WARNING: '+cif_path+' already exists!')
        print('Raw surface saving fail!')
|
kianpu34593/base4gpaw | BASIC/surf_autoconv.py | <reponame>kianpu34593/base4gpaw
from gpaw import GPAW,Mixer,MixerDif,MixerSum,Davidson
from ase.constraints import FixAtoms
from ase.build import surface
from ase.io import write,read
from ase.db import connect
import os
import BASIC.optimizer as opt
from ase.parallel import parprint,paropen,world
import numpy as np
import re
import sys
from ase.calculators.calculator import kptdensity2monkhorstpack as kdens2mp
from pymatgen.core.surface import SlabGenerator
from pymatgen.io.ase import AseAtomsAdaptor
###Warning: Only stocimetric surface!
# def surf_auto_conv(element,
# struc,
# init_layer=5,
# vac=5,
# fix_layer=2,
# rela_tol=5,
# temp_print=True,
# generator='pymatgen',
# interval=2,
# maxiter=333,
# beta=0.05,
# nmaxold=5,
# weight=50.0):
def surf_auto_conv(element,struc,gpaw_calc,generator='pymatgen',pbc_all=False,init_layer=4,interval=2,fix_layer=2,fix_option='bottom',vac=10,solver_fmax=0.01,solver_step=0.05,rela_tol=5,temp_print=True):
    """Converge slab thickness for surface *struc* of *element*.

    Slabs of increasing thickness are generated (pymatgen / ase / imported
    CIFs), relaxed with GPAW, and the relative change of the surface energy
    between consecutive thicknesses is compared against ``rela_tol`` (in %).
    If the plain comparison does not converge within 5 iterations, a
    Fiorentini-Methfessel linear fit of slab energy vs. atom count is used
    as the bulk reference instead.  The chosen slab and its settings are
    written to ``final_database/surf.db``; progress is appended to a text
    report under ``<element>/surf/``.

    Parameters mirror the tutorial call site: ``struc`` is the Miller index
    as a string (e.g. '111'); ``gpaw_calc`` is a configured GPAW calculator
    whose k-points are overwritten per slab from the bulk k-density.
    """
    #convert str ind to tuple
    m_ind=tuple(map(int,struc))
    #create report
    rep_location=(element+'/'+'surf'+'/'+struc+'_results_report.txt')
    # Start with a fresh report file (only rank 0 deletes).
    if world.rank==0 and os.path.isfile(rep_location):
        os.remove(rep_location)
    # #check the optimized bulk object
    # if not os.path.isfile('final_database/bulk.db'):
    #     with paropen(rep_location,'a') as f:
    #         parprint('ERROR: bulk database has not been established!',file=f)
    #         parprint('Surface Convergence Computation Suspended!',file=f)
    #         f.close()
    #     sys.exit()
    # else:
    #     db_bulk=connect('final_database'+'/'+'bulk.db')
    #     try:
    #         opt_bulk=db_bulk.get_atoms(name=element)
    #     except:
    #         with paropen(rep_location,'a') as f:
    #             parprint('ERROR: No Optimized Bulk Object Found!',file=f)
    #             parprint('Surface Convergence Computation Suspended!',file=f)
    #             f.close()
    #         sys.exit()
    db_bulk=connect('final_database'+'/'+'bulk.db')
    opt_bulk=db_bulk.get_atoms(name=element)
    calc_dict=gpaw_calc.__dict__['parameters']
    #get the optimized bulk object and converged parameters
    pymatgen_bulk=AseAtomsAdaptor.get_structure(opt_bulk)
    if calc_dict['spinpol']:
        magmom=np.mean(opt_bulk.get_magnetic_moments()) ## do i need to make this exposed as well?
    k_density=db_bulk.get(name=element).k_density
    kpts=[int(i) for i in (db_bulk.get(name=element).kpts).split(',')]
    #print out parameters
    with paropen(rep_location,'a') as f:
        parprint('Initial Parameters:',file=f)
        parprint('\t'+'Materials: '+element,file=f)
        parprint('\t'+'Miller Index: '+str(m_ind),file=f)
        parprint('\t'+'Actual Layer: '+str(init_layer),file=f)
        parprint('\t'+'Vacuum length: '+str(vac)+'Ang',file=f)
        parprint('\t'+'Fixed layer: '+str(fix_layer),file=f)
        parprint('\t'+'xc: '+calc_dict['xc'],file=f)
        parprint('\t'+'h: '+str(calc_dict['h']),file=f)
        parprint('\t'+'k_density: '+str(k_density),file=f)
        parprint('\t'+'kpts: '+str(kpts),file=f)
        parprint('\t'+'sw: '+str(calc_dict['occupations']),file=f)
        parprint('\t'+'spin polarized: '+str(calc_dict['spinpol']),file=f)
        if calc_dict['spinpol']:
            parprint('\t'+'Init magmom: '+str(magmom),file=f)
        parprint('\t'+'rela_tol: '+str(rela_tol)+'%',file=f)
        f.close()
    #optimize the layers
    ##connect to the layer convergence database
    db_layer=connect(element+'/'+'surf'+'/'+struc+'/'+'layer_converge.db')
    diff_primary=100
    diff_second=100
    iters=len(db_layer)
    act_layer_ls=[]
    sim_layer_ls=[]
    sim_layer=1
    # Restart path: recompute convergence measures from previously stored slabs.
    if iters>=2:
        for i in range(2,iters):
            fst=db_layer.get_atoms(id=i-1)
            snd=db_layer.get_atoms(id=i)
            trd=db_layer.get_atoms(id=i+1)
            diff_primary=max(surf_e_calc(fst,snd,opt_bulk.get_potential_energy(),len(opt_bulk.get_tags())),surf_e_calc(fst,trd,opt_bulk.get_potential_energy(),len(opt_bulk.get_tags())))
            diff_second=surf_e_calc(snd,trd,opt_bulk.get_potential_energy(),len(opt_bulk.get_tags()))
            if temp_print==True:
                temp_output_printer(db_layer,i,'act_layer',opt_bulk.get_potential_energy(),len(opt_bulk.get_tags()),rep_location)
        for j in range(len(db_layer)):
            act_layer_ls.append(db_layer.get(j+1).act_layer)
            sim_layer_ls.append(db_layer.get(j+1).sim_layer)
        # Extrapolate the next simulated/actual layer counts from the stride.
        sim_layer=sim_layer_ls[-1]+np.diff(sim_layer_ls)[0]
        init_layer=act_layer_ls[-1]+np.diff(act_layer_ls)[0]
    # Build the first candidate slab for the generator in use.
    if generator=='pymatgen':
        slabgen = SlabGenerator(pymatgen_bulk, m_ind, sim_layer, sim_layer*2, center_slab=True, lll_reduce=True, in_unit_planes=True)
        slabs=slabgen.get_slabs() #this only take the first structure
        slabs_symmetric=[slab for slab in slabs if slab.is_symmetric()]
        slab=AseAtomsAdaptor.get_atoms(slabs_symmetric[0]) #convert to ase structure
        actual_layer=len(np.unique(np.round(slab.positions[:,2],decimals=4)))
    elif generator=='ase':
        slab=surface(opt_bulk,m_ind,layers=sim_layer,vacuum=vac)
        actual_layer=len(np.unique(np.round(slab.positions[:,2],decimals=4)))
    while (diff_primary>rela_tol or diff_second>rela_tol) and iters <= 5:
        if generator=='import':
            slab=read(element+'/raw_surf/'+str(m_ind)+'_'+str(init_layer)+'.cif')
            actual_layer=len(np.unique(np.round(slab.positions[:,2],decimals=4)))
        # Thicken the simulated slab until the counted layers match the target.
        while actual_layer != init_layer:
            sim_layer+=1
            if generator=='pymatgen':
                slabgen = SlabGenerator(pymatgen_bulk, m_ind, sim_layer, sim_layer*2, center_slab=True, lll_reduce=True, in_unit_planes=True)
                slabs=slabgen.get_slabs() #this only take the first structure
                slabs_symmetric=[slab for slab in slabs if slab.is_symmetric()]
                slab=AseAtomsAdaptor.get_atoms(slabs_symmetric[0]) #convert to ase structure
            elif generator=='ase':
                slab=surface(opt_bulk,m_ind,layers=sim_layer,vacuum=vac)
            else:
                # Imported slab with the wrong layer count cannot be regenerated.
                with paropen(rep_location,'a') as f:
                    parprint('ERROR: The number of layers of the imported surface is not correct!',file=f)
                    parprint('\t'+'Actual Layer: '+str(actual_layer),file=f)
                    parprint('\t'+'Desired Layer: '+str(init_layer),file=f)
                    parprint('Computation Suspended!',file=f)
                sys.exit()
            actual_layer=len(np.unique(np.round(slab.positions[:,2],decimals=4)))
        if actual_layer > init_layer:
            with paropen(rep_location,'a') as f:
                parprint('ERROR: Actual number of layers is greater than the desired number of layers.',file=f)
                parprint('\t'+'Actual Layer: '+str(actual_layer),file=f)
                parprint('\t'+'Desired Layer: '+str(init_layer),file=f)
                parprint('Computation Suspended!',file=f)
            sys.exit()
        # Re-center with the requested vacuum if the generated one differs.
        current_vac=slab.cell.lengths()[-1]-max(slab.positions[:,2])
        if current_vac != vac:
            slab.center(vacuum=vac,axis=2)
        if calc_dict['spinpol']:
            slab.set_initial_magnetic_moments(magmom*np.ones(len(slab)))
        # Constrain atoms: either the bottom fix_layer layers or a centered band.
        if fix_option =='bottom':
            fix_mask=np.round(slab.positions[:,2],decimals=4) <= np.unique(np.round(slab.positions[:,2],decimals=4))[fix_layer-1]
        elif fix_option == 'center':
            z_direction=np.round(slab.positions[:,2],decimals=4)
            z_direction_unique=np.unique(np.round(slab.positions[:,2],decimals=4))
            if len(z_direction_unique)%2 == 0:
                center_pos_up=len(z_direction_unique)//2
                center_pos_down=len(z_direction_unique)//2-1
                start_pos=int(center_pos_down-(fix_layer-2)/2)
                end_pos=int(center_pos_up+(fix_layer-2)/2)
                fix_mask_up=z_direction >= z_direction_unique[start_pos]
                fix_mask_down=z_direction <= z_direction_unique[end_pos]
                fix_mask=fix_mask_up & fix_mask_down
            elif len(z_direction_unique)%2 == 1:
                center_pos=int(np.median(np.arange(len(z_direction_unique))))
                start_pos=int(center_pos-(fix_layer-1)/2)
                end_pos=int(center_pos+(fix_layer-1)/2)
                fix_mask_up=z_direction >= z_direction_unique[start_pos]
                fix_mask_down=z_direction <= z_direction_unique[end_pos]
                fix_mask=fix_mask_up & fix_mask_down
        slab.set_constraint(FixAtoms(mask=fix_mask))
        if pbc_all:
            slab.set_pbc([1,1,1])
        else:
            slab.set_pbc([1,1,0])
        # ortho=slab.get_cell_lengths_and_angles()[3:5]
        # if np.all(90==ortho):
        #     slab.set_pbc([1,1,0])
        # else:
        #     slab.set_pbc([1,1,1])
        # Recompute k-points for the current slab size from the bulk k-density.
        kpts=kdens2mp(slab,kptdensity=k_density,even=True)
        gpaw_calc.__dict__['parameters']['kpts']=kpts
        calc_dict=gpaw_calc.__dict__['parameters']
        slab_length=slab.cell.lengths()
        slab_long_short_ratio=max(slab_length)/min(slab_length)
        # Very elongated cells tend to have SCF mixing trouble — warn only.
        if slab_long_short_ratio > 15:
            with paropen(rep_location,'a') as f:
                parprint('WARNING: slab long-short side ratio is'+str(slab_long_short_ratio),file=f)
                parprint('Consider change the mixer setting, if not converged.',file=f)
                f.close()
        slab.set_calculator(gpaw_calc)
        location=element+'/'+'surf'+'/'+struc+'/'+str(actual_layer)+'x1x1'
        opt.surf_relax(slab, location, fmax=solver_fmax, maxstep=solver_step, replay_traj=None)
        db_layer.write(slab,sim_layer=sim_layer,act_layer=actual_layer) #sim layer is different from the actual layers
        if iters>=2:
            fst=db_layer.get_atoms(id=iters-1)
            snd=db_layer.get_atoms(id=iters)
            trd=db_layer.get_atoms(id=iters+1)
            diff_primary=max(surf_e_calc(fst,snd,opt_bulk.get_potential_energy(),len(opt_bulk.get_tags())),surf_e_calc(fst,trd,opt_bulk.get_potential_energy(),len(opt_bulk.get_tags())))
            diff_second=surf_e_calc(snd,trd,opt_bulk.get_potential_energy(),len(opt_bulk.get_tags()))
            if temp_print==True:
                temp_output_printer(db_layer,iters,'act_layer',opt_bulk.get_potential_energy(),len(opt_bulk.get_tags()),rep_location)
        act_layer_ls.append(actual_layer)
        sim_layer_ls.append(sim_layer)
        iters+=1
        init_layer+=interval #change to one because the unit cell will generate 2 surfaces per layer
    if iters>=5:
        if diff_primary>rela_tol or diff_second>rela_tol:
            # Fiorentini and Methfessel relation (linear fit)
            with paropen(rep_location,'a') as f:
                parprint('Regular surface convergence failed.',file=f)
                parprint('Entering Fiorentini and Methfessel relation (linear fit) convergence test.',file=f)
            # Slope of E_slab vs N_atoms is the fitted bulk energy per atom.
            energy_slabs=[db_layer.get_atoms(i+1).get_potential_energy() for i in range(len(db_layer))]
            num_atoms=[db_layer.get(i+1).natoms for i in range(len(db_layer))]
            energy_bulk_fit=np.round(np.polyfit(num_atoms,energy_slabs,1)[0],decimals=5)
            fit_iters=2
            while (diff_primary>rela_tol or diff_second>rela_tol) and fit_iters <= 5:
                fst=db_layer.get_atoms(id=fit_iters-1)
                snd=db_layer.get_atoms(id=fit_iters)
                trd=db_layer.get_atoms(id=fit_iters+1)
                diff_primary=max(surf_e_calc(fst,snd,energy_bulk_fit,1),surf_e_calc(fst,trd,energy_bulk_fit,2))
                diff_second=surf_e_calc(snd,trd,energy_bulk_fit,1)
                if temp_print==True:
                    temp_output_printer(db_layer,fit_iters,'act_layer',energy_bulk_fit,1,rep_location)
                fit_iters+=1
            if diff_primary>rela_tol or diff_second>rela_tol:
                with paropen(rep_location,'a') as f:
                    parprint("WARNING: Max Surface iterations reached! System may not be converged.",file=f)
                    parprint("Computation Suspended!",file=f)
                    f.close()
                sys.exit()
            act_layer=act_layer_ls[fit_iters-3]
            sim_layer=sim_layer_ls[fit_iters-3]
            final_slab=db_layer.get_atoms(fit_iters-2)
    # NOTE(review): these three assignments run unconditionally and therefore
    # overwrite the linear-fit selection made just above when that branch was
    # taken — confirm whether they were meant to be in an `else`.
    act_layer=act_layer_ls[-3]
    sim_layer=sim_layer_ls[-3]
    final_slab=db_layer.get_atoms(len(db_layer)-2)
    vac=np.round(final_slab.cell.lengths()[-1]-max(final_slab.positions[:,2]),decimals=4)
    if calc_dict['spinpol']:
        final_mag=final_slab.get_magnetic_moments()
    # Store the converged slab and its settings in the final database.
    db_final=connect('final_database'+'/'+'surf.db')
    id=db_final.reserve(name=element+'('+struc+')')
    if id is None:
        id=db_final.get(name=element+'('+struc+')').id
        db_final.update(id=id,atoms=final_slab,name=element+'('+struc+')',
                        act_layer=act_layer,sim_layer=sim_layer,vac=vac,
                        h=calc_dict['h'],sw=calc_dict['occupations']['width'],
                        xc=calc_dict['xc'],spin=calc_dict['spinpol'],
                        k_density=k_density,kpts=str(','.join(map(str, calc_dict['kpts']))))
    else:
        db_final.write(final_slab,id=id,name=element+'('+struc+')',
                       act_layer=act_layer,sim_layer=sim_layer,vac=vac,
                       h=calc_dict['h'],sw=calc_dict['occupations']['width'],
                       xc=calc_dict['xc'],spin=calc_dict['spinpol'],
                       k_density=k_density,kpts=str(','.join(map(str, calc_dict['kpts']))))
    with paropen(rep_location,'a') as f:
        parprint('Final Parameters:',file=f)
        parprint('\t'+'Simulated Layer: '+str(sim_layer),file=f)
        parprint('\t'+'Actual Layer: '+str(act_layer),file=f)
        parprint('\t'+'Vacuum length: '+str(vac)+'Ang',file=f)
        parprint('\t'+'Fixed layer: '+str(fix_layer),file=f)
        if calc_dict['spinpol']:
            parprint('\t'+'Final magmom: '+str(final_mag),file=f)
        f.close()
def surf_e_calc(pre,post,bulk_e,bulk_num):
    """Return the percentage change in surface energy between two slabs.

    Each slab's surface energy is gamma = (E_slab - N*e_bulk) / (2*A), where
    e_bulk = bulk_e/bulk_num is the bulk energy per atom and A is the
    projected x-y cell area (factor 2: a slab exposes two surfaces).  The
    result is 100*|(gamma_post - gamma_pre)/gamma_pre|.
    """
    e_bulk_per_atom = bulk_e/bulk_num
    def _gamma(slab):
        # projected x-y area, doubled for the two exposed faces
        area = 2*(slab.cell[0][0]*slab.cell[1][1])
        n_atoms = len(slab.get_tags())
        return (1/area)*(slab.get_potential_energy()-n_atoms*e_bulk_per_atom)
    gamma_pre = _gamma(pre)
    gamma_post = _gamma(post)
    return 100*(abs((gamma_post-gamma_pre)/gamma_pre))
def temp_output_printer(db,iters,key,bulk_e,bulk_num,location):
    """Append pairwise surface-energy differences for rows iters-1..iters+1.

    Reads three consecutive rows of *db*, prints their *key* values, and the
    three relative surface-energy differences (in %) computed by
    surf_e_calc() using *bulk_e*/*bulk_num* as the bulk reference, appending
    everything to the report file at *location* (parallel-safe paropen).
    """
    fst_r=db.get(iters-1)
    snd_r=db.get(iters)
    trd_r=db.get(iters+1)
    with paropen(location,'a') as f:
        parprint('Optimizing parameter: '+key,file=f)
        parprint('\t'+'1st: '+str(fst_r[key])+' 2nd: '+str(snd_r[key])+' 3rd: '+str(trd_r[key])+'\n',file=f)
        parprint('\t'+'(2nd-1st)/1st: '+str(np.round(surf_e_calc(db.get_atoms(iters),db.get_atoms(iters-1),bulk_e,bulk_num),decimals=5))+'%',file=f)
        parprint('\t'+'(3nd-1st)/1st: '+str(np.round(surf_e_calc(db.get_atoms(iters+1),db.get_atoms(iters-1),bulk_e,bulk_num),decimals=5))+'%',file=f)
        parprint('\t'+'(3nd-2st)/2nd: '+str(np.round(surf_e_calc(db.get_atoms(iters+1),db.get_atoms(iters),bulk_e,bulk_num),decimals=5))+'%',file=f)
        f.close() |
kianpu34593/base4gpaw | BASIC/converge_calc.py | <reponame>kianpu34593/base4gpaw
import os
from typing import Type
from ase.parallel import paropen, parprint, world
from ase.db import connect
from ase.io import read
from glob import glob
import numpy as np
from gpaw import restart
import BASIC.optimizer as opt
import sys
from ase.calculators.calculator import kptdensity2monkhorstpack as kdens2mp
from ase.constraints import FixAtoms
from BASIC.utils import detect_cluster
def bulk_builder(element):
    """Load the starting bulk structure for *element*.

    Reads ``orig_cif_data/<element>.cif`` and returns it as an ASE Atoms
    object.
    """
    cif_path = f"orig_cif_data/{element}.cif"
    return read(cif_path)
# def detect_cluster(slab,tol=0.3):
# n=len(slab)
# dist_matrix=np.zeros((n, n))
# slab_c=np.sort(slab.get_positions()[:,2])
# for i, j in itertools.combinations(list(range(n)), 2):
# if i != j:
# cdist = np.abs(slab_c[i] - slab_c[j])
# dist_matrix[i, j] = cdist
# dist_matrix[j, i] = cdist
# condensed_m = squareform(dist_matrix)
# z = linkage(condensed_m)
# clusters = fcluster(z, tol, criterion="distance")
# return slab_c,list(clusters)
def pbc_checker(slab):
    """Set the slab's periodic boundary conditions from its cell angles.

    When the first two cell angles are both exactly 90 degrees (after
    rounding to 4 decimals) the slab is treated as an orthogonal surface
    cell and made periodic in x/y only; otherwise full 3D periodicity is
    enabled.
    """
    alpha, beta = np.round(slab.cell.angles(), decimals=4)[:2]
    is_orthogonal = alpha == 90.0000 and beta == 90.0000
    slab.pbc = [1, 1, 0] if is_orthogonal else [1, 1, 1]
class surf_calc_conv:
    def __init__(self,
                 element: str,
                 miller_index: str,
                 shift: float,
                 order: int,
                 gpaw_calc,
                 rela_tol: float=0.015,
                 restart_calc: bool=False,
                 fix_layer: int=2,
                 vacuum: int=10,
                 solver_fmax: float=0.01,
                 solver_max_step: float=0.05,
                 surf_energy_calc_mode: str='regular',
                 fix_option: str='bottom'):
        """Run the full slab layer-convergence workflow for one termination.

        The constructor performs the whole computation: it loads the
        optimized bulk reference, relaxes raw slabs of increasing thickness
        until three consecutive surface energies agree within *rela_tol*
        (eV/Ang^2), computes the final surface energy ('regular' reference
        or 'linear-fit' of E_slab vs N_atoms), and stores the converged slab
        in ``results/<element>/surf/all_miller_indices_all_shift.db``.
        """
        #intialize
        ##globalize variables
        self.element=element
        self.solver_max_step=solver_max_step
        self.solver_fmax=solver_fmax
        self.surf_energy_calc_mode=surf_energy_calc_mode
        self.vacuum=vacuum
        self.fix_option=fix_option
        self.fix_layer=fix_layer
        # '1,1,1' -> '111' (used in paths) and (1,1,1) (used by generators)
        self.miller_index_tight=''.join(miller_index.split(','))
        self.miller_index_loose=tuple(map(int,miller_index.split(','))) #tuple
        self.shift=shift
        self.order=order
        self.gpaw_calc=gpaw_calc
        self.final_slab_name=self.element+'_'+self.miller_index_tight+'_'+str(self.shift)+'_'+str(order)
        self.raw_slab_dir='results/'+element+'/'+'raw_surf/'
        self.target_dir='results/'+element+'/'+'surf/'
        self.target_sub_dir=self.target_dir+self.miller_index_tight+'_'+str(self.shift)+'_'+str(order)+'/'
        self.report_location=(self.target_dir+self.miller_index_tight+'_'+str(self.shift)+'_'+str(order)+'_results_report.txt')
        self.rela_tol = rela_tol
        ##connect to optimize bulk database to get gpw_dir and bulk potential_energy
        db_bulk=connect('final_database/bulk.db')
        kdensity=db_bulk.get(name=self.element).kdensity
        # bulk reference energy per atom for the 'regular' surface energy
        self.bulk_potential_energy=(db_bulk.get_atoms(name=self.element).get_potential_energy())/len(db_bulk.get_atoms(name=element))
        ##read the smallest slab to get the kpoints
        self.ascend_all_cif_files_full_path=self.sort_raw_slab()
        raw_slab_smallest=read(self.ascend_all_cif_files_full_path[0])
        raw_slab_smallest.pbc=[1,1,0]
        kpts=kdens2mp(raw_slab_smallest,kptdensity=kdensity,even=True)
        self.gpaw_calc.__dict__['parameters']['kpts']=kpts
        self.calc_dict=self.gpaw_calc.__dict__['parameters']
        ##generate report
        if self.calc_dict['spinpol']:
            self.init_magmom=0#np.mean(db_bulk.get_atoms(name=element).get_magnetic_moments())
        self.initialize_report()
        # convergence test
        ## number of layers
        ### restart
        if restart_calc and len(glob(self.target_sub_dir+'*/slab.gpw'))>=1:
            # Recompute convergence measures from previously finished slabs.
            ascend_layer_ls,ascend_gpw_files_dir=self.gather_gpw_file()
            diff_primary=100
            diff_second=100
            if len(ascend_gpw_files_dir) > 2:
                for i in range((len(ascend_layer_ls)-3)+1):
                    self.convergence_update(i,ascend_gpw_files_dir)
                    diff_primary=max(self.surf_energies_diff_arr[0],self.surf_energies_diff_arr[2])
                    diff_second=self.surf_energies_diff_arr[1]
        else:
            #os.remove(self.target_dir+self.miller_index_tight+'_'+str(self.shift)+'/'+)
            ascend_layer_ls=[]
            diff_primary=100
            diff_second=100
        iters=len(ascend_layer_ls)
        self.convergence_loop(iters,diff_primary,diff_second)
        #finalize
        ascend_gpw_files_dir=self.gather_gpw_file()[1]
        ## calculate the surface energy
        # [-3] selects the first slab of the converged 3-slab window.
        if self.surf_energy_calc_mode == 'regular':
            final_atoms,self.gpaw_calc=restart(ascend_gpw_files_dir[-3])
            slab_energy=[final_atoms.get_potential_energy()]
            surface_area=[2*final_atoms.cell[0][0]*final_atoms.cell[1][1]]
            num_of_atoms=[len(final_atoms)]
            surf_energy=np.round(self.surface_energy_calculator(np.array(slab_energy),np.array(surface_area),np.array(num_of_atoms))[0],decimals=4)
            self.calc_dict=self.gpaw_calc.__dict__['parameters']
        elif self.surf_energy_calc_mode == 'linear-fit':
            slab_energy_lst=[]
            surface_area_total_lst=[]
            num_of_atoms_lst=[]
            for gpw_file_dir in ascend_gpw_files_dir[-3:]:
                interm_atoms=restart(gpw_file_dir)[0]
                slab_energy_lst.append(interm_atoms.get_potential_energy())
                surface_area_total_lst.append(2*interm_atoms.cell[0][0]*interm_atoms.cell[1][1])
                num_of_atoms_lst.append(len(interm_atoms))
            surf_energy=np.round(self.surface_energy_calculator(np.array(slab_energy_lst),np.array(surface_area_total_lst),np.array(num_of_atoms_lst))[0],decimals=4)
            final_atoms,self.gpaw_calc=restart(ascend_gpw_files_dir[-3])
            self.calc_dict=self.gpaw_calc.__dict__['parameters']
        else:
            raise RuntimeError(self.surf_energy_calc_mode+'mode not avilable. Available modes are regular, linear-fit.')
        ##save to database
        db_slab_interm=connect(self.target_dir+'all_miller_indices_all_shift'+'.db')
        id=db_slab_interm.reserve(name=self.final_slab_name)
        if id is None:
            # Row already exists: update it in place.
            id=db_slab_interm.get(name=self.final_slab_name).id
            db_slab_interm.update(id=id,atoms=final_atoms,name=self.final_slab_name,
                                  surf_energy=surf_energy,
                                  kpts=str(','.join(map(str, self.calc_dict['kpts']))))
        else:
            db_slab_interm.write(final_atoms,id=id,name=self.final_slab_name,
                                 surf_energy=surf_energy,
                                 kpts=str(','.join(map(str, self.calc_dict['kpts']))))
        f = paropen(self.report_location,'a')
        parprint('Surface energy calculation complete.', file=f)
        f.close()
    def convergence_loop(self,iters,diff_p,diff_s):
        """Relax successively thicker slabs until surface energies converge.

        Iterates over the ascending raw-slab CIF files, relaxing each slab
        (or resuming from an intermediate .gpw checkpoint when present),
        then recomputes the three pairwise surface-energy differences of
        the latest 3-slab window and delegates the pass/fail decision to
        check_convergence().  Stops when both *diff_p* and *diff_s* are
        within rela_tol or after the iteration cap.
        """
        while (diff_p>self.rela_tol or diff_s>self.rela_tol) and iters <= 6:
            layer=self.ascend_all_cif_files_full_path[iters].split('/')[-1].split('.')[0]
            location=self.target_sub_dir+layer+'x1x1'
            if os.path.isfile(location+'/'+'slab_interm.gpw'):
                # Resume an interrupted relaxation from its checkpoint;
                # the calculator and constraints come from the .gpw file.
                slab, gpaw_calc = restart(location+'/'+'slab_interm.gpw')
            else:
                # Fresh slab: set pbc, vacuum, magmoms, and fix the bottom layers.
                slab=read(self.ascend_all_cif_files_full_path[iters])
                pbc_checker(slab)
                slab.center(vacuum=self.vacuum,axis=2)
                if self.calc_dict['spinpol']:
                    slab.set_initial_magnetic_moments(self.init_magmom*np.ones(len(slab)))
                # Cluster atoms by z-coordinate to identify atomic layers.
                slab_c_coord,cluster=detect_cluster(slab)
                if self.fix_option == 'bottom':
                    unique_cluster_index=sorted(set(cluster), key=cluster.index)[self.fix_layer-1]
                    max_height_fix=max(slab_c_coord[cluster==unique_cluster_index])
                    fix_mask=slab.positions[:,2]<(max_height_fix+0.05) #add 0.05 Ang to make sure all bottom fixed
                else:
                    raise RuntimeError('Only bottom fix option available now.')
                slab.set_constraint(FixAtoms(mask=fix_mask))
                # NOTE(review): calculator attachment is done only on the
                # fresh-slab path here; the restart path presumably keeps the
                # calculator loaded from the checkpoint — confirm intent.
                slab.set_calculator(self.gpaw_calc)
            opt.relax(slab,location,fmax=self.solver_fmax,maxstep=self.solver_max_step)
            ascend_layer_ls,ascend_gpw_files_dir=self.gather_gpw_file()
            iters=len(ascend_layer_ls)
            if iters>2:
                iter=iters-3
                self.convergence_update(iter,ascend_gpw_files_dir)
                diff_p=max(self.surf_energies_diff_arr[0],self.surf_energies_diff_arr[2])
                diff_s=self.surf_energies_diff_arr[1]
            self.check_convergence(diff_p,diff_s,iters)
def check_convergence(self,diff_p,diff_s,iters):
if iters>=6:
if diff_p>self.rela_tol or diff_s>self.rela_tol:
f=paropen(self.report_location,'a')
parprint("WARNING: Max iterations reached! layer convergence test failed.",file=f)
parprint("Computation Suspended!",file=f)
parprint(' ',file=f)
f.close()
sys.exit()
else:
f=paropen(self.report_location,'a')
parprint("layer convergence test success!",file=f)
parprint("="*44,file=f)
parprint('\n',file=f)
f.close()
    def convergence_update(self,iter,gpw_files_dir):
        """Recompute surface-energy differences for the 3-slab window at *iter*.

        Loads slabs *iter*..*iter+2* from their .gpw files, evaluates their
        surface energies, and stores the rounded pairwise differences
        [|e0-e1|, |e1-e2|, |e2-e0|] in ``self.surf_energies_diff_arr``
        before appending a progress table to the report.
        """
        slab_energy_lst=[]
        num_of_atoms_lst=[]
        surface_area_total_lst=[]
        pymatgen_layer_ls=[]
        for i in range(iter,iter+3,1):
            atoms=restart(gpw_files_dir[i])[0]
            slab_energy_lst.append(atoms.get_potential_energy())
            surface_area_total_lst.append(2*atoms.cell[0][0]*atoms.cell[1][1])
            num_of_atoms_lst.append(len(atoms))
            # layer count is encoded in the directory name '<layers>x1x1'
            pymatgen_layer_ls.append(int(gpw_files_dir[i].split('/')[-2].split('x')[0]))
        surf_energy_lst=self.surface_energy_calculator(np.array(slab_energy_lst),np.array(surface_area_total_lst),np.array(num_of_atoms_lst))
        surf_energy_arr=np.array(surf_energy_lst)
        # NOTE(review): the rotation below relies on *list* concatenation,
        # i.e. (lst+lst)[1:4] == [e1, e2, e0].  If surface_energy_calculator
        # returns a numpy array, '+' becomes elementwise addition and the
        # slice has the wrong shape — confirm its return type.
        surf_energy_arr_rep= np.array((surf_energy_lst+surf_energy_lst)[1:4])
        self.surf_energies_diff_arr=np.round(np.abs(surf_energy_arr-surf_energy_arr_rep),decimals=4)
        self.convergence_update_report(pymatgen_layer_ls)
def convergence_update_report(self,layer_ls):
    """Append a small fixed-format table of the three layer counts and
    their surface-energy differences (eV/Ang^2) to the report file.
    """
    rep = paropen(self.report_location, 'a')
    parprint('Optimizing parameter: '+'layers', file=rep)
    header = ('1st: ' + str(layer_ls[0])
              + ' 2nd: ' + str(layer_ls[1])
              + ' 3rd: ' + str(layer_ls[2]))
    parprint('\t' + header, file=rep)
    # dashed divider sized to the header line
    parprint('\t' + '-' * len(header), file=rep)
    parprint('\t' + '| ' + '2nd-1st' + ' | ' + '3rd-2nd' + ' | ' + '3rd-1st' + ' |', file=rep)
    diffs = ''.join(str(self.surf_energies_diff_arr[k]) + ' | ' for k in range(3))
    parprint('\t' + '| ' + diffs + 'eV/Ang^2', file=rep)
    parprint(' ', file=rep)
    rep.close()
def surface_energy_calculator(self,slab_energy,surface_area_total,num_of_atoms):
    """Convert total slab energies into surface energies.

    Parameters
    ----------
    slab_energy : numpy.ndarray
        Total potential energies of the slabs (eV).
    surface_area_total : numpy.ndarray
        Total exposed area (top + bottom) of each slab (Ang^2).
    num_of_atoms : numpy.ndarray
        Number of atoms in each slab.

    Returns
    -------
    list
        Surface energy of each slab (eV/Ang^2).

    Raises
    ------
    RuntimeError
        If ``self.surf_energy_calc_mode`` is neither ``'regular'`` nor
        ``'linear-fit'``.
    """
    if self.surf_energy_calc_mode=='regular':
        # gamma = (E_slab - N * E_bulk) / A_total with the pre-computed bulk reference
        surf_energy_lst=(1/surface_area_total)*(slab_energy-num_of_atoms*self.bulk_potential_energy)
    elif self.surf_energy_calc_mode=='linear-fit': ## TO-DO: need to think about how to fit to all slab energies, right now this is localize fitting
        # isinstance is the idiomatic (and subclass-safe) type check
        assert isinstance(num_of_atoms,np.ndarray), 'In linear-fit mode, the type of num_of_atoms variable must be numpy.ndarray'
        assert isinstance(slab_energy,np.ndarray), 'In linear-fit mode, the type of slab_energy variable must be numpy.ndarray'
        assert len(num_of_atoms)==3, 'In linear-fit mode, the size of num_of_atoms variable must be 3'
        assert len(slab_energy)==3, 'In linear-fit mode, the size of slab_energy variable must be 3'
        # slope of E(N) is the effective bulk energy per atom
        self.fitted_bulk_potential_energy=np.round(np.polyfit(num_of_atoms,slab_energy,1)[0],decimals=5)
        surf_energy_lst=(1/surface_area_total)*(slab_energy-num_of_atoms*self.fitted_bulk_potential_energy)
    else:
        # fixed message: was "...mode not avilable..." (typo, missing space)
        raise RuntimeError(self.surf_energy_calc_mode+' mode not available. Available modes are regular, linear-fit.')
    return list(surf_energy_lst)
def gather_gpw_file(self):
    """Collect every ``slab.gpw`` under ``self.target_sub_dir`` and
    return ``(layer counts ascending, matching gpw paths)``.

    NOTE: stale .gpw files left over from a previous run are picked up
    here as well — clean them out beforehand.
    """
    found = glob(self.target_sub_dir + '*/slab.gpw')
    # parent folder is named '<layers>x...'; the layer count is the sort key
    layer_counts = [int(path.split('/')[-2].split('x')[0]) for path in found]
    order = np.argsort(layer_counts)
    paths_ascending = [found[k] for k in order]
    layers_ascending = np.sort(layer_counts)
    return layers_ascending, paths_ascending
def sort_raw_slab(self):
    """Return the raw-slab ``.cif`` files for the current miller index,
    shift and order, sorted by layer count ascending.

    File names are expected to look like ``'<layers>.cif'``.
    """
    search_dir = (self.raw_slab_dir + str(self.miller_index_tight) + '/'
                  + str(self.shift) + '/' + str(self.order))
    cif_paths = glob(search_dir + '/' + '*' + '.cif')
    # '<layers>.cif' -> integer layer count used as the sort key
    layer_counts = [int(p.split('/')[-1].split('.')[0]) for p in cif_paths]
    order = np.argsort(layer_counts)
    return [cif_paths[k] for k in order]
def initialize_report(self):
    """Start a fresh results report.

    Removes any pre-existing report file (master rank only) and writes
    the initial calculator settings plus the convergence-test options.
    """
    if world.rank==0 and os.path.isfile(self.report_location):
        os.remove(self.report_location)
    rep = paropen(self.report_location, 'a')
    parprint('Initial Parameters:', file=rep)
    parprint('\t'+'xc: '+self.calc_dict['xc'], file=rep)
    # calculator settings echoed one per line
    for key, label in (('h', 'h: '),
                       ('kpts', 'kpts: '),
                       ('occupations', 'sw: '),
                       ('spinpol', 'spin polarized: ')):
        parprint('\t'+label+str(self.calc_dict[key]), file=rep)
    if self.calc_dict['spinpol']:
        parprint('\t'+'magmom: '+str(self.init_magmom), file=rep)
    parprint('\t'+'convergence tolerance: '+str(self.rela_tol)+'eV/Ang^2', file=rep)
    parprint('\t'+'surface energy calculation mode: '+str(self.surf_energy_calc_mode), file=rep)
    parprint('\t'+'fixed layers: '+str(self.fix_layer), file=rep)
    parprint('\t'+'fixed option: '+str(self.fix_option), file=rep)
    parprint(' \n', file=rep)
    rep.close()
class bulk_calc_conv:
    """Bulk convergence-test driver.

    Converges first the grid spacing ``h`` and then the k-point density
    (``'kdens'``) of a bulk GPAW calculation until the per-atom energies
    of three consecutive settings agree within ``rela_tol`` (eV/atom),
    then stores the converged structure in ``final_database/bulk.db``.
    """
    def __init__(self,element,gpaw_calc,rela_tol,init_magmom,solver_step,solver_fmax,restart_calc):
        """Run the whole convergence workflow for *element*.

        Parameters
        ----------
        element : str
            Material name; results live under ``results/<element>/bulk/``.
        gpaw_calc : GPAW
            Calculator carrying the initial parameters.
        rela_tol : float
            Convergence tolerance in eV/atom.
        init_magmom : float
            Initial magnetic moment per atom (used when spin-polarized).
        solver_step, solver_fmax :
            EOS scaling step and BFGS force criterion forwarded to
            ``opt.optimize_bulk``.
        restart_calc : bool
            If True, reuse existing ``.gpw`` files instead of restarting
            the scans from scratch.
        """
        # generate report
        self.target_dir='results/'+element+'/'+'bulk/'
        self.report_location=(self.target_dir+'results_report.txt')
        self.gpaw_calc=gpaw_calc
        self.calc_dict=gpaw_calc.__dict__['parameters']
        self.rela_tol = rela_tol
        self.init_magmom = init_magmom
        self.initialize_report()
        self.element=element
        self.solver_step=solver_step
        self.solver_fmax=solver_fmax
        # convergence test
        ## h size
        param='h'
        ### restart
        if restart_calc and len(glob(self.target_dir+'results_'+param+'/'+'*.gpw'))>0:
            descend_param_ls,descend_gpw_files_dir=self.gather_gpw_file(param)
            if len(descend_gpw_files_dir) < 3:
                # too few points to judge convergence; just note the restart
                self.restart_report(param,descend_gpw_files_dir[-1])
                diff_primary=100
                diff_second=100
            else:
                # replay the diffs over every consecutive triple
                for i in range((len(descend_param_ls)-3)+1):
                    self.convergence_update(param,i,descend_gpw_files_dir)
                diff_primary=max(self.energies_diff_mat[0],self.energies_diff_mat[2])
                diff_second=self.energies_diff_mat[1]
            # continue the scan from the next (smaller) h value
            self.gpaw_calc.__dict__['parameters'][param]=np.round(descend_param_ls[-1]-0.02,decimals=2)
            self.calc_dict=self.gpaw_calc.__dict__['parameters']
        else:
            descend_param_ls=[]
            diff_primary=100
            diff_second=100
        ### convergence loop
        iters=len(descend_param_ls)
        self.convergence_loop(param,iters,diff_primary,diff_second)
        ## kpts size
        param='kdens'
        ### restart
        if restart_calc and len(glob(self.target_dir+'results_'+param+'/'+'*.gpw'))>1:
            descend_param_ls,descend_gpw_files_dir=self.gather_gpw_file(param)
            if len(descend_gpw_files_dir) < 3:
                self.restart_report(param,descend_gpw_files_dir[0])
                diff_primary=100
                diff_second=100
            else:
                for i in range((len(descend_param_ls)-3)+1):
                    self.convergence_update(param,i,descend_gpw_files_dir)
                diff_primary=max(self.energies_diff_mat[0],self.energies_diff_mat[2])
                diff_second=self.energies_diff_mat[1]
            # atoms,calc=restart(descend_gpw_files_dir[0])
            # bump the density until the Monkhorst-Pack grid actually changes
            atoms=bulk_builder(self.element)
            kpts=kdens2mp(atoms,kptdensity=descend_param_ls[0])
            new_kpts=kpts.copy()
            new_kdens=descend_param_ls[0].copy()
            while np.mean(kpts)==np.mean(new_kpts):
                new_kdens+=0.2
                new_kpts=kdens2mp(atoms,kptdensity=np.round(new_kdens,decimals=1))
            new_kdens_dict={'density':new_kdens,'even':True}
            self.gpaw_calc.__dict__['parameters']['kpts']=new_kdens_dict
            self.calc_dict=self.gpaw_calc.__dict__['parameters']
        else:
            ### skip the first calculation
            # reuse the converged h result (3rd-from-last of the descending
            # h list) as the first kdens data point
            descend_gpw_files_dir=self.gather_gpw_file('h')[1]
            atoms, calc = restart(descend_gpw_files_dir[-3])
            self.gpaw_calc=calc
            self.calc_dict=self.gpaw_calc.__dict__['parameters']
            param_val=self.calc_dict['kpts']['density']
            opt.optimize_bulk(atoms,
                              step=self.solver_step,fmax=self.solver_fmax,
                              location=self.target_dir+'results_'+param,
                              extname=param_val)
            descend_param_ls=self.gather_gpw_file(param)[0]
            diff_primary=100
            diff_second=100
        ### convergence loop
        iters=len(descend_param_ls)
        self.convergence_loop(param,iters,diff_primary,diff_second)
        #finalize
        # [2] is the smallest density among the last three computed, i.e.
        # the cheapest setting that satisfied the convergence criterion
        descend_gpw_files_dir=self.gather_gpw_file(param)[1]
        final_atoms, calc = restart(descend_gpw_files_dir[2])
        self.gpaw_calc=calc
        self.calc_dict=self.gpaw_calc.__dict__['parameters']
        if self.calc_dict['spinpol']:
            self.final_magmom=final_atoms.get_magnetic_moments()
        db_final=connect('final_database'+'/'+'bulk.db')
        # NOTE(review): local 'id' shadows the builtin of the same name
        id=db_final.reserve(name=element)
        if id is None:
            # row already exists -> update it in place
            id=db_final.get(name=element).id
            db_final.update(id=id,atoms=final_atoms,name=element,
                            kdensity=self.calc_dict['kpts']['density'],
                            gpw_dir=descend_gpw_files_dir[2])
        else:
            db_final.write(final_atoms,id=id,name=element,
                           kdensity=self.calc_dict['kpts']['density'],
                           gpw_dir=descend_gpw_files_dir[2])
        self.final_report()
    def convergence_loop(self,param,iters,diff_p,diff_s):
        """Iteratively tighten *param* ('h' or 'kdens') until the last
        three per-atom energies agree within ``self.rela_tol`` or six
        settings have been tried, then report via check_convergence.
        """
        while (diff_p>self.rela_tol or diff_s>self.rela_tol) and iters <= 6:
            atoms=bulk_builder(self.element)
            if self.calc_dict['spinpol']:
                atoms.set_initial_magnetic_moments(self.init_magmom*np.ones(len(atoms)))
            atoms.set_calculator(self.gpaw_calc)
            if param == 'h':
                param_val=self.calc_dict[param]
            elif param == 'kdens':
                param_val=self.calc_dict['kpts']['density']
            opt.optimize_bulk(atoms,
                              step=self.solver_step,fmax=self.solver_fmax,
                              location=self.target_dir+'results_'+param,
                              extname=param_val)
            #convergence update
            descend_param_ls,descend_gpw_files_dir=self.gather_gpw_file(param)
            iters=len(descend_param_ls)
            if iters>2:
                iter=iters-3
                self.convergence_update(param,iter,descend_gpw_files_dir)
                diff_p=max(self.energies_diff_mat[0],self.energies_diff_mat[2])
                diff_s=self.energies_diff_mat[1]
            #update param
            if (diff_p>self.rela_tol or diff_s>self.rela_tol):
                if param == 'h':
                    self.gpaw_calc.__dict__['parameters'][param]=np.round(param_val-0.02,decimals=2)
                elif param == 'kdens':
                    atoms=bulk_builder(self.element)
                    kpts=kdens2mp(atoms,kptdensity=descend_param_ls[0])
                    new_kpts=kpts.copy()
                    new_kdens=descend_param_ls[0].copy()
                    # keep raising the density until the MP grid changes
                    while np.mean(kpts)==np.mean(new_kpts):
                        new_kdens+=0.2
                        new_kdens=np.round(new_kdens,decimals=1)
                        new_kpts=kdens2mp(atoms,kptdensity=new_kdens) #even=True
                    new_kdens_dict={'density':new_kdens,'even':True}
                    self.gpaw_calc.__dict__['parameters']['kpts']=new_kdens_dict
                else:
                    continue
                self.calc_dict=self.gpaw_calc.__dict__['parameters']
        #check iteration
        self.check_convergence(diff_p,diff_s,iters,param)
    def check_convergence(self,diff_p,diff_s,iters,param):
        """Once the budget of 6 iterations is reached: abort the run if
        the diffs still exceed the tolerance, else log success."""
        if iters>=6:
            if diff_p>self.rela_tol or diff_s>self.rela_tol:
                f=paropen(self.report_location,'a')
                parprint("WARNING: Max iterations reached! "+param+" convergence test failed.",file=f)
                parprint("Computation Suspended!",file=f)
                parprint(' ',file=f)
                f.close()
                sys.exit()
            else:
                f=paropen(self.report_location,'a')
                parprint(param+" convergence test success!",file=f)
                parprint("="*44,file=f)
                parprint('\n',file=f)
                f.close()
    def gather_gpw_file(self,param):
        """Return (param values, gpw paths), both sorted descending.

        File names end in ``-<value>.gpw``; the value is parsed from the
        name, so largest-h (or largest-density) files come first.
        """
        gpw_files_dir=glob(self.target_dir+'results_'+param+'/'+'*.gpw')
        gpw_files_name=[name.split('/')[-1] for name in gpw_files_dir]
        param_ls=[float(i.split('-')[-1][:-4]) for i in gpw_files_name]
        descend_order=np.argsort(param_ls)[::-1]
        descend_gpw_files_dir=[gpw_files_dir[i] for i in descend_order]
        descend_param_ls=np.sort(param_ls)[::-1]
        return descend_param_ls,descend_gpw_files_dir
    def convergence_update(self,param,iter,gpw_files_dir):
        """Compute per-atom energy differences for three consecutive
        calculations starting at index *iter* and report them.

        Sets ``self.energies_diff_mat`` to the rounded absolute
        differences (|2nd-1st|, |3rd-2nd|, |3rd-1st|) in eV/atom.
        """
        energies=[]
        param_ls=[]
        if param == 'kdens':
            # kdens was computed in ascending order; flip to match
            gpw_files_dir=gpw_files_dir[::-1]
        for i in range(iter,iter+3,1):
            atoms, calc = restart(gpw_files_dir[i])
            if param == 'kdens':
                kdens=calc.__dict__['parameters']['kpts']['density']
                param_ls.append(kdens)
            elif param == 'h':
                param_ls.append(calc.__dict__['parameters'][param])
            energies.append(atoms.get_potential_energy()/len(atoms)) #eV/atom
        energies_arr = np.array(energies)
        # rotate [a,b,c] -> [b,c,a] so the difference below gives the
        # three pairwise deltas at once
        energies_arr_rep = np.array((energies+energies)[1:4])
        self.energies_diff_mat=np.round(np.abs(energies_arr-energies_arr_rep),decimals=4)
        self.convergence_update_report(param,param_ls)
    def convergence_update_report(self,param,param_ls):
        """Append a fixed-format table of the three parameter values and
        their per-atom energy differences to the report file."""
        f = paropen(self.report_location,'a')
        parprint('Optimizing parameter: '+param,file=f)
        param_val_str='1st: '+str(param_ls[0])+' 2nd: '+str(param_ls[1])+' 3rd: '+str(param_ls[2])
        parprint('\t'+param_val_str,file=f)
        divider_str='-'
        parprint('\t'+divider_str*len(param_val_str),file=f)
        substrat_str='| '+'2nd-1st'+' | '+'3rd-2nd'+' | '+'3rd-1st'+' |'
        parprint('\t'+substrat_str,file=f)
        energies_str='\t'+'| '
        for i in range(3):
            energies_str+=str(self.energies_diff_mat[i])+' '+'|'+' '
        energies_str+='eV/atom'
        parprint(energies_str,file=f)
        parprint(' ',file=f)
        f.close()
    def restart_report(self,param,updated_gpw):
        """Log that the *param* scan resumes from *updated_gpw*, echoing
        the last computed parameter value."""
        calc = restart(updated_gpw)[1]
        f = paropen(self.report_location,'a')
        parprint('Restarting '+param+' convergence test...',file=f)
        if param == 'kdens':
            parprint('\t'+'Last computation:'+'\t'+param+'='+str(calc.__dict__['parameters']['kpts']),file=f)
        elif param == 'h':
            parprint('\t'+'Last computation:'+'\t'+param+'='+str(calc.__dict__['parameters']['h']),file=f)
        parprint(' ',file=f)
        f.close()
    def initialize_report(self):
        """Create a fresh report file (master rank removes any old one)
        and write the initial calculator settings."""
        if world.rank==0 and os.path.isfile(self.report_location):
            os.remove(self.report_location)
        f = paropen(self.report_location,'a')
        parprint('Initial Parameters:', file=f)
        parprint('\t'+'xc: '+self.calc_dict['xc'],file=f)
        parprint('\t'+'h: '+str(self.calc_dict['h']),file=f)
        parprint('\t'+'kpts: '+str(self.calc_dict['kpts']),file=f)
        parprint('\t'+'sw: '+str(self.calc_dict['occupations']),file=f)
        parprint('\t'+'spin polarized: '+str(self.calc_dict['spinpol']),file=f)
        if self.calc_dict['spinpol']:
            parprint('\t'+'magmom: '+str(self.init_magmom),file=f)
        parprint('\t'+'convergence tolerance: '+str(self.rela_tol)+'eV/atom',file=f)
        parprint(' \n',file=f)
        f.close()
    def final_report(self):
        """Write the converged calculator settings (and final magnetic
        moments, if spin-polarized) to the report file."""
        f = paropen(self.report_location,'a')
        parprint('Final Parameters:', file=f)
        parprint('\t'+'xc: '+self.calc_dict['xc'],file=f)
        parprint('\t'+'h: '+str(self.calc_dict['h']),file=f)
        parprint('\t'+'kpts: '+str(self.calc_dict['kpts']),file=f)
        parprint('\t'+'sw: '+str(self.calc_dict['occupations']),file=f)
        parprint('\t'+'spin polarized: '+str(self.calc_dict['spinpol']),file=f)
        if self.calc_dict['spinpol']:
            parprint('\t'+'magmom: '+str(self.final_magmom),file=f)
        parprint(' ',file=f)
        f.close()
|
kianpu34593/base4gpaw | BASIC/non_converge_calc.py | from copy import Error
import os
from typing import Type
from ase.parallel import paropen, parprint, world
from ase.db import connect
from ase.io import read
from glob import glob
import numpy as np
from gpaw import restart
import BASIC.optimizer as opt
import sys
from ase.constraints import FixAtoms,FixedLine
import pandas as pd
from BASIC.utils import detect_cluster
def pbc_checker(slab):
    """Set the slab's periodic boundary conditions in place.

    If either of the first two cell angles deviates from 90 degrees
    (after rounding to 4 decimals) the slab is made fully periodic,
    otherwise the z direction is left non-periodic.
    """
    first_two_angles = np.round(slab.cell.angles(), decimals=4)[:2]
    non_orthogonal = np.any([angle != 90.0000 for angle in first_two_angles])
    if non_orthogonal:
        slab.pbc = [1, 1, 1]
    else:
        slab.pbc = [1, 1, 0]
# def detect_cluster(slab,tol=0.1):
# n=len(slab)
# dist_matrix=np.zeros((n, n))
# slab_c=np.sort(slab.get_positions()[:,2])
# for i, j in itertools.combinations(list(range(n)), 2):
# if i != j:
# cdist = np.abs(slab_c[i] - slab_c[j])
# dist_matrix[i, j] = cdist
# dist_matrix[j, i] = cdist
# condensed_m = squareform(dist_matrix)
# z = linkage(condensed_m)
# clusters = fcluster(z, tol, criterion="distance")
# return slab_c,list(clusters)
def apply_magmom(opt_slab_magmom,ads_slab,adatom=1):
    """Transfer the relaxed clean-slab magnetic moments onto an
    adsorbate-covered slab.

    The clean-slab moments are reused for the slab atoms and every
    adsorbate atom starts with a zero initial magnetic moment.

    Parameters
    ----------
    opt_slab_magmom : array-like
        Magnetic moments of the relaxed clean slab.
    ads_slab : ase.Atoms
        Slab with ``adatom`` adsorbate atoms appended at the end.
    adatom : int, optional
        Number of adsorbate atoms (default 1).  Any non-negative count
        is supported; the previous implementation raised NameError for
        counts other than 1 or 2.

    Returns
    -------
    ase.Atoms
        The same ``ads_slab`` object with initial magnetic moments set.
    """
    # one zero-moment entry per adsorbate atom
    magmom_ls=np.append(opt_slab_magmom,np.zeros(adatom))
    ads_slab.set_initial_magnetic_moments(magmom_ls)
    return ads_slab
def get_clean_slab(element,
                miller_index,
                report_location,
                target_dir,
                size,
                fix_layer,
                solver_fmax,
                solver_maxstep,
                gpaw_calc):
    """Return ``(potential energy, magnetic moments)`` of the relaxed
    clean slab for the requested super-cell *size*.

    For a 1x1 cell the pre-relaxed slab is fetched straight from
    ``final_database/surf.db``.  Otherwise a previously stored
    ``slab.gpw`` is reused when its k-points match the current
    calculator; if not (or if no gpw exists) the slab is (re)relaxed,
    resuming from ``slab_interm.gpw`` when such a checkpoint is present.
    """
    f = paropen(report_location,'a')
    parprint('Start clean slab calculation: ', file=f)
    if size != '1x1':
        clean_slab_gpw_path=target_dir+'/clean_slab/slab.gpw'
        if os.path.isfile(clean_slab_gpw_path):
            opt_slab, pre_calc = restart(clean_slab_gpw_path)
            # NOTE(review): list() of a kpts *dict* would compare keys
            # only — assumes kpts are stored as an array/sequence here;
            # confirm against the calculator setup.
            pre_kpts=list(pre_calc.__dict__['parameters']['kpts'])
            set_kpts=list(gpaw_calc.__dict__['parameters']['kpts'])
            if pre_kpts == set_kpts:
                parprint('\t'+size+' clean slab is pre-calculated with kpts matched.',file=f)
            else:
                # stored result is stale -> recompute from the input geometry
                parprint('\t'+size+' clean slab pre-calculated has different kpts. Clean slab needs to re-calculate.', file=f)
                parprint('\t'+'Calculating '+size+' clean slab...',file=f)
                clean_slab=read(target_dir+'/clean_slab/input.traj')
                opt_slab=clean_slab_calculator(clean_slab,fix_layer,gpaw_calc,target_dir,solver_fmax,solver_maxstep)
        else:
            parprint('\t'+size+' clean slab is not pre-calculated.',file=f)
            parprint('\t'+'Calculating '+size+' clean slab...',file=f)
            interm_gpw=target_dir+'/clean_slab/slab_interm.gpw'
            if os.path.isfile(interm_gpw):
                # resume from the intermediate checkpoint written by opt.relax
                clean_slab, gpaw_calc=restart(interm_gpw)
            else:
                clean_slab=read(target_dir+'/clean_slab/input.traj')
            opt_slab=clean_slab_calculator(clean_slab,fix_layer,gpaw_calc,target_dir,solver_fmax,solver_maxstep)
    else:
        parprint('\tslab size is 1x1. Clean slab calculation is skipped.', file=f)
        opt_slab=connect('final_database'+'/'+'surf.db').get_atoms(simple_name=element+'_'+miller_index)
    parprint(' ',file=f)
    f.close()
    return opt_slab.get_potential_energy(), opt_slab.get_magnetic_moments()
def clean_slab_calculator(clean_slab,
                        fix_layer,
                        gpaw_calc,
                        target_dir,
                        solver_fmax,
                        solver_maxstep,
                        fix_option='bottom'):
    """Relax the clean slab with the bottom *fix_layer* layers frozen.

    Sets PBC, zeroes initial magnetic moments when spin-polarized,
    constrains all atoms at or below the top of the ``fix_layer``-th
    layer (counted from the bottom), then runs ``opt.relax`` in
    ``<target_dir>/clean_slab``.  Returns the relaxed slab.
    """
    pbc_checker(clean_slab)
    calc_dict=gpaw_calc.__dict__['parameters']
    if calc_dict['spinpol']:
        clean_slab.set_initial_magnetic_moments([0]*len(clean_slab))
    # detect_cluster groups atoms into layers by their (sorted) z coordinate
    slab_c_coord,cluster=detect_cluster(clean_slab)
    if fix_option == 'bottom':
        # pick the fix_layer-th distinct layer label (in order of appearance)
        unique_cluster_index=sorted(set(cluster), key=cluster.index)[fix_layer-1]
        # NOTE(review): `cluster==unique_cluster_index` is only a valid
        # elementwise mask if detect_cluster returns an ndarray — a plain
        # list would collapse this to a scalar False; verify its return type.
        max_height_fix=max(slab_c_coord[cluster==unique_cluster_index])
        fix_mask=clean_slab.positions[:,2]<(max_height_fix+0.05) #add 0.05 Ang to make sure all bottom fixed
    fixed_atom_constrain=FixAtoms(mask=fix_mask)
    clean_slab.set_constraint(fixed_atom_constrain)
    clean_slab.set_calculator(gpaw_calc)
    opt.relax(clean_slab,target_dir+'/clean_slab',fmax=solver_fmax,maxstep=solver_maxstep)
    return clean_slab
def adsorption_energy_calculator(traj_file,
                                report_location,
                                opt_slab_energy,
                                adatom_pot_energy,
                                opt_slab_magmom,
                                gpaw_calc,
                                solver_fmax,
                                solver_maxstep,
                                calc_type,
                                fix_layer,
                                fix_option = 'bottom'):
    """Relax one adsorbate-covered slab and compute its adsorption energy.

    Parameters
    ----------
    traj_file : str
        Path to the ``input.traj`` of this adsorption site; the site
        name is its parent directory.
    report_location : str
        Report file to append progress messages to.
    opt_slab_energy : float
        Potential energy of the relaxed clean slab (eV).
    adatom_pot_energy : float
        Reference potential energy of the isolated adsorbate (eV).
    opt_slab_magmom : array-like
        Clean-slab magnetic moments (reused when spin-polarized).
    gpaw_calc : GPAW
        Calculator to attach (replaced by the checkpointed one when an
        intermediate ``slab_interm.gpw`` exists).
    solver_fmax, solver_maxstep :
        BFGS settings forwarded to ``opt.relax``.
    calc_type : str
        'grid' additionally constrains the adsorbate to move along z
        only; 'normal' fixes the bottom layers only.
    fix_layer : int
        Number of bottom layers to freeze.
    fix_option : str, optional
        Only 'bottom' is supported.

    Returns
    -------
    tuple
        ``(initial site name, adsorption energy in eV,
        'x_y' string of the final adsorbate position)``.
    """
    interm_gpw='/'.join(traj_file.split('/')[:-1]+['slab_interm.gpw'])
    if os.path.isfile(interm_gpw):
        # resume from the intermediate checkpoint written by opt.relax
        ads_slab, gpaw_calc=restart(interm_gpw)
    else:
        ads_slab=read(traj_file)
    pbc_checker(ads_slab)
    calc_dict=gpaw_calc.__dict__['parameters']
    if calc_dict['spinpol']:
        ads_slab=apply_magmom(opt_slab_magmom,ads_slab)
    # the adsorbate (last atom) may only move along z in grid mode
    fixed_line_constrain=FixedLine(a=-1,direction=[0,0,1])
    slab_c_coord,cluster=detect_cluster(ads_slab)
    if fix_option == 'bottom':
        # pick the fix_layer-th distinct layer label (in order of appearance)
        unique_cluster_index=sorted(set(cluster), key=cluster.index)[fix_layer-1]
        max_height_fix=max(slab_c_coord[cluster==unique_cluster_index])
        fix_mask=ads_slab.positions[:,2]<(max_height_fix+0.05) #add 0.05 Ang to make sure all bottom fixed
    if calc_type == 'grid':
        fixed_atom_constrain=FixAtoms(mask=fix_mask)
        ads_slab.set_constraint([fixed_atom_constrain,fixed_line_constrain])
    elif calc_type == 'normal' and fix_option == 'bottom':
        fixed_atom_constrain=FixAtoms(mask=fix_mask)
        ads_slab.set_constraint(fixed_atom_constrain)
    ads_slab.set_calculator(gpaw_calc)
    location='/'.join(traj_file.split('/')[:-1])
    f=paropen(report_location,'a')
    parprint('Calculating '+('/'.join(location.split('/')[-2:]))+' adsorption site...',file=f)
    f.close()
    opt.relax(ads_slab,location,fmax=solver_fmax,maxstep=solver_maxstep)
    init_ads_site=traj_file.split('/')[-2]
    E_slab_ads=ads_slab.get_potential_energy()
    # E_ads = E(slab+adsorbate) - E(clean slab) - E(adsorbate reference)
    # (removed a redundant `opt_slab_energy=opt_slab_energy` self-assignment)
    adsorption_energy=E_slab_ads-(opt_slab_energy+adatom_pot_energy)
    final_ads_site=list(np.round(ads_slab.get_positions()[-1][:2],decimals=3))
    final_ads_site_str='_'.join([str(i) for i in final_ads_site])
    return init_ads_site, adsorption_energy, final_ads_site_str
def skip_ads_calculated(report_location,
                        all_gpw_files,
                        init_adsorbates_site_lst,
                        adsorption_energy_lst,
                        final_adsorbates_site_lst,
                        opt_slab_energy,
                        adatom_pot_energy):
    """Harvest already-finished adsorption runs instead of recomputing.

    For every ``slab.gpw`` in *all_gpw_files*, logs the skip, re-reads
    the relaxed slab, and appends the initial site name, adsorption
    energy, and final 'x_y' adsorbate position to the given lists,
    which are returned.
    """
    rep = paropen(report_location, 'a')
    parprint('Restarting...', file=rep)
    for gpw_file in all_gpw_files:
        site_dir = '/'.join(gpw_file.split('/')[:-1])
        parprint('Skipping '+('/'.join(site_dir.split('/')[-2:]))+' adsorption site...', file=rep)
        atoms = restart(gpw_file)[0]
        init_adsorbates_site_lst.append(gpw_file.split('/')[-2])
        # E_ads = E(slab+adsorbate) - (E(clean slab) + E(adsorbate reference))
        adsorption_energy_lst.append(atoms.get_potential_energy()-(opt_slab_energy+adatom_pot_energy))
        final_xy = list(np.round(atoms.get_positions()[-1][:2], decimals=3))
        final_adsorbates_site_lst.append('_'.join([str(c) for c in final_xy]))
    parprint(' ', file=rep)
    rep.close()
    return init_adsorbates_site_lst,adsorption_energy_lst,final_adsorbates_site_lst
def initialize_report(report_location,gpaw_calc):
    """Create a fresh adsorption report file.

    Removes any pre-existing report (master rank only) and writes the
    calculator's initial parameters.
    """
    calc_dict=gpaw_calc.__dict__['parameters']
    if world.rank==0 and os.path.isfile(report_location):
        os.remove(report_location)
    rep = paropen(report_location, 'a')
    parprint('Initial Parameters:', file=rep)
    parprint('\t'+'xc: '+calc_dict['xc'], file=rep)
    # remaining settings echoed one per line
    for key, label in (('h', 'h: '),
                       ('kpts', 'kpts: '),
                       ('occupations', 'sw: '),
                       ('spinpol', 'spin polarized: ')):
        parprint('\t'+label+str(calc_dict[key]), file=rep)
    if calc_dict['spinpol']:
        parprint('\t'+'magmom: initialize magnetic moment from slab calculation.', file=rep)
    parprint(' ', file=rep)
    rep.close()
class ads_auto_select:
    """Relax every autocat-generated adsorption site (bridge / ontop /
    hollow), pick the lowest-energy one, and store it in
    ``final_database/ads_<size>.db``.

    All work happens in ``__init__``; progress is appended to
    ``<target_dir>_autocat_results_report.txt``.  (Removed ~90 lines of
    commented-out superseded methods; renamed the local ``id`` that
    shadowed the builtin.)
    """
    def __init__(self,
                element,
                miller_index_tight,
                gpaw_calc,
                ads,
                adatom_pot_energy,
                solver_fmax,
                solver_max_step,
                restart_calc,
                size=(1,1), #xy size
                fix_layer=2,
                fix_option='bottom'):
        """Run the full site-selection workflow.

        Parameters
        ----------
        element, miller_index_tight : str
            Identify the surface under ``results/<element>/ads/``.
        gpaw_calc : GPAW
            Calculator for all relaxations.
        ads : str
            Adsorbate name (sub-directory under ``adsorbates/``).
        adatom_pot_energy : float
            Reference energy of the isolated adsorbate (eV).
        solver_fmax, solver_max_step :
            BFGS settings for ``opt.relax``.
        restart_calc : bool
            If True, already-finished sites (``slab.gpw`` present) are
            harvested instead of recomputed.
        size : tuple, optional
            (x, y) super-cell repetition.
        fix_layer : int, optional
            Bottom layers to freeze during relaxation.
        fix_option : str, optional
            Only 'bottom' is supported.
        """
        #initialize variables
        size_xy=str(size[0])+'x'+str(size[1])
        target_dir='results/'+element+'/'+'ads/'+size_xy+'/'+miller_index_tight
        report_location=target_dir+'_autocat_results_report.txt'
        all_ads_file_loc=target_dir+'/'+'adsorbates/'+str(ads)+'/'
        ## TO-DO: need to figure out how to calculate adsorption energy for larger system
        ##generate report
        initialize_report(report_location, gpaw_calc)
        ##compute clean slab energy
        opt_slab_energy, opt_slab_magmom=get_clean_slab(element, miller_index_tight,
                                                        report_location, target_dir,size_xy,
                                                        fix_layer,solver_fmax,solver_max_step,
                                                        gpaw_calc)
        ##start adsorption calculation
        adsorption_energy_dict={}
        init_adsorbates_site_lst=[]
        final_adsorbates_site_lst=[]
        adsorption_energy_lst=[]
        all_bridge_traj_files=glob(all_ads_file_loc+'bridge/*/input.traj')
        all_ontop_traj_files=glob(all_ads_file_loc+'ontop/*/input.traj')
        all_hollow_traj_files=glob(all_ads_file_loc+'hollow/*/input.traj')
        all_traj_files=all_bridge_traj_files+all_ontop_traj_files+all_hollow_traj_files
        all_bridge_gpw_files=glob(all_ads_file_loc+'bridge/*/slab.gpw')
        all_ontop_gpw_files=glob(all_ads_file_loc+'ontop/*/slab.gpw')
        all_hollow_gpw_files=glob(all_ads_file_loc+'hollow/*/slab.gpw')
        all_gpw_files=all_bridge_gpw_files+all_ontop_gpw_files+all_hollow_gpw_files
        ## restart: harvest finished sites, then drop them from the work list
        if restart_calc==True and len(all_gpw_files)>=1:
            init_adsorbates_site_lst,adsorption_energy_lst,final_adsorbates_site_lst=skip_ads_calculated(report_location,
                                                                                                        all_gpw_files,
                                                                                                        init_adsorbates_site_lst,
                                                                                                        adsorption_energy_lst,
                                                                                                        final_adsorbates_site_lst,
                                                                                                        opt_slab_energy,
                                                                                                        adatom_pot_energy)
            all_gpw_files_ads_site=['/'.join(i.split('/')[:-1]) for i in all_gpw_files]
            all_traj_files=[i for i in all_traj_files if '/'.join(i.split('/')[:-1]) not in all_gpw_files_ads_site]
        for traj_file in all_traj_files:
            output_lst=adsorption_energy_calculator(traj_file,report_location,
                                                    opt_slab_energy,adatom_pot_energy,
                                                    opt_slab_magmom,gpaw_calc,
                                                    solver_fmax,solver_max_step,
                                                    calc_type='normal',
                                                    fix_layer=fix_layer,fix_option = fix_option,
                                                    )
            init_adsorbates_site_lst.append(output_lst[0])
            adsorption_energy_lst.append(output_lst[1])
            final_adsorbates_site_lst.append(output_lst[2])
        adsorption_energy_dict['init_sites[x_y](Ang)']=init_adsorbates_site_lst
        adsorption_energy_dict['final_sites[x_y](Ang)']=final_adsorbates_site_lst
        adsorption_energy_dict['adsorption_energy(eV)']=adsorption_energy_lst
        ads_df=pd.DataFrame(adsorption_energy_dict)
        ads_df.sort_values(by=['adsorption_energy(eV)'],inplace=True)
        pd.set_option("display.max_rows", None, "display.max_columns", None)
        f=paropen(report_location,'a')
        parprint(ads_df,file=f)
        parprint('',file=f)
        f.close()
        # lowest-energy site after the sort above
        min_adsorbates_site=ads_df.iloc[[0]]['init_sites[x_y](Ang)'].to_list()[0]
        lowest_ads_energy_slab=read(glob(all_ads_file_loc+'*/'+min_adsorbates_site+'/slab.traj')[0])
        #finalize
        final_slab_simple_name=element+'_'+miller_index_tight
        ads_db=connect('final_database/ads_'+size_xy+'.db')
        # 'row_id' (was 'id') avoids shadowing the builtin
        row_id=ads_db.reserve(name=final_slab_simple_name)
        if row_id is None:
            # row already exists -> update it in place
            row_id=ads_db.get(name=final_slab_simple_name).id
            ads_db.update(id=row_id,atoms=lowest_ads_energy_slab,name=final_slab_simple_name,
                        ads_pot_e=float(ads_df.iloc[[0]]['adsorption_energy(eV)'].to_list()[0]))
        else:
            ads_db.write(lowest_ads_energy_slab,
                        id=row_id,
                        name=final_slab_simple_name,
                        ads_pot_e=float(ads_df.iloc[[0]]['adsorption_energy(eV)'].to_list()[0]))
        f=paropen(report_location,'a')
        parprint('Adsorption energy calculation complete.',file=f)
        parprint('Selected ads site is: ',file=f)
        parprint(min_adsorbates_site,file=f)
        f.close()
class ads_grid_calc:
def __init__(self,
            element,
            miller_index_tight,
            gpaw_calc,
            ads,
            adatom_pot_energy,
            solver_fmax,
            solver_max_step,
            restart_calc,
            size,
            fix_layer=2,
            fix_option='bottom'):
    """Grid-mode adsorption scan: relax the adsorbate on every grid
    site (constrained to move along z only), write the sorted energies
    to ``<target_dir>_ads_grid.csv`` and to the report.

    Unlike the autocat workflow, no 'best' site is stored in a final
    database; only the per-site energy table is produced.

    NOTE(review): ``fix_option`` is accepted but a literal 'bottom' is
    forwarded to the energy calculator — confirm whether that is
    intentional.
    """
    #initalize variables
    size_xy=str(size[0])+'x'+str(size[1])
    target_dir='results/'+element+'/'+'ads/'+size_xy+'/'+miller_index_tight
    report_location=target_dir+'_grid_results_report.txt'
    all_ads_file_loc=target_dir+'/'+'adsorbates/'+str(ads)+'/'
    ## TO-DO: need to figure out how to calculate adsorption energy for larger system
    # self.gpaw_calc=gpaw_calc
    # self.calc_dict=self.gpaw_calc.__dict__['parameters']
    # self.ads=ads
    #self.all_ads_file_loc=self.target_dir+'/'+'adsorbates/'+str(self.ads)+'/'
    #self.adatom_pot_energy=adatom_pot_energy
    ##generate report
    initialize_report(report_location,gpaw_calc)
    ##compute clean slab energy
    opt_slab_energy, opt_slab_magmom=get_clean_slab(element, miller_index_tight,
                                                    report_location, target_dir, size_xy,
                                                    fix_layer,solver_fmax,solver_max_step,
                                                    gpaw_calc)
    ##start adsorption calculation
    adsorption_energy_dict={}
    init_adsorbates_site_lst=[]
    adsorption_energy_lst=[]
    final_adsorbates_site_lst=[]
    all_traj_files=glob(all_ads_file_loc+'grid/*/input.traj')
    all_gpw_files=glob(all_ads_file_loc+'grid/*/slab.gpw')
    ## restart
    if restart_calc==True and len(all_gpw_files)>=1:
        # only the init-site and energy columns are kept in grid mode ([0:2])
        init_adsorbates_site_lst,adsorption_energy_lst=skip_ads_calculated(report_location,
                                                                        all_gpw_files,
                                                                        init_adsorbates_site_lst,
                                                                        adsorption_energy_lst,
                                                                        final_adsorbates_site_lst,
                                                                        opt_slab_energy,
                                                                        adatom_pot_energy)[0:2]
        all_gpw_files_ads_site=['/'.join(i.split('/')[:-1]) for i in all_gpw_files]
        # drop already-finished sites from the work list
        all_traj_files=[i for i in all_traj_files if '/'.join(i.split('/')[:-1]) not in all_gpw_files_ads_site]
    for traj_file in all_traj_files:
        output_lst=adsorption_energy_calculator(traj_file,report_location,
                                                opt_slab_energy,adatom_pot_energy,
                                                opt_slab_magmom,gpaw_calc,
                                                solver_fmax,solver_max_step,
                                                calc_type='grid',
                                                fix_layer=fix_layer,fix_option = 'bottom',
                                                )
        init_adsorbates_site_lst.append(output_lst[0])
        adsorption_energy_lst.append(output_lst[1])
    adsorption_energy_dict['init_sites[x_y](Ang)']=init_adsorbates_site_lst
    adsorption_energy_dict['adsorption_energy(eV)']=adsorption_energy_lst
    ads_df=pd.DataFrame(adsorption_energy_dict)
    #ads_df.set_index('init_adsorbates_sites[x_y](Ang)',inplace=True)
    ads_df.sort_values(by=['adsorption_energy(eV)'],inplace=True)
    ads_df.to_csv(target_dir+'_ads_grid.csv')
    pd.set_option("display.max_rows", None, "display.max_columns", None)
    f=paropen(report_location,'a')
    parprint(ads_df,file=f)
    parprint('',file=f)
    parprint('Grid adsorption energy calculation complete.',file=f)
    f.close()
# def get_clean_slab(self):
# f = paropen(self.report_location,'a')
# parprint('Start clean slab calculation: ', file=f)
# if self.size != '1x1':
# clean_slab_gpw_path=self.target_dir+'/clean_slab/slab.gpw'
# clean_slab=read(self.target_dir+'/clean_slab/input.traj')
# if os.path.isfile(clean_slab_gpw_path):
# opt_slab, pre_calc = restart(clean_slab_gpw_path)
# pre_kpts=pre_calc.__dict__['parameters']['kpts']
# set_kpts=self.calc_dict['kpts']
# if pre_kpts == set_kpts:
# parprint('\t'+self.size+' clean slab is pre-calculated with kpts matched.',file=f)
# else:
# parprint('\t'+self.size+' clean slab pre-calculated has different kpts. Clean slab needs to re-calculate.', file=f)
# parprint('\t'+'Calculating '+self.size+' clean slab...',file=f)
# opt_slab=self.clean_slab_calculator(clean_slab)
# else:
# parprint('\t'+self.size+' clean slab is not pre-calculated.',file=f)
# parprint('\t'+'Calculating '+self.size+' clean slab...',file=f)
# opt_slab=self.clean_slab_calculator(clean_slab)
# else:
# parprint('slab size is 1x1. Clean slab calculation is skipped.', file=f)
# opt_slab=connect('final_database'+'/'+'surf.db').get_atoms(simple_name=self.element+'_'+self.miller_index_tight)
# f.close()
# return opt_slab
# def clean_slab_calculator(self,clean_slab):
# pbc_checker(clean_slab)
# if self.calc_dict['spinpol']:
# clean_slab.set_initial_magnetic_moments([0]*len(clean_slab))
# slab_c_coord,cluster=detect_cluster(clean_slab)
# if self.fix_option == 'bottom':
# unique_cluster_index=sorted(set(cluster), key=cluster.index)[self.fix_layer-1]
# max_height_fix=max(slab_c_coord[cluster==unique_cluster_index])
# fix_mask=clean_slab.positions[:,2]<(max_height_fix+0.05) #add 0.05 Ang to make sure all bottom fixed
# else:
# raise RuntimeError('Only bottom fix option available now.')
# fixed_atom_constrain=FixAtoms(mask=fix_mask)
# clean_slab.set_constraint(fixed_atom_constrain)
# clean_slab.set_calculator(self.gpaw_calc)
# opt.relax(clean_slab,self.target_dir+'/clean_slab',fmax=self.solver_fmax,maxstep=self.solver_max_step)
# return clean_slab
# def adsorption_energy_calculator(self,traj_file,opt_slab):
# ads_slab=read(traj_file)
# pbc_checker(ads_slab)
# if self.calc_dict['spinpol']:
# ads_slab=apply_magmom(opt_slab,ads_slab)
# fixed_line_constrain=FixedLine(a=-1,direction=[0,0,1])
# slab_c_coord,cluster=detect_cluster(ads_slab)
# if self.fix_option == 'bottom':
# unique_cluster_index=sorted(set(cluster), key=cluster.index)[self.fix_layer-1]
# max_height_fix=max(slab_c_coord[cluster==unique_cluster_index])
# fix_mask=ads_slab.positions[:,2]<(max_height_fix+0.05) #add 0.05 Ang to make sure all bottom fixed
# else:
# raise RuntimeError('Only bottom fix option available now.')
# fixed_atom_constrain=FixAtoms(mask=fix_mask)
# ads_slab.set_constraint([fixed_atom_constrain,fixed_line_constrain])
# ads_slab.set_calculator(self.gpaw_calc)
# location='/'.join(traj_file.split('/')[:-1])
# f=paropen(self.report_location,'a')
# parprint('Calculating '+('/'.join(location.split('/')[-2:]))+' adsorption site...',file=f)
# f.close()
# opt.relax(ads_slab,location,fmax=self.solver_fmax,maxstep=self.solver_max_step)
# init_ads_site=traj_file.split('/')[-2]
# adsorption_energy=ads_slab.get_potential_energy()-(opt_slab.get_potential_energy()+self.adatom_pot_energy)
# return init_ads_site, adsorption_energy
# def apply_magmom(self,opt_slab,ads_slab):
# slab_formula=ads_slab.get_chemical_symbols()
# magmom=opt_slab.get_magnetic_moments()
# magmom_ls=np.append(magmom,np.mean(magmom))
# magmom_ls[slab_formula.index(self.ads)]=0
# ads_slab.set_initial_magnetic_moments(magmom_ls)
# def initialize_report(self):
# if world.rank==0 and os.path.isfile(self.report_location):
# os.remove(self.report_location)
# f = paropen(self.report_location,'a')
# parprint('Initial Parameters:', file=f)
# parprint('\t'+'xc: '+self.calc_dict['xc'],file=f)
# parprint('\t'+'h: '+str(self.calc_dict['h']),file=f)
# parprint('\t'+'kpts: '+str(self.calc_dict['kpts']),file=f)
# parprint('\t'+'sw: '+str(self.calc_dict['occupations']),file=f)
# parprint('\t'+'spin polarized: '+str(self.calc_dict['spinpol']),file=f)
# if self.calc_dict['spinpol']:
# parprint('\t'+'magmom: initial magnetic moment from slab calculation.',file=f)
# parprint(' ',file=f)
# f.close()
class ads_lowest_ads_site_calc:
    """Adsorption-energy calculation at previously identified lowest-energy sites.

    Relaxes every adsorbate structure found under
    ``results/<element>/ads/<Nx>x<Ny>/<miller>/adsorbates/<ads>/lowest_ads_site/``,
    tabulates the adsorption energies, and stores the lowest-energy slab in
    ``final_database/ads_<Nx>x<Ny>.db``.  All work happens in ``__init__``;
    progress is appended to ``<target_dir>_lowest_ads_results_report.txt``.
    """

    def __init__(self,
                 element,
                 miller_index_tight,
                 gpaw_calc,
                 ads,
                 adatom_pot_energy,
                 solver_fmax,
                 solver_max_step,
                 restart_calc,
                 size,  # xy size, e.g. (2, 2)
                 fix_layer=2,
                 fix_option='bottom'):
        # ---- paths and report -------------------------------------------
        size_xy = str(size[0]) + 'x' + str(size[1])
        target_dir = 'results/' + element + '/' + 'ads/' + size_xy + '/' + miller_index_tight
        report_location = target_dir + '_lowest_ads_results_report.txt'
        all_ads_file_loc = target_dir + '/' + 'adsorbates/' + str(ads) + '/'
        initialize_report(report_location, gpaw_calc)
        # ---- clean slab reference energy --------------------------------
        opt_slab_energy, opt_slab_magmom = get_clean_slab(element, miller_index_tight,
                                                          report_location, target_dir, size_xy,
                                                          fix_layer, solver_fmax, solver_max_step,
                                                          gpaw_calc)
        # ---- adsorption calculations ------------------------------------
        adsorption_energy_dict = {}
        init_adsorbates_site_lst = []
        final_adsorbates_site_lst = []
        adsorption_energy_lst = []
        all_traj_files = glob(all_ads_file_loc + 'lowest_ads_site/*/input.traj')
        all_gpw_files = glob(all_ads_file_loc + 'lowest_ads_site/*/slab.gpw')
        if restart_calc and len(all_gpw_files) >= 1:
            # Harvest energies of already-finished sites (slab.gpw present)
            # and drop them from the work queue.
            init_adsorbates_site_lst, adsorption_energy_lst = skip_ads_calculated(
                report_location,
                all_gpw_files,
                init_adsorbates_site_lst,
                adsorption_energy_lst,
                final_adsorbates_site_lst,
                opt_slab_energy,
                adatom_pot_energy)[0:2]
            all_gpw_files_ads_site = ['/'.join(i.split('/')[:-1]) for i in all_gpw_files]
            all_traj_files = [i for i in all_traj_files
                              if '/'.join(i.split('/')[:-1]) not in all_gpw_files_ads_site]
        for traj_file in all_traj_files:
            output_lst = adsorption_energy_calculator(traj_file, report_location,
                                                      opt_slab_energy, adatom_pot_energy,
                                                      opt_slab_magmom, gpaw_calc,
                                                      solver_fmax, solver_max_step,
                                                      calc_type='normal',
                                                      fix_layer=fix_layer,
                                                      # BUG FIX: was hard-coded to 'bottom',
                                                      # silently ignoring the constructor argument.
                                                      fix_option=fix_option,
                                                      )
            init_adsorbates_site_lst.append(output_lst[0])
            adsorption_energy_lst.append(output_lst[1])
            final_adsorbates_site_lst.append(output_lst[2])
        # ---- tabulate and report ----------------------------------------
        adsorption_energy_dict['init_sites[x_y](Ang)'] = init_adsorbates_site_lst
        adsorption_energy_dict['final_sites[x_y](Ang)'] = final_adsorbates_site_lst
        adsorption_energy_dict['adsorption_energy(eV)'] = adsorption_energy_lst
        ads_df = pd.DataFrame(adsorption_energy_dict)
        ads_df.sort_values(by=['adsorption_energy(eV)'], inplace=True)
        pd.set_option("display.max_rows", None, "display.max_columns", None)
        f = paropen(report_location, 'a')
        parprint(ads_df, file=f)
        parprint('', file=f)
        f.close()
        # ---- persist the lowest-energy structure ------------------------
        min_adsorbates_site = ads_df.iloc[[0]]['init_sites[x_y](Ang)'].to_list()[0]
        lowest_ads_energy_slab = read(glob(all_ads_file_loc + '*/' + min_adsorbates_site + '/slab.traj')[0])
        final_slab_simple_name = element + '_' + miller_index_tight
        ads_db = connect('final_database/ads_' + size_xy + '.db')
        min_ads_pot_e = float(ads_df.iloc[[0]]['adsorption_energy(eV)'].to_list()[0])
        # reserve() returns None when a row with this name already exists.
        row_id = ads_db.reserve(name=final_slab_simple_name)
        if row_id is None:
            row_id = ads_db.get(name=final_slab_simple_name).id
            ads_db.update(id=row_id, atoms=lowest_ads_energy_slab, name=final_slab_simple_name,
                          ads_pot_e=min_ads_pot_e)
        else:
            ads_db.write(lowest_ads_energy_slab,
                         id=row_id,
                         name=final_slab_simple_name,
                         ads_pot_e=min_ads_pot_e)
        f = paropen(report_location, 'a')
        parprint('Adsorption energy calculation complete.', file=f)
        parprint('Selected ads site is: ', file=f)
        parprint(min_adsorbates_site, file=f)
        f.close()
class ads_NN_interact_calc:
    """Relax adsorbate-adsorbate interaction structures for one surface.

    For every ``input.traj`` under
    ``results/<element>/ads/<Nx>x<Ny>/<miller>/adsorbates/<ads>/<sub_dir>/*/``
    the slab is loaded, the bottom ``fix_layer`` layers are frozen and the
    structure is relaxed with ``opt.relax``.  Restart behaviour: sites with
    a finished ``slab.gpw`` are skipped when ``restart_calc`` is true, and a
    partially converged ``slab_interm.gpw`` is resumed when present.
    Spin-polarized calculators are rejected.
    """

    def __init__(self,
                 element,
                 miller_index_tight,
                 gpaw_calc,
                 ads,
                 solver_fmax,
                 solver_max_step,
                 restart_calc,
                 size,  # xy size
                 sub_dir,
                 fix_layer=2,
                 fix_option='bottom'):
        size_xy = '{}x{}'.format(size[0], size[1])
        ads_root = ('results/' + element + '/' + 'ads/' + size_xy + '/'
                    + miller_index_tight + '/' + 'adsorbates/' + str(ads) + '/')
        pending_trajs = glob(ads_root + sub_dir + '/*/input.traj')
        finished_gpws = glob(ads_root + sub_dir + '/*/slab.gpw')
        if restart_calc == True and len(finished_gpws) >= 1:
            # drop any site whose directory already holds a finished slab.gpw
            finished_dirs = {os.path.dirname(p) for p in finished_gpws}
            pending_trajs = [p for p in pending_trajs
                             if os.path.dirname(p) not in finished_dirs]
        for traj_path in pending_trajs:
            work_dir = os.path.dirname(traj_path)
            interm_gpw = work_dir + '/slab_interm.gpw'
            if os.path.isfile(interm_gpw):
                # resume a partially converged calculation
                ads_slab, gpaw_calc = restart(interm_gpw)
            else:
                ads_slab = read(traj_path)
            pbc_checker(ads_slab)
            calc_params = gpaw_calc.__dict__['parameters']
            if calc_params['spinpol']:
                raise RuntimeError('spin polarization calculation not supported.')
            slab_c_coord, cluster = detect_cluster(ads_slab)
            if fix_option != 'bottom':
                raise RuntimeError('Only bottom fix option available now.')
            unique_cluster_index = sorted(set(cluster), key=cluster.index)[fix_layer - 1]
            # NOTE(review): `cluster` is a plain list, so
            # `cluster == unique_cluster_index` evaluates to a single bool
            # rather than an element-wise mask — confirm detect_cluster's
            # return type before relying on this branch.
            max_height_fix = max(slab_c_coord[cluster == unique_cluster_index])
            # +0.05 Ang so every atom of the bottom layers is captured
            fix_mask = ads_slab.positions[:, 2] < (max_height_fix + 0.05)
            ads_slab.set_constraint(FixAtoms(mask=fix_mask))
            ads_slab.set_calculator(gpaw_calc)
            opt.relax(ads_slab, work_dir, fmax=solver_fmax, maxstep=solver_max_step)
class ads_custom_ads_site_calc:
    """Adsorption-energy calculation for user-defined ('custom') sites.

    Relaxes every adsorbate structure found under
    ``results/<element>/ads/<Nx>x<Ny>/<miller>/adsorbates/<ads>/custom/``,
    tabulates the adsorption energies into
    ``<target_dir>_custom_ads_results_report.txt`` and reports the
    lowest-energy site.  Unlike ads_lowest_ads_site_calc, nothing is written
    to final_database (the persistence block was deliberately disabled).
    """

    def __init__(self,
                 element,
                 miller_index_tight,
                 gpaw_calc,
                 ads,
                 adatom_pot_energy,
                 solver_fmax,
                 solver_max_step,
                 restart_calc,
                 size,  # xy size
                 fix_layer=2,
                 fix_option='bottom'):
        # ---- paths and report -------------------------------------------
        size_xy = str(size[0]) + 'x' + str(size[1])
        target_dir = 'results/' + element + '/' + 'ads/' + size_xy + '/' + miller_index_tight
        report_location = target_dir + '_custom_ads_results_report.txt'
        all_ads_file_loc = target_dir + '/' + 'adsorbates/' + str(ads) + '/'
        initialize_report(report_location, gpaw_calc)
        # ---- clean slab reference energy --------------------------------
        opt_slab_energy, opt_slab_magmom = get_clean_slab(element, miller_index_tight,
                                                          report_location, target_dir, size_xy,
                                                          fix_layer, solver_fmax, solver_max_step,
                                                          gpaw_calc)
        # ---- adsorption calculations ------------------------------------
        adsorption_energy_dict = {}
        init_adsorbates_site_lst = []
        final_adsorbates_site_lst = []
        adsorption_energy_lst = []
        all_traj_files = glob(all_ads_file_loc + 'custom/*/input.traj')
        all_gpw_files = glob(all_ads_file_loc + 'custom/*/slab.gpw')
        if restart_calc and len(all_gpw_files) >= 1:
            # Harvest energies of already-finished sites and drop them
            # from the work queue.
            init_adsorbates_site_lst, adsorption_energy_lst = skip_ads_calculated(
                report_location,
                all_gpw_files,
                init_adsorbates_site_lst,
                adsorption_energy_lst,
                final_adsorbates_site_lst,
                opt_slab_energy,
                adatom_pot_energy)[0:2]
            all_gpw_files_ads_site = ['/'.join(i.split('/')[:-1]) for i in all_gpw_files]
            all_traj_files = [i for i in all_traj_files
                              if '/'.join(i.split('/')[:-1]) not in all_gpw_files_ads_site]
        for traj_file in all_traj_files:
            output_lst = adsorption_energy_calculator(traj_file, report_location,
                                                      opt_slab_energy, adatom_pot_energy,
                                                      opt_slab_magmom, gpaw_calc,
                                                      solver_fmax, solver_max_step,
                                                      calc_type='normal',
                                                      fix_layer=fix_layer,
                                                      # BUG FIX: was hard-coded to 'bottom',
                                                      # silently ignoring the constructor argument.
                                                      fix_option=fix_option,
                                                      )
            init_adsorbates_site_lst.append(output_lst[0])
            adsorption_energy_lst.append(output_lst[1])
            final_adsorbates_site_lst.append(output_lst[2])
        # ---- tabulate and report ----------------------------------------
        adsorption_energy_dict['init_sites[x_y](Ang)'] = init_adsorbates_site_lst
        adsorption_energy_dict['final_sites[x_y](Ang)'] = final_adsorbates_site_lst
        adsorption_energy_dict['adsorption_energy(eV)'] = adsorption_energy_lst
        ads_df = pd.DataFrame(adsorption_energy_dict)
        ads_df.sort_values(by=['adsorption_energy(eV)'], inplace=True)
        pd.set_option("display.max_rows", None, "display.max_columns", None)
        f = paropen(report_location, 'a')
        parprint(ads_df, file=f)
        parprint('', file=f)
        f.close()
        min_adsorbates_site = ads_df.iloc[[0]]['init_sites[x_y](Ang)'].to_list()[0]
        # NOTE: the relaxed slab is intentionally NOT written to
        # final_database for custom sites; only the report records
        # the selected site.
        f = paropen(report_location, 'a')
        parprint('Adsorption energy calculation complete.', file=f)
        parprint('Selected ads site is: ', file=f)
        parprint(min_adsorbates_site, file=f)
        f.close()
|
kianpu34593/base4gpaw | BASIC/utils.py | import os
import sys
from pymatgen.io.cif import CifWriter
from pymatgen.core.structure import Structure
from pymatgen.ext.matproj import MPRester
from autocat import adsorption
from ase.db import connect
from pymatgen.core.surface import SlabGenerator, generate_all_slabs
from pymatgen.io.ase import AseAtomsAdaptor
from collections import Counter
from itertools import chain
from ase.io import read,write
import numpy as np
import pandas as pd
from typing import List, Type
from glob import glob
import warnings
import itertools
from scipy.spatial.distance import squareform
from scipy.cluster.hierarchy import fcluster, linkage
from ase import Atom
import matplotlib.pyplot as plt
from ase.visualize.plot import plot_atoms
def pause():
    """Block until the user presses <ENTER>.

    Used by the directory-creation helpers to let the user acknowledge a
    "directory already exists" warning before execution continues.
    """
    input('Press <ENTER> to continue...')
def create_big_dir():
    """Create the three top-level working directories of a project.

    Creates ``orig_cif_data/``, ``final_database/`` and ``results/`` in the
    current working directory.  If one already exists, a warning is printed
    and the user must confirm via :func:`pause`; the existing directory is
    left untouched.
    """
    # One loop instead of three copy-pasted if/else blocks (DRY); the
    # original no-op os.chdir(os.getcwd()) was dropped.
    for dir_name in ('orig_cif_data', 'final_database', 'results'):
        if os.path.isdir(dir_name):
            print("WARNING: {}/ directory already exists!".format(dir_name))
            pause()
        else:
            os.makedirs(dir_name, exist_ok=True)
def create_element_dir(element,
                       miller_index=None,
                       shift_lst: List[float] = None,
                       order_lst: List[int] = None,
                       options=('bulk', 'surf'),
                       optimized_parameters=('h', 'kdens')):
    """Create the results/<element>/ directory tree.

    Args:
        element: element (or compound) name; directories are created under
            ``results/<element>/``.
        miller_index: comma-separated miller index string (e.g. ``'1,0,0'``),
            forwarded to :func:`create_surf_sub_dir` when ``'surf'`` is in
            *options*.
        shift_lst: slab termination shifts, zipped with *order_lst*.
        order_lst: slab termination orders, zipped with *shift_lst*.
        options: which sub-trees to create; any of ``'bulk'`` and ``'surf'``.
        optimized_parameters: one ``results_<par>`` bulk sub-directory is
            created per entry.
    """
    # NOTE: defaults are tuples (not lists) to avoid the shared mutable
    # default pitfall; membership tests and iteration are unaffected.
    element = 'results/' + element
    # create the element dir
    if os.path.isdir(element):
        print("WARNING: {}/ directory already exists!".format(element))
        pause()
    else:
        os.makedirs(element, exist_ok=True)
    # create the bulk dir with one results_<par> sub-dir per parameter
    if 'bulk' in options:
        if os.path.isdir(element + '/' + 'bulk'):
            print("WARNING: {}/bulk/ directory already exists!".format(element))
            pause()
        else:
            os.makedirs(element + '/' + 'bulk', exist_ok=True)
            for par in optimized_parameters:
                create_bulk_sub_dir(element, par)
            print("{}/bulk/ directory created!".format(element))
    # create the surf dir with one sub-dir per (shift, order) termination
    if 'surf' in options:
        if os.path.isdir(element + '/' + 'surf'):
            print("WARNING: {}/surf/ directory already exists!".format(element))
            pause()
        else:
            os.makedirs(element + '/' + 'surf', exist_ok=True)
            for shift, order in zip(shift_lst, order_lst):
                create_surf_sub_dir(element, miller_index, shift, order)
            print('{}/surf/ directories created!'.format(element))
def create_surf_sub_dir(element, miller_index_input, shift, order):
    """Create <element>/surf/<hkl>_<shift>_<order>/<layers>x1x1 directories.

    Reads the raw surface .cif files previously saved under
    ``<element>/raw_surf/<hkl>/<shift>/<order>/`` (exactly six are expected,
    one per layer count) and creates one ``<layers>x1x1`` calculation
    directory per file.

    Raises:
        RuntimeError: if the raw_surf directory does not exist.
        AssertionError: if the number of raw .cif files is not six.
    """
    # '1,0,0' -> '100'  (the unused "loose" tuple form was removed)
    miller_index = ''.join(miller_index_input.split(','))
    raw_surf_dir = element + '/' + 'raw_surf'
    if not os.path.isdir(raw_surf_dir):
        raise RuntimeError(raw_surf_dir + ' does not exist.')
    raw_cif_path = element + '/' + 'raw_surf/' + str(miller_index) + '/' + str(shift) + '/' + str(order) + '/' + '*.cif'
    raw_cif_files = glob(raw_cif_path)
    # Explicit raise instead of `assert` so the check survives `python -O`.
    if len(raw_cif_files) != 6:
        raise AssertionError('The size of raw_cif_files is not 6.')
    # the layer count is encoded in the file name, e.g. '5.cif' -> '5'
    layers = [cif_file.split('/')[-1].split('.')[0] for cif_file in raw_cif_files]
    sub_dir = element + '/' + 'surf' + '/' + miller_index + '_' + str(shift) + '_' + str(order)
    if os.path.isdir(sub_dir):
        print('WARNING: ' + sub_dir + '/ directory already exists!')
        pause()
    else:
        os.makedirs(sub_dir, exist_ok=True)
        for layer in layers:
            os.makedirs(sub_dir + '/' + str(layer) + 'x1x1', exist_ok=True)
def create_bulk_sub_dir(element, par):
    """Create <element>/bulk/results_<par>/ together with its eos_fit/ sub-dir.

    If the results directory already exists, a warning is printed and the
    user must confirm via :func:`pause`; nothing is created in that case.
    """
    results_dir = element + '/bulk/results_' + par
    if os.path.isdir(results_dir):
        print('WARNING: ' + results_dir + '/ directory already exists!')
        pause()
    else:
        os.makedirs(results_dir, exist_ok=True)
        os.makedirs(results_dir + '/eos_fit', exist_ok=True)
def create_ads_and_dir(element,
                       surf_struc,
                       ads_option,
                       offset,
                       ortho=False,
                       ads_atom=['Li'],
                       ads_site=['ontop','hollow','bridge'],
                       grid_size=[0.5,0.5],
                       slab_size=(1,1,1),
                       tuple_list=[()],
                       height_dict=None,
                       custom_position=[0,0],
                       ):
    """Generate adsorbate input structures (via autocat) for each surface.

    For every structure name in *surf_struc*, the relaxed slab is loaded from
    ``final_database/surf.db``, repeated to *slab_size*, and adsorbate input
    structures are written below
    ``results/<element>/ads/<Nx>x<Ny>/<struc>/`` according to *ads_option*:

    * ``'autocat'``           - symmetric sites of the types in *ads_site*.
    * ``'grid'``              - a regular grid with spacing *grid_size*,
                                every point shifted by *offset*.
    * ``'lowest_ads_site'``   - the adsorbate position of the relaxed 1x1
                                slab from ``final_database/ads_1x1.db``.
    * ``'custom'``            - the single xy position *custom_position*.
    * ``'nearest-neighbors'`` - extra adatoms at the cell translations in
                                *tuple_list*, relative to the adsorbate of
                                the stored <Nx>x<Ny> slab.
    * ``'no-adatom'``         - only a clean_slab/input.traj is written.

    Raises:
        SystemExit: if final_database/surf.db is missing.
        ValueError: if *tuple_list* is empty ('nearest-neighbors' option).
        TypeError: for an unknown *ads_option*.

    NOTE(review): the list defaults (ads_atom, ads_site, grid_size,
    tuple_list, custom_position) are mutable default arguments; they are
    not mutated here, but treat them as read-only.
    """
    current_dir=os.getcwd()
    surf_db_path='final_database/surf.db'
    if not os.path.isfile(surf_db_path):
        sys.exit("ERROR: surf database has not been established!")
    else:
        surf_db=connect(surf_db_path)
    primitive_ads_db_path='final_database/ads_1x1.db'
    if os.path.isfile(primitive_ads_db_path):
        # only used by the 'lowest_ads_site' option
        ads1x1_db=connect(primitive_ads_db_path)
    for struc in surf_struc:
        primitive_slab = surf_db.get_atoms(simple_name=element+'_'+struc)
        sub_dir='results/'+element+'/'+'ads'+'/'+str(slab_size[0])+'x'+str(slab_size[1])+'/'+struc
        os.makedirs(sub_dir,exist_ok=True)
        big_slab=primitive_slab*slab_size
        if ads_option=='autocat':
            os.chdir(current_dir+'/'+sub_dir)
            adsorption.generate_rxn_structures(big_slab,ads=ads_atom,site_type=ads_site,write_to_disk=True,height=height_dict)
        elif ads_option=='grid':
            # xy lattice parameters of the primitive (1x1) cell
            single_cell_x=primitive_slab.cell.cellpar()[0]
            single_cell_y=primitive_slab.cell.cellpar()[1]
            # fractional step corresponding to one grid spacing
            single_frac_x=1/(single_cell_x//grid_size[0])
            single_frac_y=1/(single_cell_y//grid_size[1])
            if ortho:
                # drop the off-axis components so the grid is rectangular
                single_cell_x_element=np.array([primitive_slab.cell[0][0],0])*single_frac_x
                single_cell_y_element=np.array([0,primitive_slab.cell[1][1]])*single_frac_y
            else:
                single_cell_x_element=primitive_slab.cell[0][0:2]*single_frac_x
                single_cell_y_element=primitive_slab.cell[1][0:2]*single_frac_y
            ads_sites=[]
            for i, j in itertools.product(list(range(int(single_cell_x//grid_size[0]))), list(range(int(single_cell_y//grid_size[1])))):
                single_ads_site=np.round(i*single_cell_x_element+j*single_cell_y_element,decimals=3)
                # shift every grid point by the user-supplied offset
                single_ads_site+=offset
                ads_sites.append((single_ads_site))
            sites_dict={'grid':ads_sites}
            os.chdir(current_dir+'/'+sub_dir)
            adsorption.generate_rxn_structures(big_slab,ads=ads_atom,all_sym_sites=False,sites=sites_dict,write_to_disk=True,height=height_dict)
        elif ads_option=='lowest_ads_site':
            primitive_ads_slab=ads1x1_db.get_atoms(name=element+'_'+struc)
            primitive_ads_slab.wrap()
            # the adsorbate is assumed to be the last atom of the stored slab
            ads_xy_position=np.round(primitive_ads_slab.get_positions()[-1,:2],decimals=3)
            ads_height=primitive_ads_slab.get_positions()[-1,2]-np.max(primitive_ads_slab.get_positions()[:-1,2])
            height_dict={ads_atom[0]:np.round(ads_height,decimals=3)}
            site_dict={'lowest_ads_site':[tuple(ads_xy_position)]}
            os.chdir(current_dir+'/'+sub_dir)
            adsorption.generate_rxn_structures(big_slab,ads=ads_atom,all_sym_sites=False,sites=site_dict,write_to_disk=True,height=height_dict)
        elif ads_option=='custom':
            site_dict={'custom':[tuple(custom_position)]}
            os.chdir(current_dir+'/'+sub_dir)
            adsorption.generate_rxn_structures(big_slab,ads=ads_atom,all_sym_sites=False,sites=site_dict,write_to_disk=True,height=height_dict)
        elif ads_option=='nearest-neighbors':
            big_ads_slab_path = 'final_database/ads_'+str(slab_size[0])+'x'+str(slab_size[1])+'.db'
            big_ads_db = connect(big_ads_slab_path)
            big_ads_slab = big_ads_db.get_atoms(name=element+'_'+struc)
            single_cell_x=primitive_slab.cell.cellpar()[0]
            single_cell_y=primitive_slab.cell.cellpar()[1]
            # NOTE(review): single_frac_x/_y are unused in this branch
            single_frac_x=1/single_cell_x
            single_frac_y=1/single_cell_y
            single_cell_x_element=primitive_slab.cell[0][0:2]
            single_cell_y_element=primitive_slab.cell[1][0:2]
            ads_xy_position=np.round(big_ads_slab.get_positions()[-1,:2],decimals=3)
            nearest_position_list=[]
            if len(tuple_list)==0:
                raise ValueError('Positions tuple is empty.')
            for i in tuple_list:
                # new adatom at (existing adsorbate) + i0*a + i1*b cell translations
                nearst_position=ads_xy_position+single_cell_x_element*i[0]+single_cell_y_element*i[1]
                nearest_position_list.append(nearst_position)
            site_dict={str(i[0])+'_'+str(i[1]):[tuple(j)] for i,j in zip(tuple_list,nearest_position_list)}
            ads_height=big_ads_slab.get_positions()[-1,2]-np.max(big_ads_slab.get_positions()[:-1,2])
            # height 0: place the new adatom in the same plane as the existing one
            height_dict={ads_atom[0]:0}
            os.chdir(current_dir+'/'+sub_dir)
            adsorption.generate_rxn_structures(big_ads_slab,ads=ads_atom,all_sym_sites=False,sites=site_dict,write_to_disk=True,height=height_dict)
        elif ads_option=='no-adatom':
            os.chdir(current_dir+'/'+sub_dir)
            os.makedirs('clean_slab')
            big_slab.write('clean_slab/input.traj')
            print('clean slab written to ./clean_slab/input.traj')
        else:
            raise TypeError('Specify the ads_option. Availble options: autocat, grid, custom and 2-adatoms')
        # return to the project root before processing the next surface
        os.chdir(current_dir)
def adsobates_plotter(element,
                      miller_index,
                      slab_size=(1,1,1),
                      option='autocat',  # or 'grid', 'lowest_ads_site', 'nearest-neighbors'
                      ):
    """Render the generated adsorbate sites of each surface into ads_sites.png.

    For every miller index, the clean slab is read from
    ``final_database/surf.db`` (or the adsorbed slab from
    ``final_database/ads_<Nx>x<Ny>.db`` for the 'nearest-neighbors' option),
    each adsorbate position found in the per-site ``input.traj`` files is
    marked with a He atom, and a three-view figure is saved as
    ``ads_sites.png`` inside that surface's adsorbates directory.

    Args:
        element: element name used for database lookups and paths.
        miller_index: iterable of miller-index strings.
        slab_size: (x, y, z) supercell repetition of the stored slab.
        option: which set of generated sites to plot.

    Raises:
        SystemExit: if final_database/surf.db does not exist.
        TypeError: if *option* is not recognized.
    """
    current_dir=os.getcwd()
    surf_db_path='final_database/surf.db'
    if not os.path.isfile(surf_db_path):
        sys.exit("ERROR: surf database has not been established!")
    else:
        surf_db=connect(surf_db_path)
    for m_ind in miller_index:
        base_slab = surf_db.get_atoms(simple_name=element+'_'+m_ind)
        base_slab=base_slab*slab_size
        sub_dir='results/'+element+'/'+'ads'+'/'+str(slab_size[0])+'x'+str(slab_size[1])+'/'+m_ind+'/adsorbates/'
        if option == 'autocat':
            os.chdir(current_dir+'/'+sub_dir)
            bridges=glob('Li/bridge/*/input.traj')
            ontop=glob('Li/ontop/*/input.traj')
            hollow=glob('Li/hollow/*/input.traj')
            all_files=bridges+ontop+hollow
        elif option == 'grid':
            os.chdir(current_dir+'/'+sub_dir)
            all_files=glob('Li/grid/*/input.traj')
        elif option == 'lowest_ads_site':
            os.chdir(current_dir+'/'+sub_dir)
            all_files=glob('Li/lowest_ads_site/*/input.traj')
        elif option == 'nearest-neighbors':
            # plot on top of the adsorbed big slab instead of the clean one
            big_ads_slab_path = 'final_database/ads_'+str(slab_size[0])+'x'+str(slab_size[1])+'.db'
            big_ads_db = connect(big_ads_slab_path)
            base_slab = big_ads_db.get_atoms(name=element+'_'+m_ind)
            os.chdir(current_dir+'/'+sub_dir)
            all_files=glob('Li/1_0/*/input.traj')+glob('Li/0_1/*/input.traj')+glob('Li/1_1/*/input.traj')+glob('Li/0.5_0.5/*/input.traj')
        else:
            raise TypeError('Specify the option. Availble options: autocat, grid, custom and 2-adatoms')
        for file in all_files:
            slab=read(file)
            positions=slab.get_positions()
            ads_atom_index=[-1]
            Li_position=positions[ads_atom_index,:][0]
            # mark each adsorbate position with a He atom on the base slab
            base_slab.append(Atom('He',position=Li_position))
        # Render once after all site markers are appended, and close the
        # figure afterwards so figures don't accumulate across surfaces
        # (matplotlib keeps unclosed figures alive).
        fig, axarr = plt.subplots(1, 3, figsize=(15, 5))
        plot_atoms(base_slab,axarr[0],rotation=('0x,0y,0z'))
        plot_atoms(base_slab,axarr[1],rotation=('270x,0y,0z'))
        plot_atoms(base_slab,axarr[2],rotation=('270x,90y,0z'))
        fig.savefig("ads_sites.png")
        plt.close(fig)
        os.chdir(current_dir)
def cif_grabber(API_key, pretty_formula):
    """Download the most stable structure of *pretty_formula* as a .cif file.

    Queries the Materials Project for all entries matching *pretty_formula*,
    picks the one with the lowest formation_energy_per_atom, and writes its
    conventional unit cell to
    ``orig_cif_data/<pretty_formula>_<material_id>.cif``.
    """
    mpr = MPRester(str(API_key))
    docs = mpr.query({'pretty_formula': pretty_formula},
                     ['material_id', 'formation_energy_per_atom'])
    # material_id -> formation energy per atom
    energy_by_id = {doc['material_id']: doc['formation_energy_per_atom'] for doc in docs}
    lowest_matID = min(energy_by_id, key=energy_by_id.get)
    struc = mpr.get_structure_by_material_id(lowest_matID, final=True,
                                             conventional_unit_cell=True)
    out_name = 'orig_cif_data' + '/' + pretty_formula + '_' + lowest_matID
    CifWriter(struc).write_file('{}.cif'.format(out_name))
def sym_all_slab(element, max_ind, layers=5, vacuum_layer=10, symmetric=False):
    """Print a survey of all symmetric slab terminations up to *max_ind*.

    Loads the relaxed bulk for *element* from ``final_database/bulk.db``,
    generates every slab up to the given maximum miller index with pymatgen,
    keeps only the symmetric ones, and prints one row per miller index with
    the number of distinct shifts and their (rounded) values.
    """
    bulk_ase = connect('final_database/bulk.db').get_atoms(name=element)
    bulk_pym = AseAtomsAdaptor.get_structure(bulk_ase)
    slabgenall = generate_all_slabs(bulk_pym, max_ind, layers, vacuum_layer,
                                    center_slab=True, symmetrize=symmetric,
                                    in_unit_planes=True)
    print('Miller Index\tNum of Different Shift(s)\tShifts')
    symmetric_slabs = [slab for slab in slabgenall if slab.is_symmetric()]
    # count terminations per miller index (insertion order preserved)
    miller_counts = Counter(slab.miller_index for slab in symmetric_slabs)
    for key in miller_counts:
        shifts = [np.round(slab.shift, decimals=4)
                  for slab in symmetric_slabs if slab.miller_index == key]
        print(str(key)+'\t'+str(miller_counts[key])+'\t\t\t\t'+str(shifts))
def surf_creator(element,ind,layers,vacuum_layer,unit,order_to_save,save=False,orthogonalize=False,symmetric=False):
    """Generate slab terminations of a bulk, summarize them, optionally save one.

    Reads the relaxed bulk for *element* from ``final_database/bulk.db``,
    builds slabs with pymatgen's SlabGenerator, prints a table with one row
    per termination (shift, cell angles, detected layer count, atom count)
    and, when *save* is true, stores the slab selected by *order_to_save*
    via :func:`surf_saver`.

    Args:
        element: name key of the bulk entry in final_database/bulk.db.
        ind: miller index tuple, e.g. (1, 0, 0).
        layers: slab thickness passed to SlabGenerator.
        vacuum_layer: vacuum thickness passed to SlabGenerator.
        unit: if true, thicknesses are in unit planes (in_unit_planes).
        order_to_save: index into the generated slab list to save.
        save: actually write the selected slab to disk.
        orthogonalize: force the c axis to [0, 0, L] when alpha/beta != 90.
        symmetric: ask pymatgen to symmetrize the generated slabs.

    Raises:
        RuntimeError: if no slab is generated, or order_to_save is out of range.
    """
    bulk_ase=connect('final_database/bulk.db').get_atoms(name=element)
    bulk_pym=AseAtomsAdaptor.get_structure(bulk_ase)
    slabgen = SlabGenerator(bulk_pym, ind, layers, vacuum_layer,
                            center_slab=True,in_unit_planes=unit)
    slabs_all=slabgen.get_slabs(symmetrize=symmetric)
    # symmetry filtering is currently disabled: all slabs pass through
    slabs_symmetric=slabs_all
    #slabs_symmetric=[slabs_all[i] for i, slab in enumerate(slabs_all) if slab.is_symmetric()]
    if len(slabs_symmetric) == 0:
        raise RuntimeError('No symmetric slab found!')
    else:
        shift_ls=[]
        slab_ase_ls=[]
        angle_ls=[]
        num_different_layers_ls=[]
        num_atom_ls=[]
        # NOTE(review): shift_ls is re-initialized here (duplicate of the
        # assignment a few lines above); harmless but redundant.
        shift_ls = []
        for i,slab in enumerate(slabs_symmetric):
            # temp save for analysis: round-trip through a .cif file to
            # convert the pymatgen slab into an ASE Atoms object
            os.makedirs('results/'+element+'/raw_surf',exist_ok=True)
            surf_location='results/'+element+'/raw_surf/'+str(ind)+'_temp'+'.cif'
            CifWriter(slab).write_file(surf_location)
            slab_ase=read(surf_location)
            angles=np.round(slab_ase.cell.angles(),decimals=4)
            # alpha or beta != 90 degrees means the c axis is tilted
            anlges_arg=[angle != 90.0000 for angle in angles[:2]]
            if orthogonalize==True and np.any(anlges_arg):
                # force the c axis to be orthogonal, keeping its length
                L=slab_ase.cell.lengths()[2]
                slab_ase.cell[2]=[0,0,L]
                slab_ase.wrap()
                slab_ase.center()
            slab_ase_ls.append(slab_ase)
            angle_ls.append(np.round(slab_ase.cell.angles(),decimals=4))
            shift_ls.append(np.round(slab.shift,decimals=4))
            # number of distinct z-layers found by hierarchical clustering
            unique_cluster=np.unique(detect_cluster(slab_ase)[1])
            num_different_layers_ls.append(len(unique_cluster))
            num_atom_ls.append(len(slab_ase))
        # NOTE(review): _calculate_possible_shifts is a private pymatgen
        # API and may break on pymatgen upgrades.
        if len(slabs_symmetric)==len(slabgen._calculate_possible_shifts()):
            shift_ls=np.round(slabgen._calculate_possible_shifts(),decimals=4)
        slabs_info_dict={'shift':shift_ls,'angles':angle_ls,'actual_layers':num_different_layers_ls,'num_of_atoms':num_atom_ls}
        slabs_info_df=pd.DataFrame(slabs_info_dict)
        print(slabs_info_df)
        # clean up the temporary .cif used for conversion
        if os.path.isfile(surf_location):
            os.remove(surf_location)
        if save:
            #slab_order_save=[i for i,slab in enumerate(slabs_symmetric) if np.round(slab.shift,decimals=4)==shift_to_save]
            if len(slab_ase_ls)==0:
                raise RuntimeError('No slab to save!')
            #elif len(slab_order_save)>1:
            #warnings.warn('More than one slabs to save! Current code only saves the first one!')
            if order_to_save>len(slab_ase_ls)-1:
                raise RuntimeError('order_to_save exceeds the number of slabs!')
            surf_saver(element,slab_ase_ls[order_to_save],ind,layers,shift_ls[order_to_save],order_to_save)
def surf_saver(element, slab_to_save, ind, layers, shift, order_to_save):
    """Write one raw surface slab to its canonical .cif location.

    The file is stored as
    ``results/<element>/raw_surf/<hkl>/<shift>/<order>/<layers>.cif``.
    Refuses to overwrite an existing file.

    Raises:
        RuntimeError: if the target .cif file already exists.
    """
    tight_ind = ''.join(str(x) for x in ind)   # e.g. (1, 1, 1) -> '111'
    target_dir = 'results/' + element + '/raw_surf/' + str(tight_ind) + '/' + str(shift) + '/' + str(order_to_save)
    os.makedirs(target_dir, exist_ok=True)
    surf_location = target_dir + '/' + str(layers) + '.cif'
    if os.path.isfile(surf_location):
        raise RuntimeError(surf_location + ' already exists!')
    slab_to_save.write(surf_location, format='cif')
    print('Raw surface saving complete!')
def detect_cluster(slab,tol=0.3):
    """Group the atoms of *slab* into layers by z-coordinate clustering.

    Single-linkage hierarchical clustering on the pairwise |z_i - z_j|
    distances; atoms whose z coordinates are within ``tol`` of a chain of
    neighbors end up in the same cluster (i.e. the same atomic layer).

    Args:
        slab: object with ``get_positions()`` returning an (n, 3) array of
            Cartesian coordinates (e.g. an ``ase.Atoms`` slab).
        tol (float): distance threshold passed to ``fcluster`` (same units
            as the positions).

    Returns:
        tuple: ``(slab_c, clusters)`` where ``slab_c`` is the sorted array
        of z coordinates and ``clusters`` is a list of 1-based cluster
        labels aligned with ``slab_c``.
    """
    slab_c = np.sort(slab.get_positions()[:, 2])
    # Vectorized pairwise |z_i - z_j| matrix. The previous implementation
    # filled it element-by-element over itertools.combinations with a
    # redundant `if i != j` guard (combinations never yields i == j).
    dist_matrix = np.abs(slab_c[:, np.newaxis] - slab_c[np.newaxis, :])
    condensed_m = squareform(dist_matrix)
    z = linkage(condensed_m)
    clusters = fcluster(z, tol, criterion="distance")
    return slab_c, list(clusters)
|
peterfabakker/pyEX | pyEX/stats.py | import pandas as pd
from datetime import datetime
from .common import _getJson, PyEXception, _strOrDate, _reindex, _toDatetime
def stats(token='', version='', filter=''):
    '''Fetch IEX Cloud intraday statistics.

    https://iexcloud.io/docs/api/#stats-intraday

    Args:
        token (string); Access token
        version (string); API version
        filter (string); filters: https://iexcloud.io/docs/api/#filter-results

    Returns:
        dict: result
    '''
    endpoint = 'stats/intraday'
    return _getJson(endpoint, token, version, filter)
def statsDF(token='', version='', filters=''):
    '''Fetch IEX Cloud intraday statistics as a DataFrame.

    https://iexcloud.io/docs/api/#stats-intraday

    Args:
        token (string); Access token
        version (string); API version
        filters (string); filters: https://iexcloud.io/docs/api/#filter-results

    Returns:
        DataFrame: result
    '''
    # Bug fix: this previously passed the *builtin* ``filter`` function
    # instead of the ``filters`` argument, so any caller-supplied filter
    # was silently discarded.
    df = pd.DataFrame(stats(token, version, filters))
    _toDatetime(df)
    return df
def recent(token='', version='', filter=''):
    '''Fetch statistics for the most recent IEX trading days.

    https://iexcloud.io/docs/api/#stats-recent

    Args:
        token (string); Access token
        version (string); API version
        filter (string); filters: https://iexcloud.io/docs/api/#filter-results

    Returns:
        dict: result
    '''
    endpoint = 'stats/recent'
    return _getJson(endpoint, token, version, filter)
def recentDF(token='', version='', filter=''):
    '''Fetch statistics for the most recent IEX trading days as a DataFrame.

    https://iexcloud.io/docs/api/#stats-recent

    Args:
        token (string); Access token
        version (string); API version
        filter (string); filters: https://iexcloud.io/docs/api/#filter-results

    Returns:
        DataFrame: result, indexed by date
    '''
    raw = recent(token, version, filter)
    df = pd.DataFrame(raw)
    _toDatetime(df)
    _reindex(df, 'date')
    return df
def records(token='', version='', filter=''):
    '''Fetch IEX all-time record statistics.

    https://iexcloud.io/docs/api/#stats-records

    Args:
        token (string); Access token
        version (string); API version
        filter (string); filters: https://iexcloud.io/docs/api/#filter-results

    Returns:
        dict: result
    '''
    endpoint = 'stats/records'
    return _getJson(endpoint, token, version, filter)
def recordsDF(token='', version='', filter=''):
    '''Fetch IEX all-time record statistics as a DataFrame.

    https://iexcloud.io/docs/api/#stats-records

    Args:
        token (string); Access token
        version (string); API version
        filter (string); filters: https://iexcloud.io/docs/api/#filter-results

    Returns:
        DataFrame: result
    '''
    raw = records(token, version, filter)
    df = pd.DataFrame(raw)
    _toDatetime(df)
    return df
def summary(date=None, token='', version='', filter=''):
    '''Fetch IEX historical summary statistics.

    https://iexcloud.io/docs/api/#stats-historical-summary

    Args:
        date (str or datetime); Effective month, optional. Strings are
            passed through unchanged; datetimes are formatted as YYYYMM.
        token (string); Access token
        version (string); API version
        filter (string); filters: https://iexcloud.io/docs/api/#filter-results

    Returns:
        dict: result

    Raises:
        PyEXception: if `date` is neither a str nor a datetime.
    '''
    if date:
        if isinstance(date, str):
            return _getJson('stats/historical?date=' + date, token, version, filter)
        elif isinstance(date, datetime):
            return _getJson('stats/historical?date=' + date.strftime('%Y%m'), token, version, filter)
        else:
            # Bug fix: token/version/filter were previously passed into the
            # exception constructor as extra positional args (copy-paste
            # from the _getJson calls above); the exception only needs the
            # message.
            raise PyEXception("Can't handle type : %s" % str(type(date)))
    return _getJson('stats/historical', token, version, filter)
def summaryDF(date=None, token='', version='', filter=''):
    '''Fetch IEX historical summary statistics as a DataFrame.

    https://iexcloud.io/docs/api/#stats-historical-summary

    Args:
        date (str or datetime); Effective month, optional
        token (string); Access token
        version (string); API version
        filter (string); filters: https://iexcloud.io/docs/api/#filter-results

    Returns:
        DataFrame: result
    '''
    raw = summary(date, token, version, filter)
    df = pd.DataFrame(raw)
    _toDatetime(df)
    return df
def daily(date=None, last='', token='', version='', filter=''):
    '''Fetch IEX historical daily statistics.

    https://iexcloud.io/docs/api/#stats-historical-daily

    Args:
        date (str or datetime); Effective date, optional
        last (string); Number of trailing days, optional (used only when
            `date` is not given)
        token (string); Access token
        version (string); API version
        filter (string); filters: https://iexcloud.io/docs/api/#filter-results

    Returns:
        dict: result
    '''
    base = 'stats/historical/daily'
    if date:
        return _getJson(base + '?date=' + _strOrDate(date), token, version, filter)
    if last:
        return _getJson(base + '?last=' + last, token, version, filter)
    return _getJson(base, token, version, filter)
def dailyDF(date=None, last='', token='', version='', filter=''):
    '''Fetch IEX historical daily statistics as a DataFrame.

    https://iexcloud.io/docs/api/#stats-historical-daily

    Args:
        date (str or datetime); Effective date, optional
        last (string); Number of trailing days, optional
        token (string); Access token
        version (string); API version
        filter (string); filters: https://iexcloud.io/docs/api/#filter-results

    Returns:
        DataFrame: result
    '''
    raw = daily(date, last, token, version, filter)
    df = pd.DataFrame(raw)
    _toDatetime(df)
    return df
|
liuqiaoping7/tensorflow | tensorflow/python/saved_model/load_v1_in_v2.py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Import a TF v1-style SavedModel when executing eagerly."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from tensorflow.python.eager import wrap_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.saved_model import loader_impl
from tensorflow.python.saved_model import signature_serialization
from tensorflow.python.training import saver as tf_saver
from tensorflow.python.training.tracking import tracking
class _Initializer(tracking.CapturableResource):
  """Represents an initialization operation restored from a SavedModel.

  Without this object re-export of imported 1.x SavedModels would omit the
  original SavedModel's initialization procedure.

  Created when `tf.saved_model.load` loads a TF 1.x-style SavedModel with an
  initialization op. This object holds a function which runs the
  initialization. It does not require any manual user intervention;
  `tf.saved_model.save` will see this object and automatically add it to the
  exported SavedModel, and `tf.saved_model.load` runs the initialization
  function automatically.
  """

  def __init__(self, init_fn, asset_paths):
    """Wraps `init_fn`, which takes one asset-path tensor per asset."""
    super(_Initializer, self).__init__()
    self._init_fn = init_fn
    self._asset_paths = asset_paths

  def _create_resource(self):
    # No real resource backs this object; a placeholder satisfies the
    # CapturableResource interface (note the "unused_resource" name).
    return array_ops.placeholder(
        dtype=dtypes.resource, shape=[], name="unused_resource")

  def _initialize(self):
    # Feed each tracked asset's path into the restored init function.
    feeds = [path.asset_path for path in self._asset_paths]
    self._init_fn(*feeds)
class _EagerSavedModelLoader(loader_impl.SavedModelLoader):
  """Loads a SavedModel without using Sessions."""

  def get_meta_graph_def_from_tags(self, tags):
    """Override to support implicit one-MetaGraph loading with tags=None."""
    if tags is None:
      # With no tags we can only disambiguate when exactly one MetaGraph
      # exists in the SavedModel.
      if len(self._saved_model.meta_graphs) != 1:
        tag_sets = [mg.meta_info_def.tags
                    for mg in self._saved_model.meta_graphs]
        raise ValueError(
            ("Importing a SavedModel with tf.saved_model.load requires a "
             "'tags=' argument if there is more than one MetaGraph. Got "
             "'tags=None', but there are {} MetaGraphs in the SavedModel with "
             "tag sets {}. Pass a 'tags=' argument to load this SavedModel.")
            .format(len(self._saved_model.meta_graphs), tag_sets))
      return self._saved_model.meta_graphs[0]
    return super(_EagerSavedModelLoader, self).get_meta_graph_def_from_tags(
        tags)

  def load_graph(self, returns, meta_graph_def):
    """Called from wrap_function to import `meta_graph_def`.

    Args:
      returns: Single-element mutable list used as an out-parameter; the
        imported graph's `Saver` (or None) is written into `returns[0]`.
      meta_graph_def: The `MetaGraphDef` proto to import.
    """
    # pylint: disable=protected-access
    saver, _ = tf_saver._import_meta_graph_with_return_elements(
        meta_graph_def)
    # pylint: enable=protected-access
    returns[0] = saver

  def restore_variables(self, wrapped, saver):
    """Restores variables from the checkpoint.

    Args:
      wrapped: The wrapped function holding the imported graph.
      saver: `Saver` produced by `load_graph`, or None when the MetaGraph
        had no saver (in which case nothing is restored).
    """
    if saver is not None:
      saver_def = saver.saver_def
      # Prune a callable that feeds the checkpoint path tensor and runs the
      # graph's restore op, then invoke it with the variables path.
      restore_fn = wrapped.prune(
          feeds=[wrapped.graph.as_graph_element(
              saver_def.filename_tensor_name)],
          fetches=[wrapped.graph.as_graph_element(saver_def.restore_op_name)])
      restore_fn(constant_op.constant(self._variables_path))

  def _extract_signatures(self, wrapped, meta_graph_def):
    """Creates ConcreteFunctions for signatures in `meta_graph_def`.

    Returns:
      Dict mapping each signature key to a pruned function whose keyword
      arguments are the signature's declared input names.
    """
    signature_functions = {}
    for signature_key, signature_def in meta_graph_def.signature_def.items():
      if signature_def.inputs:
        input_names, input_specs = zip(*signature_def.inputs.items())
      else:
        input_names = []
        input_specs = []
      # TODO(allenl): Support optional arguments
      signature_fn = wrapped.prune(
          feeds=[wrapped.graph.as_graph_element(inp.name)
                 for inp in input_specs],
          fetches={name: wrapped.graph.as_graph_element(out.name)
                   for name, out in signature_def.outputs.items()})
      # pylint: disable=protected-access
      signature_fn._arg_keywords = input_names
      if len(input_names) == 1:
        # Allowing positional arguments does not create any ambiguity if there's
        # only one.
        signature_fn._num_positional_args = 1
      else:
        signature_fn._num_positional_args = 0
      # pylint: enable=protected-access
      signature_functions[signature_key] = signature_fn
    return signature_functions

  def load(self, tags):
    """Creates an object from the MetaGraph identified by `tags`."""
    meta_graph_def = self.get_meta_graph_def_from_tags(tags)
    load_graph_returns = [None]
    # Import the MetaGraph inside a FuncGraph; `load_graph` writes the
    # Saver into `load_graph_returns` as a side channel.
    wrapped = wrap_function.wrap_function(
        functools.partial(self.load_graph, load_graph_returns, meta_graph_def),
        signature=[])
    saver, = load_graph_returns
    self.restore_variables(wrapped, saver)
    with wrapped.graph.as_default():
      init_op = loader_impl.get_init_op(meta_graph_def)
    root = tracking.AutoTrackable()
    if init_op is not None:
      # Build an initialization function fed by the asset path tensors, run
      # it eagerly now, and track it on `root` so re-export preserves it.
      asset_feed_tensors = []
      asset_paths = []
      for tensor_name, value in loader_impl.get_asset_tensors(
          self._export_dir, meta_graph_def).items():
        asset_feed_tensors.append(wrapped.graph.as_graph_element(tensor_name))
        asset_paths.append(tracking.TrackableAsset(value))
      init_fn = wrapped.prune(
          feeds=asset_feed_tensors,
          fetches=[wrapped.graph.as_graph_element(init_op)])
      initializer = _Initializer(init_fn, asset_paths)
      initializer._initialize()  # pylint: disable=protected-access
      root.initializer = initializer
      root.asset_paths = asset_paths
    else:
      root.asset_paths = []
    signature_functions = self._extract_signatures(wrapped, meta_graph_def)

    root.signatures = signature_serialization.create_signature_map(
        signature_functions)
    root.variables = list(wrapped.graph.variables)
    return root
def load(export_dir, tags):
  """Load a v1-style SavedModel as an object."""
  return _EagerSavedModelLoader(export_dir).load(tags=tags)
|
liuqiaoping7/tensorflow | tensorflow/python/keras/callbacks.py | <filename>tensorflow/python/keras/callbacks.py
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-import-not-at-top
"""Callbacks: utilities called at certain points during model training.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import copy
import csv
import io
import json
import os
import tempfile
import time
import numpy as np
import six
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.distribute import distribute_coordinator_context as dc_context
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.keras import backend as K
from tensorflow.python.keras.utils.data_utils import Sequence
from tensorflow.python.keras.utils.generic_utils import Progbar
from tensorflow.python.keras.utils.mode_keys import ModeKeys
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import summary_ops_v2
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import keras_export
try:
import requests
except ImportError:
requests = None
def configure_callbacks(callbacks,
                        model,
                        do_validation=False,
                        batch_size=None,
                        epochs=None,
                        steps_per_epoch=None,
                        samples=None,
                        verbose=1,
                        count_mode='steps',
                        mode=ModeKeys.TRAIN):
  """Configures callbacks for use in various training loops.

  Arguments:
      callbacks: List of Callbacks.
      model: Model being trained.
      do_validation: Whether or not validation loop will be run.
      batch_size: Number of samples per batch.
      epochs: Number of epoch to train.
      steps_per_epoch: Number of batches to run per training epoch.
      samples: Number of training samples.
      verbose: int, 0 or 1. Keras logging verbosity to pass to ProgbarLogger.
      count_mode: One of 'steps' or 'samples'. Per-batch or per-sample count.
      mode: String. One of ModeKeys.TRAIN, ModeKeys.TEST, or ModeKeys.PREDICT.
        Which loop mode to configure callbacks for.

  Returns:
      Instance of CallbackList used to control all Callbacks.
  """
  # Short-circuit: an existing CallbackList is assumed to be configured.
  if isinstance(callbacks, CallbackList):
    return callbacks

  if not callbacks:
    callbacks = []

  if mode == ModeKeys.TRAIN:
    # Training implicitly adds metric averaging, history recording and
    # (when verbose) a progress bar around the user's callbacks.
    model.history = History()
    callbacks = [BaseLogger()] + callbacks + [model.history]
    if verbose:
      callbacks.append(ProgbarLogger(count_mode))
  callback_list = CallbackList(callbacks)

  # Point every callback at the model's callback model.
  callback_model = model._get_callback_model()  # pylint: disable=protected-access
  callback_list.set_model(callback_model)

  set_callback_parameters(
      callback_list,
      model,
      do_validation=do_validation,
      batch_size=batch_size,
      epochs=epochs,
      steps_per_epoch=steps_per_epoch,
      samples=samples,
      verbose=verbose,
      mode=mode)

  callback_list.model.stop_training = False
  return callback_list
def set_callback_parameters(callback_list,
                            model,
                            do_validation=False,
                            batch_size=None,
                            epochs=None,
                            steps_per_epoch=None,
                            samples=None,
                            verbose=1,
                            mode=ModeKeys.TRAIN):
  """Sets callback parameters.

  Arguments:
      callback_list: CallbackList instance.
      model: Model being trained.
      do_validation: Whether or not validation loop will be run.
      batch_size: Number of samples per batch.
      epochs: Number of epoch to train.
      steps_per_epoch: Number of batches to run per training epoch.
      samples: Number of training samples.
      verbose: int, 0 or 1. Keras logging verbosity to pass to ProgbarLogger.
      mode: String. One of ModeKeys.TRAIN, ModeKeys.TEST, or ModeKeys.PREDICT.
        Which loop mode to configure callbacks for.
  """
  # In a deferred-build scenario with iterator input the model compiles
  # when the first batch of data is standardized, so `metrics_names` may
  # not exist yet.
  callback_metrics = []
  if mode != ModeKeys.PREDICT and hasattr(model, 'metrics_names'):
    callback_metrics = copy.copy(model.metrics_names)
    if do_validation:
      callback_metrics.extend('val_' + n for n in model.metrics_names)

  # The implicit logging callbacks must know which metrics are stateful so
  # they do not average them.
  for cbk in callback_list:
    if isinstance(cbk, (BaseLogger, ProgbarLogger)):
      cbk.stateful_metrics = model.metrics_names[1:]  # Exclude `loss`

  callback_list.set_params({
      'batch_size': batch_size,
      'epochs': epochs,
      'steps': steps_per_epoch,
      'samples': samples,
      'verbose': verbose,
      'do_validation': do_validation,
      'metrics': callback_metrics,
  })
def _is_generator_like(data):
"""Checks if data is a generator, Sequence, or Iterator."""
return (hasattr(data, 'next') or hasattr(data, '__next__') or isinstance(
data, (Sequence, iterator_ops.Iterator, iterator_ops.IteratorV2)))
def make_logs(model, logs, outputs, mode, prefix=''):
  """Computes logs for sending to `on_batch_end` methods."""
  if mode not in {ModeKeys.TRAIN, ModeKeys.TEST}:
    # Predict mode: pass raw outputs straight through.
    logs['outputs'] = outputs
    return logs
  # Train/test mode: label each output with its metric name (if known).
  if hasattr(model, 'metrics_names'):
    for metric_name, output in zip(model.metrics_names, outputs):
      logs[prefix + metric_name] = output
  return logs
class CallbackList(object):
  """Container abstracting a list of callbacks.

  Arguments:
      callbacks: List of `Callback` instances.
      queue_length: Queue length for keeping
          running statistics over callback execution time.
  """

  def __init__(self, callbacks=None, queue_length=10):
    callbacks = callbacks or []
    self.callbacks = list(callbacks)
    self.queue_length = queue_length
    self.params = {}
    self.model = None
    self._reset_batch_timing()

  def _reset_batch_timing(self):
    # Rolling per-hook timings used to warn when callbacks dominate the
    # batch time.
    self._delta_t_batch = 0.
    self._delta_ts = collections.defaultdict(
        lambda: collections.deque([], maxlen=self.queue_length))

  def append(self, callback):
    self.callbacks.append(callback)

  def set_params(self, params):
    self.params = params
    for callback in self.callbacks:
      callback.set_params(params)

  def set_model(self, model):
    self.model = model
    for callback in self.callbacks:
      callback.set_model(model)

  def _call_batch_hook(self, mode, hook, batch, logs=None):
    """Helper function for all batch_{begin | end} methods."""
    if not self.callbacks:
      return
    hook_name = 'on_{mode}_batch_{hook}'.format(mode=mode, hook=hook)
    if hook == 'begin':
      self._t_enter_batch = time.time()
    if hook == 'end':
      # Bug fix: an 'end' hook can fire without a matching 'begin' having
      # run first; previously that raised AttributeError on the missing
      # `_t_enter_batch`. Fall back to "now" so batch time reads as ~0.
      if not hasattr(self, '_t_enter_batch'):
        self._t_enter_batch = time.time()
      # Batch is ending, calculate batch time.
      self._delta_t_batch = time.time() - self._t_enter_batch

    logs = logs or {}
    t_before_callbacks = time.time()
    for callback in self.callbacks:
      batch_hook = getattr(callback, hook_name)
      batch_hook(batch, logs)
    self._delta_ts[hook_name].append(time.time() - t_before_callbacks)

    delta_t_median = np.median(self._delta_ts[hook_name])
    if (self._delta_t_batch > 0. and
        delta_t_median > 0.95 * self._delta_t_batch and delta_t_median > 0.1):
      logging.warning(
          'Method (%s) is slow compared '
          'to the batch update (%f). Check your callbacks.', hook_name,
          delta_t_median)

  def _call_begin_hook(self, mode):
    """Helper function for on_{train|test|predict}_begin methods."""
    if mode == ModeKeys.TRAIN:
      self.on_train_begin()
    elif mode == ModeKeys.TEST:
      self.on_test_begin()
    else:
      self.on_predict_begin()

  def _call_end_hook(self, mode):
    """Helper function for on_{train|test|predict}_end methods."""
    if mode == ModeKeys.TRAIN:
      self.on_train_end()
    elif mode == ModeKeys.TEST:
      self.on_test_end()
    else:
      self.on_predict_end()

  def on_batch_begin(self, batch, logs=None):
    self._call_batch_hook(ModeKeys.TRAIN, 'begin', batch, logs=logs)

  def on_batch_end(self, batch, logs=None):
    self._call_batch_hook(ModeKeys.TRAIN, 'end', batch, logs=logs)

  def on_epoch_begin(self, epoch, logs=None):
    """Calls the `on_epoch_begin` methods of its callbacks.

    This function should only be called during TRAIN mode.

    Arguments:
        epoch: integer, index of epoch.
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """
    logs = logs or {}
    for callback in self.callbacks:
      callback.on_epoch_begin(epoch, logs)
    self._reset_batch_timing()

  def on_epoch_end(self, epoch, logs=None):
    """Calls the `on_epoch_end` methods of its callbacks.

    This function should only be called during TRAIN mode.

    Arguments:
        epoch: integer, index of epoch.
        logs: dict, metric results for this training epoch, and for the
          validation epoch if validation is performed. Validation result keys
          are prefixed with `val_`.
    """
    logs = logs or {}
    for callback in self.callbacks:
      callback.on_epoch_end(epoch, logs)

  def on_train_batch_begin(self, batch, logs=None):
    """Calls the `on_train_batch_begin` methods of its callbacks.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dict. Has keys `batch` and `size` representing the current batch
          number and the size of the batch.
    """
    self._call_batch_hook(ModeKeys.TRAIN, 'begin', batch, logs=logs)

  def on_train_batch_end(self, batch, logs=None):
    """Calls the `on_train_batch_end` methods of its callbacks.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dict. Metric results for this batch.
    """
    self._call_batch_hook(ModeKeys.TRAIN, 'end', batch, logs=logs)

  def on_test_batch_begin(self, batch, logs=None):
    """Calls the `on_test_batch_begin` methods of its callbacks.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dict. Has keys `batch` and `size` representing the current batch
          number and the size of the batch.
    """
    self._call_batch_hook(ModeKeys.TEST, 'begin', batch, logs=logs)

  def on_test_batch_end(self, batch, logs=None):
    """Calls the `on_test_batch_end` methods of its callbacks.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dict. Metric results for this batch.
    """
    self._call_batch_hook(ModeKeys.TEST, 'end', batch, logs=logs)

  def on_predict_batch_begin(self, batch, logs=None):
    """Calls the `on_predict_batch_begin` methods of its callbacks.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dict. Has keys `batch` and `size` representing the current batch
          number and the size of the batch.
    """
    self._call_batch_hook(ModeKeys.PREDICT, 'begin', batch, logs=logs)

  def on_predict_batch_end(self, batch, logs=None):
    """Calls the `on_predict_batch_end` methods of its callbacks.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dict. Metric results for this batch.
    """
    self._call_batch_hook(ModeKeys.PREDICT, 'end', batch, logs=logs)

  def on_train_begin(self, logs=None):
    """Calls the `on_train_begin` methods of its callbacks.

    Arguments:
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """
    for callback in self.callbacks:
      callback.on_train_begin(logs)

  def on_train_end(self, logs=None):
    """Calls the `on_train_end` methods of its callbacks.

    Arguments:
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """
    for callback in self.callbacks:
      callback.on_train_end(logs)

  def on_test_begin(self, logs=None):
    """Calls the `on_test_begin` methods of its callbacks.

    Arguments:
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """
    for callback in self.callbacks:
      callback.on_test_begin(logs)

  def on_test_end(self, logs=None):
    """Calls the `on_test_end` methods of its callbacks.

    Arguments:
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """
    for callback in self.callbacks:
      callback.on_test_end(logs)

  def on_predict_begin(self, logs=None):
    """Calls the 'on_predict_begin` methods of its callbacks.

    Arguments:
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """
    for callback in self.callbacks:
      callback.on_predict_begin(logs)

  def on_predict_end(self, logs=None):
    """Calls the `on_predict_end` methods of its callbacks.

    Arguments:
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """
    for callback in self.callbacks:
      callback.on_predict_end(logs)

  def __iter__(self):
    return iter(self.callbacks)
@keras_export('keras.callbacks.Callback')
class Callback(object):
  """Abstract base class used to build new callbacks.

  Attributes:
      params: dict. Training parameters
          (eg. verbosity, batch size, number of epochs...).
      model: instance of `keras.models.Model`.
          Reference of the model being trained.
      validation_data: Deprecated. Do not use.

  The `logs` dictionary that callback methods
  take as argument will contain keys for quantities relevant to
  the current batch or epoch.

  Currently, the `.fit()` method of the `Model` class
  will include the following quantities in the `logs` that
  it passes to its callbacks:

  on_epoch_end: logs include `acc` and `loss`, and
      optionally include `val_loss`
      (if validation is enabled in `fit`), and `val_acc`
      (if validation and accuracy monitoring are enabled).
  on_batch_begin: logs include `size`,
      the number of samples in the current batch.
  on_batch_end: logs include `loss`, and optionally `acc`
      (if accuracy monitoring is enabled).
  """

  def __init__(self):
    self.validation_data = None  # Deprecated; kept for backwards compatibility.
    self.model = None
    # Whether this Callback should only run on the chief worker in a
    # Multi-Worker setting.
    # TODO(omalleyt): Make this attr public once solution is stable.
    self._chief_worker_only = None

  def set_params(self, params):
    """Stores the training parameters dict (see class docstring)."""
    self.params = params

  def set_model(self, model):
    """Stores a reference to the model being trained."""
    self.model = model

  def on_batch_begin(self, batch, logs=None):
    """A backwards compatibility alias for `on_train_batch_begin`."""

  def on_batch_end(self, batch, logs=None):
    """A backwards compatibility alias for `on_train_batch_end`."""

  def on_epoch_begin(self, epoch, logs=None):
    """Called at the start of an epoch.

    Subclasses should override for any actions to run. This function should only
    be called during TRAIN mode.

    Arguments:
        epoch: integer, index of epoch.
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """

  def on_epoch_end(self, epoch, logs=None):
    """Called at the end of an epoch.

    Subclasses should override for any actions to run. This function should only
    be called during TRAIN mode.

    Arguments:
        epoch: integer, index of epoch.
        logs: dict, metric results for this training epoch, and for the
          validation epoch if validation is performed. Validation result keys
          are prefixed with `val_`.
    """

  def on_train_batch_begin(self, batch, logs=None):
    """Called at the beginning of a training batch in `fit` methods.

    Subclasses should override for any actions to run.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dict. Has keys `batch` and `size` representing the current batch
          number and the size of the batch.
    """
    # For backwards compatibility.
    self.on_batch_begin(batch, logs=logs)

  def on_train_batch_end(self, batch, logs=None):
    """Called at the end of a training batch in `fit` methods.

    Subclasses should override for any actions to run.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dict. Metric results for this batch.
    """
    # For backwards compatibility.
    self.on_batch_end(batch, logs=logs)

  def on_test_batch_begin(self, batch, logs=None):
    """Called at the beginning of a batch in `evaluate` methods.

    Also called at the beginning of a validation batch in the `fit`
    methods, if validation data is provided.

    Subclasses should override for any actions to run.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dict. Has keys `batch` and `size` representing the current batch
          number and the size of the batch.
    """

  def on_test_batch_end(self, batch, logs=None):
    """Called at the end of a batch in `evaluate` methods.

    Also called at the end of a validation batch in the `fit`
    methods, if validation data is provided.

    Subclasses should override for any actions to run.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dict. Metric results for this batch.
    """

  def on_predict_batch_begin(self, batch, logs=None):
    """Called at the beginning of a batch in `predict` methods.

    Subclasses should override for any actions to run.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dict. Has keys `batch` and `size` representing the current batch
          number and the size of the batch.
    """

  def on_predict_batch_end(self, batch, logs=None):
    """Called at the end of a batch in `predict` methods.

    Subclasses should override for any actions to run.

    Arguments:
        batch: integer, index of batch within the current epoch.
        logs: dict. Metric results for this batch.
    """

  def on_train_begin(self, logs=None):
    """Called at the beginning of training.

    Subclasses should override for any actions to run.

    Arguments:
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """

  def on_train_end(self, logs=None):
    """Called at the end of training.

    Subclasses should override for any actions to run.

    Arguments:
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """

  def on_test_begin(self, logs=None):
    """Called at the beginning of evaluation or validation.

    Subclasses should override for any actions to run.

    Arguments:
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """

  def on_test_end(self, logs=None):
    """Called at the end of evaluation or validation.

    Subclasses should override for any actions to run.

    Arguments:
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """

  def on_predict_begin(self, logs=None):
    """Called at the beginning of prediction.

    Subclasses should override for any actions to run.

    Arguments:
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """

  def on_predict_end(self, logs=None):
    """Called at the end of prediction.

    Subclasses should override for any actions to run.

    Arguments:
        logs: dict. Currently no data is passed to this argument for this method
          but that may change in the future.
    """
@keras_export('keras.callbacks.BaseLogger')
class BaseLogger(Callback):
  """Callback that accumulates epoch averages of metrics.

  This callback is automatically applied to every Keras model.

  Arguments:
      stateful_metrics: Iterable of string names of metrics that
          should *not* be averaged over an epoch.
          Metrics in this list will be logged as-is in `on_epoch_end`.
          All others will be averaged in `on_epoch_end`.
  """

  def __init__(self, stateful_metrics=None):
    super(BaseLogger, self).__init__()
    self.stateful_metrics = set(stateful_metrics or [])

  def on_epoch_begin(self, epoch, logs=None):
    # Reset the running totals at the start of every epoch.
    self.totals = {}
    self.seen = 0

  def on_batch_end(self, batch, logs=None):
    logs = logs or {}
    batch_size = logs.get('size', 0)
    # In case of distribution strategy we can potentially run multiple steps
    # at the same time, we should account for that in the `seen` calculation.
    self.seen += batch_size * logs.get('num_steps', 1)

    for metric, value in logs.items():
      if metric in self.stateful_metrics:
        # Stateful metrics already carry their running value; keep the latest.
        self.totals[metric] = value
      elif metric in self.totals:
        self.totals[metric] += value * batch_size
      else:
        self.totals[metric] = value * batch_size

  def on_epoch_end(self, epoch, logs=None):
    if logs is None:
      return
    for metric in self.params['metrics']:
      if metric not in self.totals:
        continue
      # Make value available to next callbacks.
      if metric in self.stateful_metrics:
        logs[metric] = self.totals[metric]
      else:
        logs[metric] = self.totals[metric] / self.seen
@keras_export('keras.callbacks.TerminateOnNaN')
class TerminateOnNaN(Callback):
  """Callback that stops training as soon as the loss becomes NaN or Inf."""

  def on_batch_end(self, batch, logs=None):
    loss = (logs or {}).get('loss')
    if loss is None:
      return
    if np.isnan(loss) or np.isinf(loss):
      print('Batch %d: Invalid loss, terminating training' % (batch))
      self.model.stop_training = True
@keras_export('keras.callbacks.ProgbarLogger')
class ProgbarLogger(Callback):
  """Callback that prints metrics to stdout.

  Arguments:
      count_mode: One of "steps" or "samples".
          Whether the progress bar should
          count samples seen or steps (batches) seen.
      stateful_metrics: Iterable of string names of metrics that
          should *not* be averaged over an epoch.
          Metrics in this list will be logged as-is.
          All others will be averaged over time (e.g. loss, etc).

  Raises:
      ValueError: In case of invalid `count_mode`.
  """

  def __init__(self, count_mode='samples', stateful_metrics=None):
    super(ProgbarLogger, self).__init__()
    if count_mode not in ('samples', 'steps'):
      raise ValueError('Unknown `count_mode`: ' + str(count_mode))
    self.use_steps = count_mode == 'steps'
    self.stateful_metrics = set(stateful_metrics or [])

  def on_train_begin(self, logs=None):
    self.verbose = self.params['verbose']
    self.epochs = self.params['epochs']

  def on_epoch_begin(self, epoch, logs=None):
    self.seen = 0
    self.target = self.params['steps' if self.use_steps else 'samples']
    if self.verbose and self.epochs > 1:
      print('Epoch %d/%d' % (epoch + 1, self.epochs))
    self.progbar = Progbar(
        target=self.target,
        verbose=self.verbose,
        stateful_metrics=self.stateful_metrics,
        unit_name='step' if self.use_steps else 'sample')

  def on_batch_begin(self, batch, logs=None):
    self.log_values = []

  def on_batch_end(self, batch, logs=None):
    logs = logs or {}
    # In case of distribution strategy we can potentially run multiple steps
    # at the same time, we should account for that in the `seen` calculation.
    num_steps = logs.get('num_steps', 1)
    if self.use_steps:
      self.seen += num_steps
    else:
      self.seen += logs.get('size', 0) * num_steps

    for metric in self.params['metrics']:
      if metric in logs:
        self.log_values.append((metric, logs[metric]))

    # Skip progbar update for the last batch;
    # will be handled by on_epoch_end.
    if self.verbose and (self.target is None or self.seen < self.target):
      self.progbar.update(self.seen, self.log_values)

  def on_epoch_end(self, epoch, logs=None):
    logs = logs or {}
    for metric in self.params['metrics']:
      if metric in logs:
        self.log_values.append((metric, logs[metric]))
    if self.verbose:
      self.progbar.update(self.seen, self.log_values)
@keras_export('keras.callbacks.History')
class History(Callback):
  """Callback that records events into a `History` object.

  This callback is automatically applied to every Keras model. The
  `History` object gets returned by the `fit` method of models.
  """

  def on_train_begin(self, logs=None):
    # Reset accumulated state so the same instance can serve repeated fits.
    self.epoch = []
    self.history = {}

  def on_epoch_end(self, epoch, logs=None):
    logs = logs or {}
    self.epoch.append(epoch)
    # Append every logged metric to its per-key series.
    for key in logs:
      self.history.setdefault(key, []).append(logs[key])
@keras_export('keras.callbacks.ModelCheckpoint')
class ModelCheckpoint(Callback):
  """Save the model after every epoch.

  `filepath` can contain named formatting options,
  which will be filled the value of `epoch` and
  keys in `logs` (passed in `on_epoch_end`).

  For example: if `filepath` is `weights.{epoch:02d}-{val_loss:.2f}.hdf5`,
  then the model checkpoints will be saved with the epoch number and
  the validation loss in the filename.

  Arguments:
    filepath: string, path to save the model file.
    monitor: quantity to monitor.
    verbose: verbosity mode, 0 or 1.
    save_best_only: if `save_best_only=True`, the latest best model according
      to the quantity monitored will not be overwritten.
    mode: one of {auto, min, max}. If `save_best_only=True`, the decision to
      overwrite the current save file is made based on either the maximization
      or the minimization of the monitored quantity. For `val_acc`, this
      should be `max`, for `val_loss` this should be `min`, etc. In `auto`
      mode, the direction is automatically inferred from the name of the
      monitored quantity.
    save_weights_only: if True, then only the model's weights will be saved
      (`model.save_weights(filepath)`), else the full model is saved
      (`model.save(filepath)`).
    save_freq: `'epoch'` or integer. When using `'epoch'`, the callback saves
      the model after each epoch. When using integer, the callback saves the
      model at end of a batch at which this many samples have been seen since
      last saving. Note that if the saving isn't aligned to epochs, the
      monitored metric may potentially be less reliable (it could reflect as
      little as 1 batch, since the metrics get reset every epoch). Defaults to
      `'epoch'`
    load_weights_on_restart: Whether the training should restore the model. If
      True, the model will attempt to load the checkpoint file from `filepath`
      at the start of `model.fit()`. This saves the need of manually calling
      `model.load_weights()` before `model.fit(). In multi-worker distributed
      training, this provides fault-tolerance and loads the model
      automatically upon recovery of workers. The callback gives up loading if
      the filepath does not exist, and raises ValueError if format does not
      match. Defaults to False.
    **kwargs: Additional arguments for backwards compatibility. Possible key
      is `period`.
  """

  def __init__(self,
               filepath,
               monitor='val_loss',
               verbose=0,
               save_best_only=False,
               save_weights_only=False,
               mode='auto',
               save_freq='epoch',
               load_weights_on_restart=False,
               **kwargs):
    super(ModelCheckpoint, self).__init__()
    self.monitor = monitor
    self.verbose = verbose
    self.filepath = filepath
    self.save_best_only = save_best_only
    self.save_weights_only = save_weights_only
    self.save_freq = save_freq
    self.load_weights_on_restart = load_weights_on_restart
    # Epoch counter used when `save_freq == 'epoch'` together with the
    # deprecated `period` argument.
    self.epochs_since_last_save = 0
    # Sample counter used when `save_freq` is an integer.
    self._samples_seen_since_last_saving = 0

    # Deprecated field `period` is for the number of epochs between which
    # the model is saved.
    if 'period' in kwargs:
      self.period = kwargs['period']
      logging.warning('`period` argument is deprecated. Please use `save_freq` '
                      'to specify the frequency in number of samples seen.')
    else:
      self.period = 1

    if mode not in ['auto', 'min', 'max']:
      logging.warning('ModelCheckpoint mode %s is unknown, '
                      'fallback to auto mode.', mode)
      mode = 'auto'

    if mode == 'min':
      self.monitor_op = np.less
      self.best = np.Inf
    elif mode == 'max':
      self.monitor_op = np.greater
      self.best = -np.Inf
    else:
      # 'auto': infer the comparison direction from the metric name.
      if 'acc' in self.monitor or self.monitor.startswith('fmeasure'):
        self.monitor_op = np.greater
        self.best = -np.Inf
      else:
        self.monitor_op = np.less
        self.best = np.Inf

    if self.save_freq != 'epoch' and not isinstance(self.save_freq, int):
      raise ValueError('Unrecognized save_freq: {}'.format(self.save_freq))

    # Only the chief worker writes model checkpoints, but all workers
    # restore checkpoint at on_train_begin().
    self._chief_worker_only = False

  def set_model(self, model):
    self.model = model
    # Use name matching rather than `isinstance` to avoid circular dependencies.
    # Subclassed (non-graph, non-Sequential) models cannot be saved whole, so
    # fall back to weights-only saving for them.
    if (not self.save_weights_only and
        not model._is_graph_network and  # pylint: disable=protected-access
        model.__class__.__name__ != 'Sequential'):
      self.save_weights_only = True

  def on_train_begin(self, logs=None):
    """Optionally restore weights from `filepath` before training starts."""
    # TODO(rchao): Replace dc_context reference with
    # distributed_training_utils.should_current_worker_load_model() once
    # distributed_training_utils.py no longer depends on callbacks.py.
    if K.in_multi_worker_mode(
    ) and not dc_context.get_current_worker_context().experimental_should_init:
      # For multi-worker training, it should not restore a model in certain
      # worker setting (e.g. non-chief worker in ParameterServerStrategy).
      return
    if (self.load_weights_on_restart and self.filepath is not None and
        os.path.exists(self.filepath)):
      try:
        self.model.load_weights(self.filepath)
      except (IOError, ValueError) as e:
        raise ValueError('Error loading file from {}. Reason: {}'.format(
            self.filepath, e))

  def on_batch_end(self, batch, logs=None):
    """Save after every `save_freq` samples when `save_freq` is an int."""
    logs = logs or {}
    if isinstance(self.save_freq, int):
      self._samples_seen_since_last_saving += logs.get('size', 1)
      if self._samples_seen_since_last_saving >= self.save_freq:
        self._save_model(epoch=self._current_epoch, logs=logs)
        self._samples_seen_since_last_saving = 0

  def on_epoch_begin(self, epoch, logs=None):
    # Remember the epoch index so batch-level saves can be labeled with it.
    self._current_epoch = epoch

  def on_epoch_end(self, epoch, logs=None):
    self.epochs_since_last_save += 1
    if self.save_freq == 'epoch':
      self._save_model(epoch=epoch, logs=logs)

  def _save_model(self, epoch, logs):
    """Saves the model.

    Arguments:
      epoch: the epoch this iteration is in.
      logs: the `logs` dict passed in to `on_batch_end` or `on_epoch_end`.
    """
    logs = logs or {}

    if isinstance(self.save_freq,
                  int) or self.epochs_since_last_save >= self.period:
      self.epochs_since_last_save = 0
      # TODO(rchao): Replace dc_context reference with
      # distributed_training_utils.should_current_worker_checkpoint() once
      # distributed_training_utils.py no longer depends on callbacks.py.
      if not K.in_multi_worker_mode() or dc_context.get_current_worker_context(
      ).should_checkpoint:
        filepath = self.filepath.format(epoch=epoch + 1, **logs)
      else:
        # If this is multi-worker training, and this worker should not
        # save checkpoint, we replace the filepath with a dummy filepath so
        # it writes to a file that will be removed at the end of _save_model()
        # call. This is because the SyncOnReadVariable needs to be synced across
        # all the workers in order to be read, and all workers need to initiate
        # that.
        file_handle, temp_file_name = tempfile.mkstemp()
        extension = os.path.splitext(self.filepath)[1]
        filepath = temp_file_name + extension

      if self.save_best_only:
        current = logs.get(self.monitor)
        if current is None:
          logging.warning('Can save best model only with %s available, '
                          'skipping.', self.monitor)
        else:
          if self.monitor_op(current, self.best):
            if self.verbose > 0:
              print('\nEpoch %05d: %s improved from %0.5f to %0.5f,'
                    ' saving model to %s' % (epoch + 1, self.monitor, self.best,
                                             current, filepath))
            self.best = current
            if self.save_weights_only:
              self.model.save_weights(filepath, overwrite=True)
            else:
              self.model.save(filepath, overwrite=True)
          else:
            if self.verbose > 0:
              print('\nEpoch %05d: %s did not improve from %0.5f' %
                    (epoch + 1, self.monitor, self.best))
      else:
        if self.verbose > 0:
          print('\nEpoch %05d: saving model to %s' % (epoch + 1, filepath))
        if self.save_weights_only:
          self.model.save_weights(filepath, overwrite=True)
        else:
          self.model.save(filepath, overwrite=True)

      # Remove the file in multi-worker training where this worker should
      # not checkpoint. All workers wrote (to the dummy path) only so the
      # SyncOnReadVariable read could be synced; the artifact is discarded.
      if K.in_multi_worker_mode(
      ) and not dc_context.get_current_worker_context().should_checkpoint:
        os.close(file_handle)
        os.remove(filepath)
@keras_export('keras.callbacks.EarlyStopping')
class EarlyStopping(Callback):
  """Stop training when a monitored quantity has stopped improving.

  Arguments:
    monitor: Quantity to be monitored.
    min_delta: Minimum change in the monitored quantity
      to qualify as an improvement, i.e. an absolute
      change of less than min_delta, will count as no
      improvement.
    patience: Number of epochs with no improvement
      after which training will be stopped.
    verbose: verbosity mode.
    mode: One of `{"auto", "min", "max"}`. In `min` mode,
      training will stop when the quantity
      monitored has stopped decreasing; in `max`
      mode it will stop when the quantity
      monitored has stopped increasing; in `auto`
      mode, the direction is automatically inferred
      from the name of the monitored quantity.
    baseline: Baseline value for the monitored quantity.
      Training will stop if the model doesn't show improvement over the
      baseline.
    restore_best_weights: Whether to restore model weights from
      the epoch with the best value of the monitored quantity.
      If False, the model weights obtained at the last step of
      training are used.

  Example:

  ```python
  callback = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=3)
  # This callback will stop the training when there is no improvement in
  # the validation loss for three consecutive epochs.
  model.fit(data, labels, epochs=100, callbacks=[callback],
      validation_data=(val_data, val_labels))
  ```
  """

  def __init__(self,
               monitor='val_loss',
               min_delta=0,
               patience=0,
               verbose=0,
               mode='auto',
               baseline=None,
               restore_best_weights=False):
    super(EarlyStopping, self).__init__()

    self.monitor = monitor
    self.patience = patience
    self.verbose = verbose
    self.baseline = baseline
    self.min_delta = abs(min_delta)
    self.wait = 0
    self.stopped_epoch = 0
    self.restore_best_weights = restore_best_weights
    # Snapshot of the best weights seen so far; stays None until the first
    # recorded improvement.
    self.best_weights = None

    if mode not in ['auto', 'min', 'max']:
      logging.warning('EarlyStopping mode %s is unknown, '
                      'fallback to auto mode.', mode)
      mode = 'auto'

    if mode == 'min':
      self.monitor_op = np.less
    elif mode == 'max':
      self.monitor_op = np.greater
    else:
      # 'auto': maximize accuracy-like metrics, minimize everything else.
      self.monitor_op = np.greater if 'acc' in self.monitor else np.less

    # Sign the delta so `monitor_op(current - min_delta, best)` demands an
    # improvement of at least `min_delta` in the monitored direction.
    if self.monitor_op == np.less:
      self.min_delta *= -1

  def on_train_begin(self, logs=None):
    # Allow instances to be re-used across multiple `fit` calls.
    self.wait = 0
    self.stopped_epoch = 0
    if self.baseline is not None:
      self.best = self.baseline
    else:
      self.best = np.Inf if self.monitor_op == np.less else -np.Inf

  def on_epoch_end(self, epoch, logs=None):
    current = self.get_monitor_value(logs)
    if current is None:
      return
    if self.monitor_op(current - self.min_delta, self.best):
      self.best = current
      self.wait = 0
      if self.restore_best_weights:
        self.best_weights = self.model.get_weights()
    else:
      self.wait += 1
      if self.wait >= self.patience:
        self.stopped_epoch = epoch
        self.model.stop_training = True
        # Bug fix: `best_weights` is still None when no improvement was
        # ever recorded (e.g. a `baseline` that was never beaten), and
        # `set_weights(None)` would crash. Only restore when a snapshot
        # actually exists.
        if self.restore_best_weights and self.best_weights is not None:
          if self.verbose > 0:
            print('Restoring model weights from the end of the best epoch.')
          self.model.set_weights(self.best_weights)

  def on_train_end(self, logs=None):
    if self.stopped_epoch > 0 and self.verbose > 0:
      print('Epoch %05d: early stopping' % (self.stopped_epoch + 1))

  def get_monitor_value(self, logs):
    """Return `logs[self.monitor]`, warning (and returning None) if absent."""
    logs = logs or {}
    monitor_value = logs.get(self.monitor)
    if monitor_value is None:
      logging.warning('Early stopping conditioned on metric `%s` '
                      'which is not available. Available metrics are: %s',
                      self.monitor, ','.join(list(logs.keys())))
    return monitor_value
@keras_export('keras.callbacks.RemoteMonitor')
class RemoteMonitor(Callback):
  """Callback used to stream events to a server.

  Requires the `requests` library. Events are sent to
  `root + '/publish/epoch/end/'` by default via HTTP POST, with a `data`
  argument holding a JSON-encoded dictionary of event data. If
  `send_as_json` is True the request content type is application/json;
  otherwise the serialized JSON is sent within a form.

  Arguments:
    root: String; root url of the target server.
    path: String; path relative to `root` to which the events will be sent.
    field: String; JSON field under which the data will be stored.
      Used only when the payload is sent within a form
      (i.e. send_as_json is set to False).
    headers: Dictionary; optional custom HTTP headers.
    send_as_json: Boolean; whether the request should be
      sent as application/json.
  """

  def __init__(self,
               root='http://localhost:9000',
               path='/publish/epoch/end/',
               field='data',
               headers=None,
               send_as_json=False):
    super(RemoteMonitor, self).__init__()
    self.root = root
    self.path = path
    self.field = field
    self.headers = headers
    self.send_as_json = send_as_json

  def on_epoch_end(self, epoch, logs=None):
    if requests is None:
      raise ImportError('RemoteMonitor requires the `requests` library.')
    logs = logs or {}
    # Payload is the epoch index plus every logged metric.
    send = {'epoch': epoch}
    send.update(logs)
    try:
      target = self.root + self.path
      if self.send_as_json:
        requests.post(target, json=send, headers=self.headers)
      else:
        requests.post(target, {self.field: json.dumps(send)},
                      headers=self.headers)
    except requests.exceptions.RequestException:
      # Best-effort delivery: a down server must not kill training.
      logging.warning('Warning: could not reach RemoteMonitor '
                      'root server at ' + str(self.root))
@keras_export('keras.callbacks.LearningRateScheduler')
class LearningRateScheduler(Callback):
  """Learning rate scheduler.

  Arguments:
    schedule: a function that takes an epoch index as input
      (integer, indexed from 0) and returns a new
      learning rate as output (float).
    verbose: int. 0: quiet, 1: update messages.
  """

  def __init__(self, schedule, verbose=0):
    super(LearningRateScheduler, self).__init__()
    self.schedule = schedule
    self.verbose = verbose

  def on_epoch_begin(self, epoch, logs=None):
    if not hasattr(self.model.optimizer, 'lr'):
      raise ValueError('Optimizer must have a "lr" attribute.')
    try:
      # New-style schedules also receive the current learning rate.
      current_lr = float(K.get_value(self.model.optimizer.lr))
      new_lr = self.schedule(epoch, current_lr)
    except TypeError:
      # Old-style schedules take only the epoch index (backward compat).
      new_lr = self.schedule(epoch)
    if not isinstance(new_lr, (float, np.float32, np.float64)):
      raise ValueError('The output of the "schedule" function '
                       'should be float.')
    K.set_value(self.model.optimizer.lr, new_lr)
    if self.verbose > 0:
      print('\nEpoch %05d: LearningRateScheduler reducing learning '
            'rate to %s.' % (epoch + 1, new_lr))

  def on_epoch_end(self, epoch, logs=None):
    logs = logs or {}
    # Expose the effective learning rate to downstream callbacks/History.
    logs['lr'] = K.get_value(self.model.optimizer.lr)
@keras_export('keras.callbacks.TensorBoard', v1=[])
class TensorBoard(Callback):
  # pylint: disable=line-too-long
  """Enable visualizations for TensorBoard.

  TensorBoard is a visualization tool provided with TensorFlow.

  This callback logs events for TensorBoard, including:
  * Metrics summary plots
  * Training graph visualization
  * Activation histograms
  * Sampled profiling

  If you have installed TensorFlow with pip, you should be able
  to launch TensorBoard from the command line:

  ```sh
  tensorboard --logdir=path_to_your_logs
  ```

  You can find more information about TensorBoard
  [here](https://www.tensorflow.org/get_started/summaries_and_tensorboard).

  Arguments:
    log_dir: the path of the directory where to save the log files to be
      parsed by TensorBoard.
    histogram_freq: frequency (in epochs) at which to compute activation and
      weight histograms for the layers of the model. If set to 0, histograms
      won't be computed. Validation data (or split) must be specified for
      histogram visualizations.
    write_graph: whether to visualize the graph in TensorBoard. The log file
      can become quite large when write_graph is set to True.
    write_images: whether to write model weights to visualize as image in
      TensorBoard.
    update_freq: `'batch'` or `'epoch'` or integer. When using `'batch'`,
      writes the losses and metrics to TensorBoard after each batch. The same
      applies for `'epoch'`. If using an integer, let's say `1000`, the
      callback will write the metrics and losses to TensorBoard every 1000
      samples. Note that writing too frequently to TensorBoard can slow down
      your training.
    profile_batch: Profile the batch to sample compute characteristics. By
      default, it will profile the second batch. Set profile_batch=0 to
      disable profiling. Must run in TensorFlow eager mode.

  Raises:
    ValueError: If histogram_freq is set and no validation data is provided.
  """

  # pylint: enable=line-too-long

  def __init__(self,
               log_dir='logs',
               histogram_freq=0,
               write_graph=True,
               write_images=False,
               update_freq='epoch',
               profile_batch=2,
               **kwargs):
    super(TensorBoard, self).__init__()
    self._validate_kwargs(kwargs)

    self.log_dir = log_dir
    self.histogram_freq = histogram_freq
    self.write_graph = write_graph
    self.write_images = write_images
    # Normalize 'batch' to the integer 1 so the update check below is a
    # single numeric comparison against samples seen.
    if update_freq == 'batch':
      self.update_freq = 1
    else:
      self.update_freq = update_freq

    self._samples_seen = 0
    self._samples_seen_at_last_write = 0
    self._current_batch = 0
    self._total_batches_seen = 0
    self._total_val_batches_seen = 0

    # A collection of file writers currently in use, to be closed when
    # training ends for this callback. Writers are keyed by the
    # directory name under the root logdir: e.g., "train" or
    # "validation".
    self._writers = {}
    self._train_run_name = 'train'
    self._validation_run_name = 'validation'

    self._profile_batch = profile_batch
    # True when a trace is running.
    self._is_tracing = False

    # TensorBoard should only write summaries on the chief when in a
    # Multi-Worker setting.
    self._chief_worker_only = True

  def _validate_kwargs(self, kwargs):
    """Handle arguments were supported in V1."""
    if kwargs.get('write_grads', False):
      logging.warning('`write_grads` will be ignored in TensorFlow 2.0 '
                      'for the `TensorBoard` Callback.')
    if kwargs.get('embeddings_freq', False):
      logging.warning('Embeddings will be ignored in TensorFlow 2.0 '
                      'for the `TensorBoard` Callback.')
    if kwargs.get('batch_size', False):
      logging.warning('`batch_size` is no longer needed in the '
                      '`TensorBoard` Callback and will be ignored '
                      'in TensorFlow 2.0.')

    unrecognized_kwargs = set(kwargs.keys()) - {
        'write_grads', 'embeddings_freq', 'embeddings_layer_names',
        'embeddings_metadata', 'embeddings_data', 'batch_size'
    }

    # Only allow kwargs that were supported in V1.
    if unrecognized_kwargs:
      raise ValueError('Unrecognized arguments in `TensorBoard` '
                       'Callback: ' + str(unrecognized_kwargs))

  def set_model(self, model):
    """Sets Keras model and writes graph if specified."""
    self.model = model
    with context.eager_mode():
      # Drop writers from any previous model before attaching to this one.
      self._close_writers()
      if self.write_graph:
        with self._get_writer(self._train_run_name).as_default():
          with summary_ops_v2.always_record_summaries():
            if not model.run_eagerly:
              summary_ops_v2.graph(K.get_graph(), step=0)

            # Only graph networks and Sequential models can be serialized as
            # a Keras model summary; subclassed models cannot.
            summary_writable = (
                self.model._is_graph_network or  # pylint: disable=protected-access
                self.model.__class__.__name__ == 'Sequential')  # pylint: disable=protected-access
            if summary_writable:
              summary_ops_v2.keras_model('keras', self.model, step=0)

  def _close_writers(self):
    """Close all remaining open file writers owned by this callback.

    If there are no such file writers, this is a no-op.
    """
    with context.eager_mode():
      for writer in six.itervalues(self._writers):
        writer.close()
      self._writers.clear()

  def _get_writer(self, writer_name):
    """Get a summary writer for the given subdirectory under the logdir.

    A writer will be created if it does not yet exist.

    Arguments:
      writer_name: The name of the directory for which to create or
        retrieve a writer. Should be either `self._train_run_name` or
        `self._validation_run_name`.

    Returns:
      A `SummaryWriter` object.
    """
    if writer_name not in self._writers:
      path = os.path.join(self.log_dir, writer_name)
      writer = summary_ops_v2.create_file_writer_v2(path)
      self._writers[writer_name] = writer
    return self._writers[writer_name]

  def on_train_begin(self, logs=None):
    # Profiling batch 1 must start before the first batch runs, so the
    # trace is enabled here instead of in on_batch_end.
    if self._profile_batch == 1:
      summary_ops_v2.trace_on(graph=True, profiler=True)
      self._is_tracing = True

  def on_batch_end(self, batch, logs=None):
    """Writes scalar summaries for metrics on every training batch.

    Performs profiling if current batch is in profiler_batches.

    Arguments:
      batch: Integer, index of batch within the current epoch.
      logs: Dict. Metric results for this batch.
    """
    # Don't output batch_size and batch number as TensorBoard summaries
    logs = logs or {}
    self._samples_seen += logs.get('size', 1)
    samples_seen_since = self._samples_seen - self._samples_seen_at_last_write
    if self.update_freq != 'epoch' and samples_seen_since >= self.update_freq:
      self._log_metrics(logs, prefix='batch_', step=self._total_batches_seen)
      self._samples_seen_at_last_write = self._samples_seen
    self._total_batches_seen += 1
    if self._is_tracing:
      # A trace started on the previous batch finishes (exports) here.
      self._log_trace()
    elif (not self._is_tracing and
          self._total_batches_seen == self._profile_batch - 1):
      # Arm the trace one batch early so the target batch is captured.
      self._enable_trace()

  def on_epoch_end(self, epoch, logs=None):
    """Runs metrics and histogram summaries at epoch end."""
    step = epoch if self.update_freq == 'epoch' else self._samples_seen
    self._log_metrics(logs, prefix='epoch_', step=step)

    if self.histogram_freq and epoch % self.histogram_freq == 0:
      self._log_weights(epoch)

  def on_train_end(self, logs=None):
    # Flush any still-running trace before releasing the writers.
    if self._is_tracing:
      self._log_trace()
    self._close_writers()

  def _enable_trace(self):
    if context.executing_eagerly():
      summary_ops_v2.trace_on(graph=True, profiler=True)
      self._is_tracing = True

  def _log_trace(self):
    """Export the active trace (graph + profiler data) and stop tracing."""
    if context.executing_eagerly():
      with self._get_writer(self._train_run_name).as_default(), \
          summary_ops_v2.always_record_summaries():
        # TODO(b/126388999): Remove step info in the summary name.
        summary_ops_v2.trace_export(
            name='batch_%d' % self._total_batches_seen,
            step=self._total_batches_seen,
            profiler_outdir=os.path.join(self.log_dir, 'train'))
      self._is_tracing = False

  def _log_metrics(self, logs, prefix, step):
    """Writes metrics out as custom scalar summaries.

    Arguments:
      logs: Dict. Keys are scalar summary names, values are NumPy scalars.
      prefix: String. The prefix to apply to the scalar summary names.
      step: Int. The global step to use for TensorBoard.
    """
    if logs is None:
      logs = {}

    # Group metrics by the name of their associated file writer. Values
    # are lists of metrics, as (name, scalar_value) pairs.
    logs_by_writer = {
        self._train_run_name: [],
        self._validation_run_name: [],
    }
    validation_prefix = 'val_'
    for (name, value) in logs.items():
      if name in ('batch', 'size', 'num_steps'):
        # Scrub non-metric items.
        continue
      if name.startswith(validation_prefix):
        # 'val_' metrics go to the validation writer, with the prefix
        # stripped so train/validation curves share a chart.
        name = name[len(validation_prefix):]
        writer_name = self._validation_run_name
      else:
        writer_name = self._train_run_name
      name = prefix + name  # assign batch or epoch prefix
      logs_by_writer[writer_name].append((name, value))

    with context.eager_mode():
      with summary_ops_v2.always_record_summaries():
        for writer_name in logs_by_writer:
          these_logs = logs_by_writer[writer_name]
          if not these_logs:
            # Don't create a "validation" events file if we don't
            # actually have any validation data.
            continue
          writer = self._get_writer(writer_name)
          with writer.as_default():
            for (name, value) in these_logs:
              summary_ops_v2.scalar(name, value, step=step)

  def _log_weights(self, epoch):
    """Logs the weights of the Model to TensorBoard."""
    writer = self._get_writer(self._train_run_name)
    with context.eager_mode(), \
        writer.as_default(), \
        summary_ops_v2.always_record_summaries():
      for layer in self.model.layers:
        for weight in layer.weights:
          weight_name = weight.name.replace(':', '_')
          with ops.init_scope():
            weight = K.get_value(weight)
          summary_ops_v2.histogram(weight_name, weight, step=epoch)
          if self.write_images:
            self._log_weight_as_image(weight, weight_name, epoch)
      writer.flush()

  def _log_weight_as_image(self, weight, weight_name, epoch):
    """Logs a weight as a TensorBoard image."""
    w_img = array_ops.squeeze(weight)
    shape = K.int_shape(w_img)
    if len(shape) == 1:  # Bias case
      w_img = array_ops.reshape(w_img, [1, shape[0], 1, 1])
    elif len(shape) == 2:  # Dense layer kernel case
      if shape[0] > shape[1]:
        # Display the longer dimension horizontally.
        w_img = array_ops.transpose(w_img)
        shape = K.int_shape(w_img)
      w_img = array_ops.reshape(w_img, [1, shape[0], shape[1], 1])
    elif len(shape) == 3:  # ConvNet case
      if K.image_data_format() == 'channels_last':
        # Switch to channels_first to display every kernel as a separate
        # image.
        w_img = array_ops.transpose(w_img, perm=[2, 0, 1])
        shape = K.int_shape(w_img)
      w_img = array_ops.reshape(w_img, [shape[0], shape[1], shape[2], 1])

    shape = K.int_shape(w_img)
    # Not possible to handle 3D convnets etc.
    if len(shape) == 4 and shape[-1] in [1, 3, 4]:
      summary_ops_v2.image(weight_name, w_img, step=epoch)
@keras_export('keras.callbacks.ReduceLROnPlateau')
class ReduceLROnPlateau(Callback):
  """Reduce learning rate when a metric has stopped improving.

  Models often benefit from reducing the learning rate by a factor
  of 2-10 once learning stagnates. This callback monitors a
  quantity and if no improvement is seen for a 'patience' number
  of epochs, the learning rate is reduced.

  Example:

  ```python
  reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2,
                                patience=5, min_lr=0.001)
  model.fit(X_train, Y_train, callbacks=[reduce_lr])
  ```

  Arguments:
    monitor: quantity to be monitored.
    factor: factor by which the learning rate will
      be reduced. new_lr = lr * factor
    patience: number of epochs with no improvement
      after which learning rate will be reduced.
    verbose: int. 0: quiet, 1: update messages.
    mode: one of {auto, min, max}. In `min` mode,
      lr will be reduced when the quantity
      monitored has stopped decreasing; in `max`
      mode it will be reduced when the quantity
      monitored has stopped increasing; in `auto`
      mode, the direction is automatically inferred
      from the name of the monitored quantity.
    min_delta: threshold for measuring the new optimum,
      to only focus on significant changes.
    cooldown: number of epochs to wait before resuming
      normal operation after lr has been reduced.
    min_lr: lower bound on the learning rate.
  """

  def __init__(self,
               monitor='val_loss',
               factor=0.1,
               patience=10,
               verbose=0,
               mode='auto',
               min_delta=1e-4,
               cooldown=0,
               min_lr=0,
               **kwargs):
    super(ReduceLROnPlateau, self).__init__()

    self.monitor = monitor
    if factor >= 1.0:
      raise ValueError('ReduceLROnPlateau '
                       'does not support a factor >= 1.0.')
    # Deprecated `epsilon` kwarg maps onto `min_delta`.
    if 'epsilon' in kwargs:
      min_delta = kwargs.pop('epsilon')
      logging.warning('`epsilon` argument is deprecated and '
                      'will be removed, use `min_delta` instead.')
    self.factor = factor
    self.min_lr = min_lr
    self.min_delta = min_delta
    self.patience = patience
    self.verbose = verbose
    self.cooldown = cooldown
    self.cooldown_counter = 0  # Cooldown counter.
    self.wait = 0
    self.best = 0  # Placeholder; _reset() below sets the real initial value.
    self.mode = mode
    self.monitor_op = None
    self._reset()

  def _reset(self):
    """Resets wait counter and cooldown counter.
    """
    if self.mode not in ['auto', 'min', 'max']:
      logging.warning('Learning Rate Plateau Reducing mode %s is unknown, '
                      'fallback to auto mode.', self.mode)
      self.mode = 'auto'
    # Comparison incorporates min_delta: only changes larger than the
    # threshold count as improvement.
    if (self.mode == 'min' or
        (self.mode == 'auto' and 'acc' not in self.monitor)):
      self.monitor_op = lambda a, b: np.less(a, b - self.min_delta)
      self.best = np.Inf
    else:
      self.monitor_op = lambda a, b: np.greater(a, b + self.min_delta)
      self.best = -np.Inf
    self.cooldown_counter = 0
    self.wait = 0

  def on_train_begin(self, logs=None):
    self._reset()

  def on_epoch_end(self, epoch, logs=None):
    logs = logs or {}
    # Expose the effective learning rate to downstream callbacks/History.
    logs['lr'] = K.get_value(self.model.optimizer.lr)
    current = logs.get(self.monitor)
    if current is None:
      logging.warning('Reduce LR on plateau conditioned on metric `%s` '
                      'which is not available. Available metrics are: %s',
                      self.monitor, ','.join(list(logs.keys())))

    else:
      # The cooldown counter is decremented BEFORE the improvement check,
      # so an epoch inside cooldown never increments `wait`.
      if self.in_cooldown():
        self.cooldown_counter -= 1
        self.wait = 0

      if self.monitor_op(current, self.best):
        self.best = current
        self.wait = 0
      elif not self.in_cooldown():
        self.wait += 1
        if self.wait >= self.patience:
          old_lr = float(K.get_value(self.model.optimizer.lr))
          if old_lr > self.min_lr:
            new_lr = old_lr * self.factor
            new_lr = max(new_lr, self.min_lr)
            K.set_value(self.model.optimizer.lr, new_lr)
            if self.verbose > 0:
              print('\nEpoch %05d: ReduceLROnPlateau reducing learning '
                    'rate to %s.' % (epoch + 1, new_lr))
            self.cooldown_counter = self.cooldown
            self.wait = 0

  def in_cooldown(self):
    # True while a recent LR reduction is still "settling in".
    return self.cooldown_counter > 0
@keras_export('keras.callbacks.CSVLogger')
class CSVLogger(Callback):
  """Callback that streams epoch results to a csv file.

  Supports all values that can be represented as a string,
  including 1D iterables such as np.ndarray.

  Example:

  ```python
  csv_logger = CSVLogger('training.log')
  model.fit(X_train, Y_train, callbacks=[csv_logger])
  ```

  Arguments:
    filename: filename of the csv file, e.g. 'run/log.csv'.
    separator: string used to separate elements in the csv file.
    append: True: append if file exists (useful for continuing
        training). False: overwrite existing file,
  """

  def __init__(self, filename, separator=',', append=False):
    self.sep = separator
    self.filename = filename
    self.append = append
    self.writer = None
    self.keys = None
    self.append_header = True
    # Python 2 csv wants binary files; Python 3 wants text with
    # newline handling disabled.
    if six.PY2:
      self.file_flags = 'b'
      self._open_args = {}
    else:
      self.file_flags = ''
      self._open_args = {'newline': '\n'}
    super(CSVLogger, self).__init__()

  def on_train_begin(self, logs=None):
    if self.append:
      if os.path.exists(self.filename):
        with open(self.filename, 'r' + self.file_flags) as f:
          # Only write a header if the existing file is empty.
          self.append_header = not bool(len(f.readline()))
      mode = 'a'
    else:
      mode = 'w'
    self.csv_file = io.open(self.filename,
                            mode + self.file_flags,
                            **self._open_args)

  def on_epoch_end(self, epoch, logs=None):
    logs = logs or {}

    # Bug fix: `collections.Iterable` was deprecated in Python 3.3 and
    # removed in 3.10; use `collections.abc.Iterable` with a Python 2
    # fallback (this file still supports six/PY2).
    try:
      from collections.abc import Iterable
    except ImportError:
      from collections import Iterable

    def handle_value(k):
      """Render a log value as a csv-safe string."""
      is_zero_dim_ndarray = isinstance(k, np.ndarray) and k.ndim == 0
      if isinstance(k, six.string_types):
        return k
      elif isinstance(k, Iterable) and not is_zero_dim_ndarray:
        # 1D iterables (e.g. np.ndarray) are rendered as a quoted list.
        return '"[%s]"' % (', '.join(map(str, k)))
      else:
        return k

    if self.keys is None:
      self.keys = sorted(logs.keys())

    if self.model.stop_training:
      # We set NA so that csv parsers do not fail for this last epoch.
      logs = dict([(k, logs[k]) if k in logs else (k, 'NA') for k in self.keys])

    if not self.writer:

      class CustomDialect(csv.excel):
        delimiter = self.sep

      fieldnames = ['epoch'] + self.keys
      if six.PY2:
        fieldnames = [unicode(x) for x in fieldnames]

      self.writer = csv.DictWriter(
          self.csv_file,
          fieldnames=fieldnames,
          dialect=CustomDialect)
      if self.append_header:
        self.writer.writeheader()

    row_dict = collections.OrderedDict({'epoch': epoch})
    row_dict.update((key, handle_value(logs[key])) for key in self.keys)
    self.writer.writerow(row_dict)
    self.csv_file.flush()

  def on_train_end(self, logs=None):
    self.csv_file.close()
    self.writer = None
@keras_export('keras.callbacks.LambdaCallback')
class LambdaCallback(Callback):
  r"""Callback for creating simple, custom callbacks on-the-fly.

  This callback is constructed with anonymous functions that will be called
  at the appropriate time. Note that the callbacks expects positional
  arguments, as:

  - `on_epoch_begin` and `on_epoch_end` expect two positional arguments:
    `epoch`, `logs`
  - `on_batch_begin` and `on_batch_end` expect two positional arguments:
    `batch`, `logs`
  - `on_train_begin` and `on_train_end` expect one positional argument:
    `logs`

  Arguments:
    on_epoch_begin: called at the beginning of every epoch.
    on_epoch_end: called at the end of every epoch.
    on_batch_begin: called at the beginning of every batch.
    on_batch_end: called at the end of every batch.
    on_train_begin: called at the beginning of model training.
    on_train_end: called at the end of model training.

  Example:

  ```python
  # Print the batch number at the beginning of every batch.
  batch_print_callback = LambdaCallback(
      on_batch_begin=lambda batch,logs: print(batch))

  # Stream the epoch loss to a file in JSON format. The file content
  # is not well-formed JSON but rather has a JSON object per line.
  import json
  json_log = open('loss_log.json', mode='wt', buffering=1)
  json_logging_callback = LambdaCallback(
      on_epoch_end=lambda epoch, logs: json_log.write(
          json.dumps({'epoch': epoch, 'loss': logs['loss']}) + '\n'),
      on_train_end=lambda logs: json_log.close()
  )

  # Terminate some processes after having finished model training.
  processes = ...
  cleanup_callback = LambdaCallback(
      on_train_end=lambda logs: [
          p.terminate() for p in processes if p.is_alive()])

  model.fit(...,
            callbacks=[batch_print_callback,
                       json_logging_callback,
                       cleanup_callback])
  ```
  """

  def __init__(self,
               on_epoch_begin=None,
               on_epoch_end=None,
               on_batch_begin=None,
               on_batch_end=None,
               on_train_begin=None,
               on_train_end=None,
               **kwargs):
    super(LambdaCallback, self).__init__()
    self.__dict__.update(kwargs)
    # Any hook that was not supplied is replaced by a no-op with the
    # matching arity, so every hook is always safely callable.
    def _noop_two(arg, logs):
      return None

    def _noop_one(logs):
      return None

    self.on_epoch_begin = (
        on_epoch_begin if on_epoch_begin is not None else _noop_two)
    self.on_epoch_end = (
        on_epoch_end if on_epoch_end is not None else _noop_two)
    self.on_batch_begin = (
        on_batch_begin if on_batch_begin is not None else _noop_two)
    self.on_batch_end = (
        on_batch_end if on_batch_end is not None else _noop_two)
    self.on_train_begin = (
        on_train_begin if on_train_begin is not None else _noop_one)
    self.on_train_end = (
        on_train_end if on_train_end is not None else _noop_one)
|
prashanthellina/livereload_server | setup.py | from setuptools import setup, find_packages
# Packaging metadata for the livereload_server distribution.
setup(
    name="livereload_server",
    version='0.1',
    description="A livereloading HTTP server for static files",
    keywords='http,server,livereload',
    author='<NAME>',
    author_email="Use the github issues",
    url="https://github.com/prashanthellina/livereload_server",
    license='MIT License',
    # Runtime dependencies: tornado serves HTTP, watchdog watches the FS.
    install_requires=[
        'tornado',
        'watchdog',
    ],
    package_dir={'livereload_server': 'livereload_server'},
    packages=find_packages('.'),
    include_package_data=True,
    # Installs a `livereload_server` console command that runs main().
    entry_points = {
        'console_scripts': [
            'livereload_server = livereload_server:main',
        ],
    },
)
|
prashanthellina/livereload_server | livereload_server/__init__.py | #!/usr/bin/env python
import os
import re
import argparse
import mimetypes
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import tornado.ioloop
import tornado.web
# This JS snippet below is embedded dynamically in every
# HTML response sent back from this server. The JS code
# establishes a connection with the server using Ajax.
# The server terminates connection when *any* file changes
# thus forcing a reload from JS in browser
JS = '''
<script>
function livereload() {
var xmlhttp;
if (window.XMLHttpRequest) {
// code for IE7+, Firefox, Chrome, Opera, Safari
xmlhttp = new XMLHttpRequest();
} else {
// code for IE6, IE5
xmlhttp = new ActiveXObject("Microsoft.XMLHTTP");
}
xmlhttp.onreadystatechange = function() {
if (xmlhttp.readyState==4 && xmlhttp.status==200) {
location.reload();
}
}
xmlhttp.open("GET", "/_listen", true);
xmlhttp.send();
}
livereload();
</script>
'''
class ListenHandler(tornado.web.RequestHandler):
    """Long-poll endpoint (/_listen) used by the injected JS.

    The request is deliberately never finished here; close_all_conns()
    finishes it when a file changes, which makes the browser reload.

    NOTE(review): @tornado.web.asynchronous was removed in Tornado 6 --
    this code appears to target Tornado < 6; confirm the pinned version.
    """
    @tornado.web.asynchronous
    def get(self):
        # add connection to list of known client
        # connections and then keep the conn open
        self.application.conns[id(self)] = self

    def on_connection_close(self):
        # if connection is closed by client for whatever
        # reason, ensure we clean up the state
        if id(self) in self.application.conns:
            del self.application.conns[id(self)]
class LocalFSEventHandler(FileSystemEventHandler):
    """Forwards every filesystem event to a single zero-argument callback."""

    def __init__(self, on_change):
        # Initialise the watchdog base class before storing our callback
        # (the original skipped the super() call).
        super(LocalFSEventHandler, self).__init__()
        self.on_change = on_change

    def on_any_event(self, evt):
        # Any change (create/modify/delete/move) triggers a reload.
        self.on_change()
def close_all_conns(app):
    '''
    Close all the client connections on this server,
    thereby forcing the JS client code to issue a reload
    to the browser.
    '''
    # Swap out the registry first so handlers unregistering themselves in
    # on_connection_close() do not mutate the dict we iterate.
    conns = app.conns
    app.conns = {}
    # dict.values() works on both Python 2 and 3; itervalues() was 2-only.
    for c in conns.values():
        c.finish()
class StaticFileHandler(tornado.web.RequestHandler):
    """Serves files from the configured root, injecting the livereload JS."""

    def get(self, path):
        path = os.path.join(self.application.args.path, path)
        # default file is index.html
        if os.path.isdir(path):
            path = os.path.join(path, 'index.html')
        if not os.path.exists(path):
            # The original returned an empty 200 response here; 404 is correct.
            self.set_status(404)
            return
        # guess mime type from file extension; fall back to a generic type
        # because set_header(None) is invalid
        ctype, _ = mimetypes.guess_type(path)
        self.set_header("Content-Type", ctype or "application/octet-stream")
        # read file contents (with-block closes the previously leaked handle)
        with open(path) as fh:
            d = fh.read()
        # embed the livereload JS just before </body>; the inline (?i) flag
        # must lead the pattern -- mid-pattern it is an error on Python 3.11+
        d = re.sub(r'(?i)<\s*/\s*body\s*>', JS + '</body>', d)
        self.write(d)
def main():
    """Parse CLI arguments, start the filesystem watcher and serve HTTP."""
    parser = argparse.ArgumentParser(description='Run a HTTP server serving'
                                     ' static files with live reload functionality')
    parser.add_argument('path', help='Directory to serve')
    parser.add_argument('--address', default='127.0.0.1')
    # type=int so a CLI-supplied port is not passed along as a string.
    parser.add_argument('--port', type=int, default=8000)
    args = parser.parse_args()

    app = tornado.web.Application([
        (r"/_listen", ListenHandler),
        (r"/(.*)", StaticFileHandler),
    ])
    app.args = args
    app.conns = {}

    # initialize the filesystem listener to track change events;
    # any change closes all open /_listen connections -> browsers reload
    app.fs_observer = Observer()
    app.fs_observer.schedule(LocalFSEventHandler(lambda: close_all_conns(app)),
                             args.path, recursive=True)
    app.fs_observer.start()

    # start server (blocks in the IOLoop)
    app.listen(args.port, args.address)
    tornado.ioloop.IOLoop.current().start()
if __name__ == "__main__":
main()
|
ServiceAndMaintenance/SNM-Email-Crawler | email_crawler.py | <reponame>ServiceAndMaintenance/SNM-Email-Crawler
# NOTE(review): this script targets Python 2 (urllib.urlopen,
# str.translate(None, ...)); it is kept Python 2 compatible here.
import urllib, re

fot = open("emails.csv", "a+")
filepath = 'urls.csv'
with open(filepath) as fp:
    cnt = 1
    # Bug fix: the original printed one line but then fetched the *next*
    # line, skipping the first URL and finally calling urlopen('') at EOF.
    for line in fp:
        site = line.strip()  # strip the newline so it is not written into the CSV
        print("Line {}: {}".format(cnt, site))
        cnt += 1
        if not site:
            continue
        f = urllib.urlopen(site)
        s = f.read()
        f.close()
        email = re.findall(r"[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}", s)
        email = str(email)
        email = email.translate(None, '\'[]')
        if email == "":
            email = "NA"
        print(site + " = " + email + "\n")
        fot.write("\"" + site + "\"" + ",\"" + email + "\"\n")
fot.close()
|
RenaudBCEREMA/gpn-gitlearn | exercice/script.py |
class Complex():
    """A minimal complex number with in-place arithmetic (exercise class).

    The original methods were `pass` stubs; they are implemented here with
    the same signatures (add/sub mutate in place and return None, matching
    the stubs' None return).
    """

    def __init__(self, Re, Im):
        self.Re = Re  # real part
        self.Im = Im  # imaginary part

    def add(self, A):
        """Add complex number *A* to this one in place."""
        self.Re += A.Re
        self.Im += A.Im

    def sub(self, B):
        """Subtract complex number *B* from this one in place."""
        self.Re -= B.Re
        self.Im -= B.Im

    def norm(self):
        """Return the modulus sqrt(Re**2 + Im**2)."""
        return (self.Re ** 2 + self.Im ** 2) ** 0.5

    def arg(self):
        """Return the argument (angle) in radians."""
        import math  # local import: the file has no import section
        return math.atan2(self.Im, self.Re)

    def __str__(self) -> str:
        # e.g. "1+2i" / "1-2i" (the stub returned an empty string)
        return f"{self.Re}{self.Im:+}i"
if __name__ == '__main__':
    # Small demo of the Complex class. The trailing bare "A." that made the
    # whole file a SyntaxError has been removed.
    A = Complex(11, 55)
    B = Complex(71, 5)
    A.add(B)
    A.sub(B)
    print(A.norm())
|
observerss/python.ts | plugins/Dummy/__init__.py | <gh_stars>10-100
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class Dummy:
    """Minimal sample plugin: dummy() returns the literal string "dummy"."""

    def dummy(self):
        """Return the fixed marker string."""
        marker = "dummy"
        return marker
|
observerss/python.ts | plugins/MultiProcessor.py | <gh_stars>10-100
#!/usr/bin/env
# -*- coding: utf-8 -*-
import multiprocessing as mp
import random
import time
from concurrent.futures import ProcessPoolExecutor
# On platforms that default to "fork", switch to forkserver (or spawn as a
# fallback) -- presumably to avoid fork-related issues with the process
# pool below; TODO confirm the motivation.
if mp.get_start_method() == "fork":
    methods = mp.get_all_start_methods()
    mp.set_start_method("forkserver" if "forkserver" in methods else "spawn", force=True)

# Module-level two-worker pool shared by run_in_executor().
executor = ProcessPoolExecutor(2)
def run():
    """Sleep for a random interval below 0.1 ms and return that duration."""
    delay = random.random() / 10000
    time.sleep(delay)
    return delay
def run_in_executor():
    """Run run() in the module-level process pool and block for its result."""
    future = executor.submit(run)
    return future.result()
|
observerss/python.ts | plugins/Excel/__init__.py | import openpyxl
class Excel:
    """Thin openpyxl wrapper exposing 0-indexed cell access on the active sheet."""

    def __init__(self):
        self.workbook = None  # set by open_workbook()

    def open_workbook(self, name):
        """Load the workbook stored at *name*."""
        self.workbook = openpyxl.open(name)

    def read_cell(self, row: int, col: int):
        """Return the value at 0-indexed (row, col) of the active sheet."""
        # Use the public cell() API (1-indexed) instead of the private
        # _cells dict; this also returns None for empty cells instead of
        # raising KeyError.
        return self.workbook.active.cell(row=row + 1, column=col + 1).value

    def write_cell(self, row: int, col: int, value: object):
        """Write *value* at 0-indexed (row, col) of the active sheet."""
        self.workbook.active.cell(row=row + 1, column=col + 1).value = value
|
observerss/python.ts | plugins/Threader.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Threader: 用于测试多线程的模块"""
import random
import threading
import time
from concurrent.futures import ThreadPoolExecutor
# Shared thread pool, the lock guarding the counter, and the counter itself.
executor = ThreadPoolExecutor(10)
lock = threading.Lock()
counter = 0
def incr(value: int):
    """Add *value* to the module-level counter after a random micro-sleep."""
    global counter
    # sleep 0 - 0.0001 second
    delay = random.random() / 10000
    time.sleep(delay)
    with lock:
        counter = counter + value
def incr_in_thread(value: int):
    """Run incr(value) on a fresh fire-and-forget thread."""
    worker = threading.Thread(target=incr, args=(value,))
    worker.start()
def incr_in_executor(value: int):
    """Schedule incr(value) on the module-level thread pool."""
    executor.submit(incr, value)
|
vathanahim/API_Clean_Up | test.py | import numpy as np
import streamlit as st
import pandas as pd
import requests
import json
from pandas.io.json import json_normalize
import matplotlib.pyplot as plt
import plotly.express as px
from streamlit.file_util import streamlit_read
url = "https://api.yelp.com/v3/businesses/search"
api_key = '<KEY>'
def get_data(input1, input2):
    """Query the Yelp business-search API for term *input1* near *input2*.

    Returns the decoded JSON response body as a dict.
    """
    headers = {'Authorization': 'Bearer %s' % api_key}
    params = {'term': str(input1), 'location': str(input2)}
    # NOTE(review): verify=False disables TLS certificate verification --
    # acceptable only for local experiments; confirm before shipping.
    resp = requests.get(url, headers=headers, params=params, verify=False)
    return json.loads(resp.text)
def main():
    """Streamlit UI: query Yelp via get_data() and chart business ratings."""
    with st.form(key = "searchform"):
        # NOTE(review): st.beta_columns was renamed st.columns in newer Streamlit.
        nav1, nav2, nav3 = st.beta_columns([3,2,1])
        with nav1:
            search_term = st.text_input("Search Input")
        with nav2:
            search_term2 = st.text_input("Search Input2")
        with nav3:
            st.text("Search")
            search = st.form_submit_button(label="Search")
    st.success("Searching for {} in {}".format(search_term, search_term2))
    #Results
    if search:
        result = get_data(search_term, search_term2)
        # Flatten the JSON: one row per (business, category) pair.
        # NOTE(review): pandas.io.json.json_normalize is deprecated in favor
        # of pandas.json_normalize.
        df = json_normalize(result["businesses"], sep="_", record_path = "categories", meta=["name", 'alias',
                            "rating", ["coordinates", "latitude"],
                            ["coordinates", 'longitude']],
                            meta_prefix="biz_")
        st.write(df)
        df['biz_rating'] = df['biz_rating'].astype(float)
        fig = px.bar(df, x='alias', y='biz_rating')
        st.plotly_chart(fig)
if __name__ == "__main__":
main() |
Alberto-X3/Alberto-X3 | Utils.py | <reponame>Alberto-X3/Alberto-X3
import discord
from typing import *
from NewClass import AttrDict
from json import load
from traceback import format_exc
from io import BytesIO
'''
You can see all the events in the following URL:
https://discordpy.readthedocs.io/en/latest/api.html#event-reference
'''
EVENT = AttrDict({
"on_connect": 0, # argument(s) --> client: discord.Client, _event: int
"on_shard_connect": 1, # argument(s) --> client: discord.Client, _event: int, shard_id: int
"on_disconnect": 2, # argument(s) --> client: discord.Client, _event: int
"on_shard_disconnect": 3, # argument(s) --> client: discord.Client, _event: int, shard_id: int
"on_ready": 4, # argument(s) --> client: discord.Client, _event: int
"on_shard_ready": 5, # argument(s) --> client: discord.Client, _event: int, shard_id: int
"on_resumed": 6, # argument(s) --> client: discord.Client, _event: int
"on_shard_resumed": 7, # argument(s) --> client: discord.Client, _event: int, shard_id: int
"on_error": 8, # argument(s) --> client: discord.Client, _event: int, event: str, *args, **kwargs
"on_socket_raw_receive": 9, # argument(s) --> client: discord.Client, _event: int, message: Union[bytes, str]
"on_socket_raw_send": 11, # argument(s) --> client: discord.Client, _event: int, payload: Union[bytes, str]
"on_typing": 12, # argument(s) --> client: discord.Client, _event: int, channel: discord.abc.Messageable, user: Union[discord.User, discord.Member], when: datetime.datetime
"on_message": 13, # argument(s) --> client: discord.Client, _event: int, message: discord.Message
"on_message_delete": 14, # argument(s) --> client: discord.Client, _event: int, message: discord.Message
"on_bulk_message_delete": 15, # argument(s) --> client: discord.Client, _event: int, messages: List[discord.Message]
"on_raw_message_delete": 16, # argument(s) --> client: discord.Client, _event: int, payload: discord.RawMessageDeleteEvent
"on_message_edit": 17, # argument(s) --> client: discord.Client, _event: int, before: discord.Message, after: discord.Message
"on_raw_message_edit": 18, # argument(s) --> client: discord.Client, _event: int, payload: discord.RawMessageUpdateEvent
"on_reaction_add": 19, # argument(s) --> client: discord.Client, _event: int, reaction: discord.Reaction, user: Union[discord.Member, discord.User]
"on_raw_reaction_add": 20, # argument(s) --> client: discord.Client, _event: int, payload: discord.RawReactionActionEvent
"on_reaction_remove": 21, # argument(s) --> client: discord.Client, _event: int, reaction: discord.Reaction, user: Union[discord.Member, discord.User]
"on_raw_reaction_remove": 22, # argument(s) --> client: discord.Client, _event: int, payload: discord.RawReactionActionEvent
"on_reaction_clear": 23, # argument(s) --> client: discord.Client, _event: int, message: discord.Message, reactions: List[discord.Reaction]
"on_raw_reaction_clear": 24, # argument(s) --> client: discord.Client, _event: int, payload: discord.RawReactionClearEvent
"on_reaction_clear_emoji": 25, # argument(s) --> client: discord.Client, _event: int, reaction: discord.Reaction
"on_raw_reaction_clear_emoji": 26, # argument(s) --> client: discord.Client, _event: int, payload: discord.RawReactionClearEmojiEvent
"on_private_channel_delete": 27, # argument(s) --> client: discord.Client, _event: int, channel: discord.abc.PrivateChannel
"on_private_channel_create": 28, # argument(s) --> client: discord.Client, _event: int, channel: discord.abc.PrivateChannel
"on_private_channel_update": 29, # argument(s) --> client: discord.Client, _event: int, before: discord.GroupChannel, after: discord.GroupChannel
"on_private_channel_pins_update": 30, # argument(s) --> client: discord.Client, _event: int, channel: discord.abc.PrivateChannel, last_pin: Optional[datetime.datetime]
"on_guild_channel_delete": 31, # argument(s) --> client: discord.Client, _event: int, channel: discord.abc.GuildChannel
"on_guild_channel_create": 32, # argument(s) --> client: discord.Client, _event: int, channel: discord.abc.GuildChannel
"on_guild_channel_update": 33, # argument(s) --> client: discord.Client, _event: int, before: discord.GroupChannel, after: discord.GroupChannel
"on_guild_channel_pins_update": 34, # argument(s) --> client: discord.Client, _event: int, channel: discord.abc.PrivateChannel, last_pin: Optional[datetime.datetime]
"on_guild_integrations_update": 35, # argument(s) --> client: discord.Client, _event: int, guild: discord.Guild
"on_webhooks_update": 36, # argument(s) --> client: discord.Client, _event: int, channel: discord.abc.GuildChannel
"on_member_join": 37, # argument(s) --> client: discord.Client, _event: int, member: discord.Member
"on_member_remove": 38, # argument(s) --> client: discord.Client, _event: int, member: discord.Member
"on_member_update": 39, # argument(s) --> client: discord.Client, _event: int, before: discord.Member, after: discord.Member
"on_user_update": 40, # argument(s) --> client: discord.Client, _event: int, before: discord.User, after: discord.User
"on_guild_join": 41, # argument(s) --> client: discord.Client, _event: int, guild: discord.Guild
"on_guild_remove": 42, # argument(s) --> client: discord.Client, _event: int, guild: discord.Guild
"on_guild_update": 43, # argument(s) --> client: discord.Client, _event: int, before: discord.Guild, after: discord.Guild
"on_guild_role_create": 44, # argument(s) --> client: discord.Client, _event: int, role: discord.Role
"on_guild_role_delete": 45, # argument(s) --> client: discord.Client, _event: int, role: discord.Role
"on_guild_role_update": 46, # argument(s) --> client: discord.Client, _event: int, before: discord.Role, after: discord.Role
"on_guild_emojis_update": 47, # argument(s) --> client: discord.Client, _event: int, guild: discord.Guild, before: Sequence[discord.Emoji], after: Sequence[discord.Emoji]
"on_guild_available": 48, # argument(s) --> client: discord.Client, _event: int, guild: discord.Guild
"on_guild_unavailable": 49, # argument(s) --> client: discord.Client, _event: int, guild: discord.Guild
"on_voice_state_update": 50, # argument(s) --> client: discord.Client, _event: int, member: discord.Member, before: discord.VoiceState, after: discord.VoiceState
"on_member_ban": 51, # argument(s) --> client: discord.Client, _event: int, guild: discord.Guild, user: Union[discord.User, discord.Member]
"on_member_unban": 52, # argument(s) --> client: discord.Client, _event: int, guild: discord.Guild, user: discord.User
"on_invite_create": 53, # argument(s) --> client: discord.Client, _event: int, invite: discord.Invite
"on_invite_delete": 54, # argument(s) --> client: discord.Client, _event: int, invite: discord.Invite
"on_group_join": 55, # argument(s) --> client: discord.Client, _event: int, channel: discord.GroupChannel, user: discord.User
"on_group_remove": 56, # argument(s) --> client: discord.Client, _event: int, channel: discord.GroupChannel, user: discord.User
})
# Load the bot configuration once at import time; the with-block closes the
# handle (the original leaked an open file object).
with open("Configs.json") as _config_file:
    DATA = AttrDict(load(_config_file))
Prefix = DATA.CONSTANTS.Prefix  # command prefix used by the modules
class Help(object):
    """Describes the help text and dispatch behaviour of a bot module."""

    # Fallback shown when a module requests a direct help but provides none.
    direct_help_default = "*Please contact the developer to add a help for this!*"

    def __init__(self, _help: Optional[str] = None, direct_help: Union[str, bool] = False,
                 vanish: bool = False, order_1793: bool = False, order_2004: bool = False):
        """
        _help: :class:`str`
            the printed value of the help
        direct_help: :class:Union[`str`,`bool`]
            the printed value of the help when the module is called directly:
            used verbatim when a `str`, `direct_help_default` when `True`,
            otherwise falls back to `_help`
        vanish: :class:`bool`
            makes the module invisible
        order_1793: :class:`bool`
            activates the module on every message sent without the prefix
        order_2004: :class:`bool`
            activates the module on every message sent
        """
        self.help = _help
        if isinstance(direct_help, str):
            self.direct_help = direct_help
        elif direct_help:
            self.direct_help = self.direct_help_default
        else:
            self.direct_help = self.help
        self.vanish = vanish
        self.order_1793 = order_1793
        self.order_2004 = order_2004
        # The two "order" modes are mutually exclusive; 2004 (all messages) wins.
        if self.order_1793 is True and self.order_2004 is True:
            self.order_1793 = False

    def __str__(self):
        # Placeholder when no help text was provided.
        return self.help if self.supports() else "There is no help set!"

    def supports(self):
        """Return True when a help text is set."""
        return self.help is not None
class Logger:
    """Posts formatted moderation/membership log lines to one text channel."""
    from datetime import datetime  # class attribute, accessed via self.datetime

    def __init__(self, channel: discord.TextChannel) -> None:
        self.channel = channel
        # Message templates, filled positionally by the methods below.
        self.messages = AttrDict({
            "join": "*{}* joined this server! ID: {}; account creation: {}, {} days ago",
            "left": "*{}* left this server! ID: {}; account creation: {}, {} days ago; server joined: {}, {} days ago",
            "rules": "*{}* accepted the rules",
            "kick": "*{}* kicked *{}* with reason *{}*",
            "ban": "*{}* banned *{}* with reason *{}*",
            "unban": "*{}* unbanned *{}* with reason *{}*",
            "softban": "*{}* softbanned *{}* with reason *{}*",
            "delete": "*{}* deleted *{}* messages in *{}*",
            "not implemented": "{}; {}; {}; {}; {}"
        })

    async def _send(self, key: str, *args) -> None:
        # Helper shared by all log methods: format the template for *key*
        # with *args* and post it (deduplicates the former copy-paste).
        await self.channel.send(self.messages[key].format(*args))

    async def join(self, member: discord.Member) -> None:
        """Log a member joining, including the account age in days."""
        now = self.datetime.now()
        await self._send("join", member.mention, member.id,
                         member.created_at, (now - member.created_at).days)

    async def left(self, member: discord.Member) -> None:
        """Log a member leaving, with account age and membership length."""
        now = self.datetime.now()
        await self._send("left", member.mention, member.id,
                         member.created_at, (now - member.created_at).days,
                         member.joined_at, (now - member.joined_at).days)

    async def rules(self, user: discord.User) -> None:
        """Log a user accepting the rules."""
        await self._send("rules", user.mention)

    async def kick(self, user: discord.User, target: discord.User, reason: str) -> None:
        """Log *user* kicking *target* for *reason*."""
        await self._send("kick", user.mention, target.mention, reason)

    async def ban(self, user: discord.User, target: discord.User, reason: str) -> None:
        """Log *user* banning *target* for *reason*."""
        await self._send("ban", user.mention, target.mention, reason)

    async def unban(self, user: discord.User, target: discord.User, reason: str) -> None:
        """Log *user* unbanning *target* for *reason*."""
        await self._send("unban", user.mention, target.mention, reason)

    async def softban(self, user: discord.User, target: discord.User, reason: str) -> None:
        """Log *user* softbanning *target* for *reason*."""
        await self._send("softban", user.mention, target.mention, reason)

    async def delete(self, user: discord.User, count: int, channel: discord.TextChannel) -> None:
        """Log *user* bulk-deleting *count* messages in *channel*."""
        await self._send("delete", user.mention, count, channel.mention)

    async def not_implemented(self,
                              content: str = None,
                              user: discord.User = None,
                              target: discord.User = None,
                              comments: str = None,
                              channel: discord.TextChannel = None
                              ) -> None:
        """Debug logger for not-yet-implemented actions; dumps all arguments."""
        await self._send("not implemented", f"{content=}", f"{user=}",
                         f"{target=}", f"{comments=}", f"{channel=}")
def perms(_id: str) -> AttrDict:
    """Return the permission tree for user *_id* from perms.json.

    Unknown users get the `default` entry, which is then persisted for them.
    """
    from json import load, dump
    # with-blocks close the handles the original version leaked.
    with open("perms.json") as fp:
        PERMS = AttrDict(load(fp))
    try:
        user_perms = PERMS[_id]
    except KeyError:
        user_perms = PERMS.default
        PERMS[_id] = user_perms
        # NOTE(review): AttrDict stores its data in __dict__, not in the
        # underlying dict storage -- json.dump may serialize it as {};
        # verify perms.json survives this round-trip.
        with open("perms.json", "w") as fp:
            dump(PERMS, fp, indent=2)
    return user_perms
async def send_exception(client: discord.Client, exception: Exception, source_name: str, mention_role: Optional[int] = 820974562770550816, pin: bool = True, timestamp: bool = True):
    """
    Report *exception* to the Super_Log channel when `DATA.debug` is disabled;
    re-raise it otherwise so local debugging sees the full traceback.

    exception: the caught exception (format_exc() must still describe it,
        i.e. call this from inside the `except` block)
    source_name: shown as the embed title (usually the module's __name__)
    mention_role: role id to ping after the report, or a falsy value to skip
    pin: whether to pin the report message in the channel
    timestamp: whether to append the message's creation time as an embed field
    """
    if not DATA.debug:
        super_log: discord.TextChannel = client.get_channel(DATA.IDs.Channels.Super_Log)
        # Attach the full traceback as a file so long tracebacks are not cut off.
        file = discord.File(BytesIO(format_exc().encode()), "exception.log")
        embed: discord.Embed = discord.Embed(title=source_name,
                                             description=f"{exception.__class__.__name__}: {exception.__str__()}\n",
                                             color=discord.Color.magenta())
        message: discord.Message = await super_log.send(embed=embed, file=file)
        if timestamp:
            from discord.utils import snowflake_time
            # The message id is a Discord snowflake encoding its creation time.
            embed.add_field(name="datetime.datetime",
                            value=snowflake_time(message.id).__str__())
            await message.edit(embed=embed)
        if pin:
            await message.pin()
        if mention_role:
            await super_log.send(f"<@&{mention_role}>")
    else:
        raise exception
|
Alberto-X3/Alberto-X3 | Modules/nicks.py | <reponame>Alberto-X3/Alberto-X3
from discord import Client, Member
from Utils import Help, EVENT, send_exception
HELP = Help(vanish=True, order_1793=True)
EVENTS = [EVENT.on_member_update, EVENT.on_ready]
sep = " | "
DEV = "Dev"
MOD = "Mod"
SUP = "Sup"
VIP = "VIP"
SLY = "Sly"
ALB = "Aty"
nicks = {
832244630582067270: DEV, # @Developer
733966411663278141: MOD, # @Moderaty
828370010866843668: SUP, # @Supporty
861648252445392896: SLY, # @News Sammly
674637758869930007: VIP, # @VIP
831629269414182943: ALB, # @Level 30
831628612171071498: ALB, # @Level 25
831628364803997757: ALB, # @Level 20
831628201406627871: ALB, # @Level 15
831628459222237227: ALB, # @Level 10
831915216475914273: ALB, # @Level 5
707911694277673081: ALB, # @50er
638630227966296074: ALB, # @Albertany
}
async def __main__(client: Client, _event: int,
                   before: Member = None, after: Member = None):
    """Enforce role-based nickname prefixes: full sweep on ready, otherwise per update."""
    try:
        if _event == EVENT.on_ready:
            # Sweep every member of the guild when the bot (re)connects.
            guild = client.get_guild(632526390113337346)
            for member in guild.members:
                await repair(member)
        elif _event == EVENT.on_member_update:
            await repair(after)
    except Exception as e:
        await send_exception(client=client, exception=e, source_name=__name__)
async def repair(member: Member) -> None:
    """Prepend the nick tag mapped to the member's top role, if missing."""
    prefix = nicks.get(member.top_role.id)
    if prefix is None:
        return
    tag = prefix + sep
    current = member.nick
    if current is None or not current.startswith(tag):
        # Discord nicknames are capped at 32 characters.
        await member.edit(nick=(tag + member.display_name)[:32])
|
Alberto-X3/Alberto-X3 | Modules/ban.py | import discord
import Utils
HELP = Utils.Help("bans a user by iD", f"_{Utils.Prefix}ban iD (reason)_\nrequires Admin.ban")
EVENTS = [Utils.EVENT.on_message]
ALIASES = []
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Ban a user by id (`ban <iD> [reason]`); requires the Admin.ban permission.

    Rewritten with guard clauses: the original nested four levels deep and
    re-split message.content five times.
    """
    try:
        user_perms = Utils.perms(str(message.author.id))
        if not user_perms.Admin.ban:
            await message.channel.send(":x: requires Admin.ban")
            return
        parts = message.content.split()
        # Both "missing argument" and "non-numeric argument" produce the
        # same reply (as before, where the message was duplicated).
        if len(parts) < 2 or not parts[1].isnumeric():
            await message.channel.send(":x: Please enter the User-iD")
            return
        reason = " ".join(parts[2:]) if len(parts) > 2 else "No reason..."
        logger = Utils.Logger(channel=await client.fetch_channel(Utils.DATA.IDs.Channels.Logs))
        channel: discord.TextChannel = message.channel
        guild: discord.Guild = message.guild  # type: ignore
        user: discord.User = await client.fetch_user(int(parts[1]))
        handler: discord.User = message.author
        try:
            # Best effort: DM the target first; they may have DMs disabled.
            await user.send(f"You where banned from the {guild} Server.\n_{reason}_")
        except discord.Forbidden:
            pass
        try:
            await guild.ban(user=user, reason=reason)
            await logger.ban(user=handler, target=user, reason=reason)
            await message.delete()
        except ValueError:
            await channel.send(":x: ERROR :x:")
        except discord.Forbidden:
            pass
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | NewClass/__init__.py | <reponame>Alberto-X3/Alberto-X3<filename>NewClass/__init__.py
__author__ = "<NAME>"
from .AttrDict import AttrDict
|
Alberto-X3/Alberto-X3 | Modules/ping.py | <gh_stars>1-10
import discord
import Utils
import datetime
HELP = Utils.Help("shows the ping")
EVENTS = [Utils.EVENT.on_message]
ALIASES = ["🏓"]
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Reply with the websocket (API) latency and the message round-trip time."""
    try:
        api = round(client.latency, 2)  # gateway heartbeat latency, seconds
        delta = datetime.datetime.utcnow() - message.created_at
        msg = round(delta.total_seconds(), 2)
        await message.channel.send(f"Pong 🏓\n> API latency: {api} seconds\n> Message latency: {msg} seconds")
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/credits.py | import discord
import Utils
HELP = Utils.Help("displays the credits", "here are the credits with the code...")
EVENTS = [Utils.EVENT.on_message]
ALIASES = ["c", "code"]
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Replies with an embed linking the bot's source code and crediting the authors."""
    try:
        # NOTE(review): the leading whitespace inside this f-string is part
        # of the embed text; the original indentation could not be recovered
        # exactly from the flattened source -- verify against the repository.
        await message.channel.send(embed=discord.Embed(description=f"""
~~Modular Code: [here](https://github.com/Alberto-X3/ModularDiscordPyBot)~~
My Code: [here](https://github.com/Alberto-X3/Alberto-X3)
source code by <@546320163276849162>
profile pictures by <@665288034274639873>
"""))
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/restart.py | import discord
import Utils
HELP = Utils.Help("restarts the Bot", "requires Admin.Bot.restart")
EVENTS = [Utils.EVENT.on_message]
ALIASES = ["re"]
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Shut the client down for a restart; requires Admin.Bot.restart."""
    try:
        user_perms = Utils.perms(str(message.author.id))
        if not user_perms.Admin.Bot.restart:
            await message.channel.send(":x: requires Admin.Bot.restart")
            return
        # Notify the developer, go offline, then close the client; the
        # surrounding process supervisor is expected to start it again.
        coder = await client.fetch_user(Utils.DATA.Author_id)
        await coder.send(f"**__starting restart by {message.author}__**")
        await client.change_presence(status=discord.Status.offline)
        await client.close()
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/antispam.py | import discord
import Utils
from datetime import timedelta
HELP = Utils.Help(vanish=True, order_1793=True)
EVENTS = [Utils.EVENT.on_message]
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Deletes a message when its author posted identical content within the last 30 minutes."""
    try:
        # Bots, listed admins and one excluded channel are never rate-limited.
        if message.author.bot or message.author.id in Utils.DATA.IDs.Admins or message.channel.id == 757587246441562153:
            return
        # Scan up to 20 messages preceding this one for an identical post
        # (same author, same content, same attachments and embeds).
        async for message_ in message.channel.history(limit=20, before=message.created_at):
            message_: discord.Message
            if message_.author == message.author and not message_.author == client.user:
                if message_.content == message.content and message_.attachments == message.attachments and message_.embeds == message.embeds:
                    # A duplicate older than 30 minutes is allowed again;
                    # stop scanning (history is newest-first, so anything
                    # further back is older still).
                    if message.created_at - message_.created_at > timedelta(minutes=30):
                        break
                    await message.delete()
                    await message.channel.send(
                        f"**__Anti-spam__**\n"
                        f":x: Please don't spam {message.author.mention}!",
                        delete_after=5)
                    break
    except discord.NotFound:
        # The message was already deleted by someone else -- nothing to do.
        pass
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/blacklisted_chars.py | <reponame>Alberto-X3/Alberto-X3<filename>Modules/blacklisted_chars.py
"""
deletes all messages with blacklisted content only
"""
from discord import Message, Client
from Utils import Help, EVENT, send_exception
HELP = Help(vanish=True, order_1793=True)
EVENTS = [EVENT.on_message]
BLACKLISTED = (
"\u200b",
"\u200c",
"\u200d",
"\n",
" "
)
async def __main__(client: Client, _event: int, message: Message):
    """Delete messages whose entire content consists of blacklisted characters."""
    try:
        content = message.content
        if message.author.id == client.user.id or not content:
            return
        # A single non-blacklisted character makes the message legitimate.
        if any(ch not in BLACKLISTED for ch in content):
            return
        await message.delete()
        await message.channel.send(
            f"**__Anti-spam__**\n"
            f":x: Don't send empty messages {message.author.mention}!",
            delete_after=5)
    except Exception as e:
        await send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | NewClass/AttrDict.py | from typing import *
_KT = TypeVar('KT') # Key type.
_VT = TypeVar('VT') # Value type.
_VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers.
class AttrDict(dict):
    """Dict subclass exposing keys as attributes; spaces in keys become underscores.

    NOTE(review): values live in the instance ``__dict__``, not in the
    underlying ``dict`` storage -- consumers that iterate the real dict at
    C level (e.g. ``json.dump``) may therefore see an empty mapping; verify.
    """

    def __init__(self, data: dict) -> None:
        # Recursively wrap nested plain dicts (exact type check on purpose,
        # so existing AttrDict values are not re-wrapped).
        for key in list(data.keys()):
            if type(data[key]) == dict:
                self.__dict__[key.replace(" ", "_")] = self.__class__(data[key])
            else:
                self.__dict__[key.replace(" ", "_")] = data[key]

    def __repr__(self) -> str:
        """Render the nested structure as a dict-literal-like string."""
        def recursive(data) -> str:
            try:
                __ = ""
                for key in list(data.__dict__.keys()):
                    if isinstance(data[key], self.__class__):
                        __ += f"{key!r}: " "{" + recursive(data[key]) + "}, "
                    else:
                        __ += f"{key!r}: {data[key]!r}, "
                return __[:-2]
            except RecursionError:
                return "The Data in this class might be to big and because of that a RecursionError occurred"
        return "{" + recursive(self) + "}"
    __str__ = __repr__

    # Item and attribute access are unified; both normalise spaces to underscores.
    def __getitem__(self, item): return self.__dict__.__getitem__(item.replace(" ", "_"))
    def __getattr__(self, item): return self.__getitem__(item)
    def __setitem__(self, key, value): self.__dict__[key.replace(" ", "_")] = value
    def __setattr__(self, key, value): return self.__setitem__(key, value)
    def __contains__(self, item): return self.__dict__.__contains__(item)
    def __delitem__(self, key): return self.__dict__.__delitem__(key)
    # Comparison and container dunders all delegate to __dict__.
    def __eq__(self, other): return self.__dict__.__eq__(other)
    def __ge__(self, other): return self.__dict__.__ge__(other)
    def __gt__(self, other): return self.__dict__.__gt__(other)
    def __iter__(self): return self.__dict__.__iter__()
    def __len__(self): return self.__dict__.__len__()
    def __le__(self, other): return self.__dict__.__le__(other)
    def __lt__(self, other): return self.__dict__.__lt__(other)
    def __ne__(self, other): return self.__dict__.__ne__(other)
    def __reversed__(self): return self.__dict__.__reversed__()
    def __sizeof__(self): return self.__dict__.__sizeof__()
    # dict API, again delegating to __dict__ rather than the dict storage.
    def clear(self) -> None: return self.__dict__.clear()
    def copy(self) -> Dict[_KT, _VT]: return self.__dict__.copy()
    def get(self, k: _KT) -> Optional[_VT_co]: return self.__dict__.get(k)
    def items(self) -> ItemsView[_KT, _VT]: return self.__dict__.items()
    def keys(self) -> KeysView[_KT]: return self.__dict__.keys()
    def pop(self, k: _KT) -> _VT: return self.__dict__.pop(k)
    def popitem(self) -> Tuple[_KT, _VT]: return self.__dict__.popitem()
    def setdefault(self, k: _KT, default: _VT = ...) -> _VT: return self.__dict__.setdefault(k, default)
    def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: return self.__dict__.update(__m, **kwargs)
    def values(self) -> ValuesView[_VT]: return self.__dict__.values()
|
Alberto-X3/Alberto-X3 | Modules/new_talk.py | import discord
import Utils
from random import randrange
EVENTS = [Utils.EVENT.on_voice_state_update]
name_range = [1, 1793]
async def __main__(client: discord.Client, _event: int, member: discord.Member,
                   before: discord.VoiceState, after: discord.VoiceState):
    """Create a personal "Talk <n>" voice channel on join; delete it once empty.

    The original compared the channel name against every value in
    range(1, 1793); the numeric suffix is now checked directly.
    """
    try:
        if before.channel is not None and before.channel.name.startswith("Talk "):
            suffix = before.channel.name[len("Talk "):]
            # Must render exactly like f"Talk {n}" (no leading zeros) with
            # n inside name_range, matching the old range scan.
            if suffix.isdigit() and suffix == str(int(suffix)) and int(suffix) in range(*name_range):
                if len(before.channel.members) == 0:
                    await before.channel.delete()
        if after.channel is not None and after.channel.id == 829778748026257458:
            name = randrange(*name_range)
            new_talk: discord.VoiceChannel = await after.channel.clone(
                name=f"Talk {name}")
            await member.move_to(new_talk)
    except discord.NotFound:
        # Channel already gone / member already left -- nothing to do.
        pass
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/super_logger.py | from discord import Client, Message, Member, User, VoiceState, Role, TextChannel, Embed, Color, File
from Utils import Help, Union, EVENT, DATA, send_exception
HELP = Help(vanish=True, order_2004=True)
EVENTS = [
EVENT.on_message,
EVENT.on_message_delete,
EVENT.on_message_edit,
EVENT.on_ready,
EVENT.on_voice_state_update,
EVENT.on_user_update,
EVENT.on_member_update
]
async def _collect_files(msg: Message, tag: str = "EMBED"):
    """Gather forwardable File objects from *msg*.

    Attachments are downloaded into in-memory buffers; embeds are serialized
    to JSON via a scratch file. Returns a list of discord.File, or None when
    the message carries neither.

    Review fixes: buffers are rewound before upload (File() reads from the
    current position, so unrewound buffers uploaded empty files) and embed
    dumps no longer overwrite an already-collected attachment list.
    """
    from io import BytesIO
    from json import dump
    files = None
    if msg.attachments:
        files = []
        for attachment in msg.attachments:
            fp = BytesIO()
            await attachment.save(fp)
            fp.seek(0)  # rewind so the upload contains the data, not an empty tail
            files.append(File(fp, attachment.filename))
    if msg.embeds:
        if files is None:
            files = []
        for i, embed_ in enumerate(msg.embeds):
            with open("_.log", "w") as fp:
                dump(embed_.to_dict(), fp, indent=2)
            with open("_.log", "rb") as fp:
                files.append(File(fp, f"{tag}-{i}.json"))
    return files


def _flags(msg: Message) -> str:
    """Suffix markers noting that a message carried embeds/attachments."""
    return (" | EMBED" if msg.embeds else "") + (" | ATTACHMENT" if msg.attachments else "")


def _where(msg: Message) -> str:
    """' category | #channel' for guild messages, ' DM' otherwise."""
    if hasattr(msg.channel, "category"):
        return f" {msg.channel.category} | {msg.channel.mention}"
    return " DM"


async def __main__(client: Client, _event: int, *args: Union[Message, Member, VoiceState, User, Member, Role]):
    """Mirror gateway events into the Super_Log channel as embeds.

    *args* matches discord.py's signature for the given event: (message,),
    (before, after), or (member, before, after).

    Review fix: the on_message_edit "after" field previously tested args[0]
    (the old message) for the EMBED/ATTACHMENT markers instead of args[1].
    """
    try:
        super_log: TextChannel = client.get_channel(DATA.IDs.Channels.Super_Log)
        attachments = None
        if _event == EVENT.on_message:
            datetime_edit = False
            if args[0].channel.id == super_log.id:
                return  # never log the log channel itself (feedback loop)
            embed: Embed = Embed(title=f"on_message | <{args[0].jump_url}> |" + _where(args[0]),
                                 description=args[0].content + _flags(args[0]),
                                 color=Color.gold())
            # NOTE(review): url= makes the author name a link to the avatar;
            # icon_url may have been intended — confirm.
            embed.set_author(name=args[0].author, url=args[0].author.avatar_url)
            embed.add_field(name="datetime.datetime",
                            value=args[0].created_at)
            attachments = await _collect_files(args[0])
        elif _event == EVENT.on_message_delete:
            datetime_edit = False
            embed = Embed(title=f"on_message_delete | <{args[0].jump_url}> |" + _where(args[0]),
                          description=args[0].content + _flags(args[0]),
                          color=Color.gold())
            embed.set_author(name=args[0].author, url=args[0].author.avatar_url)
            embed.add_field(name="datetime.datetime",
                            value=args[0].created_at)
            attachments = await _collect_files(args[0])
        elif _event == EVENT.on_message_edit:
            datetime_edit = False
            if args[0].author.id == client.user.id:
                return  # ignore the bot's own edits (e.g. the datetime patch below)
            embed = Embed(title=f"on_message_edit | <{args[0].jump_url}> |" + _where(args[0]),
                          color=Color.gold())
            embed.set_author(name=args[0].author, url=args[0].author.avatar_url)
            embed.add_field(name=f"before ({args[0].created_at})",
                            value=args[0].content + _flags(args[0]))
            embed.add_field(name=f"after ({args[0].edited_at})",
                            value=args[1].content + _flags(args[1]))
            old_files = await _collect_files(args[0], "EMBED-OLD") or []
            new_files = await _collect_files(args[1], "EMBED-NEW") or []
            attachments = (old_files + new_files) or None
        elif _event == EVENT.on_ready:
            datetime_edit = True
            embed = Embed(title=f"on_ready",
                          color=Color.green())
        elif _event == EVENT.on_voice_state_update:
            datetime_edit = True
            embed = Embed(title=f"on_voice_state_update",
                          color=Color.greyple())
            embed.set_author(name=args[0].__str__(), url=args[0].avatar_url)
            before, after = args[1], args[2]
            if before.channel is None:
                embed.description = f"joined {after.channel}"
            if after.channel is None:
                embed.description = f"leaved {before.channel}"
            if before.channel is not None and after.channel is not None and before.channel != after.channel:
                embed.add_field(name="moved", value=f"{before.channel} -> {after.channel}")
            if before.deaf != after.deaf:
                embed.add_field(name="server deafen", value=f"{before.deaf} -> {after.deaf}")
            if before.mute != after.mute:
                embed.add_field(name="server mute", value=f"{before.mute} -> {after.mute}")
            if before.self_deaf != after.self_deaf:
                embed.add_field(name="self self_deafen", value=f"{before.self_deaf} -> {after.self_deaf}")
            if before.self_mute != after.self_mute:
                embed.add_field(name="self mute", value=f"{before.self_mute} -> {after.self_mute}")
            if before.self_stream != after.self_stream:
                embed.add_field(name="self stream", value=f"{before.self_stream} -> {after.self_stream}")
            if before.self_video != after.self_video:
                embed.add_field(name="self video", value=f"{before.self_video} -> {after.self_video}")
            if before.afk != after.afk:
                embed.add_field(name="afk", value=f"{before.afk} -> {after.afk}")
        elif _event == EVENT.on_user_update:
            datetime_edit = True
            embed = Embed(title=f"on_user_update",
                          color=Color.blurple())
            embed.set_author(name=args[0].__str__(), url=args[0].avatar_url)
            if args[0].avatar != args[1].avatar:
                embed.add_field(name="avatar", value=f"[IMG]({args[0].avatar_url}) -> [IMG]({args[1].avatar_url})")
            if args[0].name != args[1].name:
                embed.add_field(name="name", value=f"{args[0].name} -> {args[1].name}")
            if args[0].discriminator != args[1].discriminator:
                embed.add_field(name="discriminator", value=f"{args[0].discriminator} -> {args[1].discriminator}")
        elif _event == EVENT.on_member_update:
            datetime_edit = True
            embed = Embed(title=f"on_member_update",
                          color=Color.blue())
            embed.set_author(name=args[0].__str__(), url=args[0].avatar_url)
            if args[0].nick != args[1].nick:
                embed.add_field(name="nickname", value=f"{args[0].nick} -> {args[1].nick}")
            if args[0].roles != args[1].roles:
                if len(args[0].roles) < len(args[1].roles):
                    gained = [f"**+** `{role.name}` `{role.id}`"
                              for role in args[1].roles if role not in args[0].roles]
                    embed.add_field(name="role", value="\n".join(gained))
                if len(args[0].roles) > len(args[1].roles):
                    lost = [f"**-** `{role.name}` `{role.id}`"
                            for role in args[0].roles if role not in args[1].roles]
                    embed.add_field(name="role", value="\n".join(lost))
                if len(args[0].roles) == len(args[1].roles):
                    # Same count but different roles: a swap the diff above can't show.
                    embed.add_field(name="role", value="***__PLEASE CONTACT A <@820974562770550816>!!!__***")
            if len(embed.fields) == 0:
                return  # nothing visible changed
        else:
            datetime_edit = True
            embed = Embed()
        message: Message = await super_log.send(embed=embed, files=attachments)
        if datetime_edit:
            # Events without a message timestamp get the log message's own
            # snowflake time patched in after sending.
            from discord.utils import snowflake_time
            embed.add_field(name="datetime.datetime",
                            value=snowflake_time(message.id).__str__(),
                            inline=False)
            await message.edit(embed=embed)
    except Exception as e:
        await send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/no_dead_chat.py | <reponame>Alberto-X3/Alberto-X3<gh_stars>1-10
"""
deletes message with "https://tenor.com/view/pacman-gif-21447981" only
content and sends "https://tenor.com/view/server-chat-is-dead-stayin-alive-ive-been-kick-aroud-active-members-gif-17860742"
to the chat
"""
from discord import Message, Client
from Utils import Help, EVENT, send_exception
HELP = Help(vanish=True, order_1793=True)
EVENTS = [EVENT.on_message]
# Trigger gif ("dead chat" pacman) and the replacement that gets posted instead.
BAD = "https://tenor.com/view/pacman-gif-21447981"
GOOD = "https://tenor.com/view/server-chat-is-dead-stayin-alive-ive-been-kick-aroud-active-members-gif-17860742"
async def __main__(client: Client, _event: int, message: Message):
    """Replace the 'dead chat' pacman gif with the 'stayin alive' gif."""
    try:
        if message.author.id == client.user.id:
            return  # never react to ourselves
        if message.content != BAD:
            return
        await message.reply(GOOD)
        await message.delete()
    except Exception as e:
        await send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | levels_config_sqlite.py | <filename>levels_config_sqlite.py
"""
DB-Structure:
========================
lvl:
------------------------
:user:<int>
ID from the user
:level:<int>
LVL from the user
:xp:<int>
XP from the user
"""
from sqlite3 import connect
# One-shot migration: make sure the ``lvl`` table exists (see module docstring).
create = "CREATE TABLE IF NOT EXISTS lvl\n" \
         "(user integer, level integer, xp integer)"

con = connect("levels.sqlite")
cur = con.cursor()
try:
    # Commit only after the DDL succeeded; the finally guarantees the
    # connection is released even if execute() raises (it previously leaked).
    cur.execute(create)
    con.commit()
finally:
    con.close()
|
Alberto-X3/Alberto-X3 | Modules/ping_me_for_help.py | import discord
import Utils
HELP = Utils.Help(vanish=True, order_1793=True)
EVENTS = [Utils.EVENT.on_message]  # reacts to bare mentions of the bot
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Answer a message that consists solely of the bot's mention with the prefix."""
    try:
        if message.content != client.user.mention:
            return
        await message.channel.send(f"My Prefix is `{Utils.Prefix}`")
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/debugger.py | import discord
import Utils
HELP = Utils.Help(_help="Is a module for debugging the bot", vanish=True)
EVENTS = [Utils.EVENT.on_message]
# All command names that toggle the global debug flag.
ALIASES = ["db", "debug", "bug", "bugs", "exception", "exceptions", "error", "errors"]
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Toggle the global debug flag; restricted to holders of the staff role."""
    try:
        author = message.author
        if not hasattr(author, "roles"):
            # DMs have no Member/roles — the command is guild-only.
            await message.channel.send("Please use me on a server!")
            return
        if any(role.id == 820974562770550816 for role in author.roles):
            Utils.DATA.debug = not Utils.DATA.debug
            await message.channel.send(f"Toggled `debug` to `{Utils.DATA.debug}`")
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/delete.py | import discord
import Utils
# Bulk deletion command; capped at 99 messages per invocation.
HELP = Utils.Help("deletes the amount of messages (max 99)", f"_{Utils.Prefix}delete iD (reason)_")
EVENTS = [Utils.EVENT.on_message]
ALIASES = ["del"]
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Bulk-delete up to 99 unpinned messages and mirror them into the super log.

    Usage: <prefix>delete <count>; requires the Admin.delete permission.
    """
    try:
        user_perms = Utils.perms(str(message.author.id))
        if user_perms.Admin.delete:
            if len(message.content.split(" ")) == 2:
                if message.content.split(" ")[1].isnumeric():
                    # Remove the command message itself first so it isn't counted.
                    await message.delete()
                    limit = 99  # hard upper bound per invocation
                    if 0 < int(message.content.split()[1]) < limit:
                        limit = int(message.content.split()[1])
                    # Pinned messages are deliberately spared by the check.
                    log = await message.channel.purge(limit=limit, check=lambda msg_: not msg_.pinned)
                    channel_for_logging = await client.fetch_channel(Utils.DATA.IDs.Channels.Logs)
                    await Utils.Logger(channel_for_logging).delete(user=message.author, count=len(log), channel=message.channel)
                    super_log: discord.TextChannel = client.get_channel(Utils.DATA.IDs.Channels.Super_Log)
                    embed = discord.Embed(title=f"#{message.channel}", color=discord.Color(Utils.DATA.colors.red))
                    embed.set_author(name=f"{message.author}", icon_url=message.author.avatar_url)
                    # Oldest first; one embed field per deleted message.
                    for msg in log[::-1]:
                        msg: discord.Message
                        await super_log.trigger_typing()
                        content = f"{msg.content}"
                        embed.add_field(name=f"{msg.author} | {msg.author.id}, {msg.created_at}", value=content if content else f"***__EMBED__***", inline=False)
                    await super_log.send(embed=embed)
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/archived_modules/admin.py | <gh_stars>1-10
import discord
import Utils
from os import listdir
from json import load
# Runtime configuration loaded from Configs.json; AttrDict allows attribute access.
DATA = Utils.AttrDict(load(open("Configs.json")))
Prefix = DATA.CONSTANTS.Prefix
author = DATA.Author        # display name of the bot owner
author_id = DATA.Author_id  # Discord user id of the bot owner
HELP = Utils.Help(vanish=True)
EVENTS = [Utils.EVENT.on_message]
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Owner-only console: dispatch the 'update', 'get' and 'system' subcommands."""
    if message.author.id == author_id:
        if len(message.content.split()) == 1:
            # No subcommand: DM the usage overview.
            await message.author.send("""
Possible:
> _update_ <_-root_> **+** _File.py_
> _get_ _filename.py_ <_-root_>
> _system_
""")
        elif message.content.split()[1] == "update":
            await update(message)
        elif message.content.split()[1] == "get":
            await get(message)
        elif message.content.split()[1] == "system":
            await system(message)
        else:
            ...  # unknown subcommand: deliberately ignored
    else:
        await message.channel.send(f":x: **YOU ARE NOT {author}!!!**")
async def update(message: discord.Message):
    """Save the message's first attachment to ./ (with '-root') or ./Modules/."""
    if not message.attachments:
        await message.author.send("Please add a file...")
        return
    path = "" if message.content.split()[-1] == "-root" else "Modules/"
    await message.attachments[0].save(f"./{path}{message.attachments[0].filename}")
    await message.author.send(f"__*{message.attachments[0].filename}*__ successfully added {'' if path else 'to root'}")
async def get(message: discord.Message):
    """DM a file back from ./ (with '-root') or ./Modules/."""
    if len(message.content.split()) not in (3, 4):
        await message.author.send("Please name a filename...")
        return
    path = "" if message.content.split()[-1] == "-root" else "Modules/"
    if message.content.split(" ")[2] in listdir(f"./{path}"):
        await message.author.send(f"Here is your file {'' if path else 'from root'}...", file=discord.File(open(f"./{path}{message.content.split(' ')[2]}", "rb"), filename=message.content.split(' ')[2]))
    else:
        await message.author.send("Please name a valid filename...")
async def system(message: discord.Message):
    """DM a host diagnostics report (platform, boot time, CPU, RAM, swap).

    Requires the optional third-party ``psutil`` package; when it is missing
    the owner is told so instead of crashing.
    """
    log = {}
    from datetime import datetime
    import platform
    try: import psutil
    except ImportError:
        await message.author.send("`psutil` not found...")
    else:
        await message.author.trigger_typing()
        uname = platform.uname()
        boot_time_timestamp = psutil.boot_time()
        bt = datetime.fromtimestamp(boot_time_timestamp)
        per_core = []
        for i, perc in enumerate(psutil.cpu_percent(percpu=True, interval=1)):
            per_core += [f"Core {i}: {perc}%"]
        virtual_mem = psutil.virtual_memory()
        def adjust_size(size):
            # Human-readable byte size, 1024-based.
            factor = 1024
            for short in ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB", "BB"]:
                if size > factor:
                    size = size / factor
                else:
                    return f"{size:.3f}{short}"
        swap = psutil.swap_memory()
        log["Sys Info"] = ""
        log["system"] = uname.system
        log["node name"] = uname.node
        log["release"] = uname.release
        log["version"] = uname.version
        log["machine"] = uname.machine
        log["processor"] = uname.processor
        log["1"] = "---"
        log["boot time"] = f"{bt.day}.{bt.month}.{bt.year} {bt.hour}:{bt.minute}:{bt.second}"
        log["2"] = "---"
        log["CPU Info"] = ""
        log["actual cores"] = psutil.cpu_count(logical=False)
        log["logical cores"] = psutil.cpu_count(logical=True)
        log["max frequency"] = f"{psutil.cpu_freq().max:.1f}Mhz"
        log["current frequency"] = f"{psutil.cpu_freq().current:.1f}Mhz"
        log["CPU Usage"] = f"{psutil.cpu_percent()}%"
        log["CPU Usage/Core"] = "\n".join(per_core)
        log["3"] = "---"
        log["RAM Info"] = ""
        log["total"] = f"{adjust_size(virtual_mem.total)}"
        log["available"] = f"{adjust_size(virtual_mem.available)}"
        log["used"] = f"{adjust_size(virtual_mem.used)}"
        log["percentage"] = f"{virtual_mem.percent}%"
        log["4"] = "---"
        log["SWAP"] = ""
        # FIX: these keys previously reused "total"/"used"/"percentage" and
        # silently overwrote the RAM figures written just above.
        log["swap total"] = f"{adjust_size(swap.total)}"
        log["swap free"] = f"{adjust_size(swap.free)}"
        log["swap used"] = f"{adjust_size(swap.used)}"
        log["swap percentage"] = f"{swap.percent}%"
        with open("_.log", "w") as fp:
            fp.write("\n".join([f"{key.title()}: {log[key]}" if log[key] != "---" else "\n" for key in log]))
        await message.author.send(file=discord.File(fp="_.log", filename=f"LOGS {datetime.now()}.log"))
|
Alberto-X3/Alberto-X3 | Modules/img.py | <reponame>Alberto-X3/Alberto-X3<filename>Modules/img.py
import discord
import Utils
HELP = Utils.Help("lists all profiles", "shows you the history from all profiles of the Bot and the creator")
EVENTS = [Utils.EVENT.on_message]
ALIASES = ["imgs", "profile", "profiles"]
# Historical bot avatars, oldest first (Discord CDN links used by __main__).
_00 = "https://media.discordapp.net/attachments/779620254396579850/831499354270335006/00-Alberto-X3_ft.Albert.png"
_01 = "https://media.discordapp.net/attachments/779620254396579850/831501108731314196/01-Alberto-X3_ft.Dr.Negativ.png"
_02 = "https://media.discordapp.net/attachments/779620254396579850/831501111676239882/02-Alberto-X3_ft.Dr.Negativ.png"
_03 = "https://media.discordapp.net/attachments/779620254396579850/835973105251647508/03-Alberto-X3_ft.Dr.Negativ.png"
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Post the history of the bot's profile pictures as an embed with credits."""
    try:
        await message.channel.send(embed=discord.Embed(description=f"""
__History:__
• [__`00`__]({_00}) by <@546320163276849162>
• [__`01`__]({_01}) by <@665288034274639873>
• [__`02`__]({_02}) by <@665288034274639873>
• [__`03`__]({_03}) by <@665288034274639873>
"""))
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/sticky_roles.py | """
adds automatically a role to a member to have it "clued" on the member
"""
from discord import Member, Client, Guild
from Utils import EVENT, send_exception
EVENTS = [EVENT.on_member_update, EVENT.on_ready]
# Marker roles that may be re-applied, keyed by a human-readable label.
ROLES = {
    "WARNING: SPOILER!!!": 861238496484130816,
    "WARNING: NO PERMS FOR THIS PERSON!!!": 861327480554782751
}
# Sticky configuration: user id -> the roles that must always stay on them.
STICKY = {
    752762890288758784: {  # Nxnx
        "roles": [
            ROLES["WARNING: SPOILER!!!"]
        ]
    },
    644068957627744266: {  # Ingo2004
        "roles": [
            ROLES["WARNING: NO PERMS FOR THIS PERSON!!!"]
        ]
    }
}
async def __main__(client: Client, _event: int, *member: Member):
    """Re-apply configured sticky roles on member updates and at startup."""
    try:
        guild: Guild = client.guilds[0]
        gr = guild.get_role
        if _event == EVENT.on_member_update:
            before, after = member[0], member[1]
            if before.roles != after.roles and after.id in STICKY:
                wanted = [gr(r) for r in STICKY[after.id]["roles"]
                          if gr(r) is not None]
                await after.add_roles(*wanted)
        elif _event == EVENT.on_ready:
            # Startup sweep: make sure every configured member carries their roles.
            for uid in STICKY:
                target = guild.get_member(uid)
                sticky = [gr(r) for r in STICKY[target.id]["roles"]
                          if gr(r) is not None]
                await target.add_roles(*sticky)
    except Exception as e:
        await send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/rules.py | import discord
import Utils
EVENTS = [Utils.EVENT.on_raw_reaction_add, Utils.EVENT.on_ready]
async def __main__(client: discord.Client, _event: int, reaction: discord.RawReactionActionEvent = None):
    """Route rule-screen work: on-ready catch-up sweep and live reaction handling."""
    try:
        if _event == Utils.EVENT.on_ready:
            await accepted(client)
        if _event == Utils.EVENT.on_raw_reaction_add:
            await accept(client, reaction)
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
async def accept(client: discord.Client, reaction: discord.RawReactionActionEvent):
    """Handle a live reaction on the rules message (role grant, news opt-ins, kick)."""
    if reaction.member == client.user:
        return  # ignore the bot's own seed reactions
    logger = Utils.Logger(channel=await client.fetch_channel(Utils.DATA.IDs.Channels.Logs))
    channel_id = Utils.DATA.IDs.Channels.Rules
    message_id = Utils.DATA.IDs.Messages.Rules
    role_id = Utils.DATA.IDs.Roles.Rules
    guild: discord.Guild = client.get_guild(reaction.guild_id)
    role = discord.utils.get(guild.roles, id=role_id)
    news1: discord.Role = discord.utils.get(guild.roles, id=831831226146750484)  # 📯 news role
    news2: discord.Role = discord.utils.get(guild.roles, id=861649282910519328)  # 📰 news role
    if reaction.channel_id == channel_id:
        if reaction.message_id == message_id:
            if reaction.emoji.name == "✅":
                if role not in reaction.member.roles:
                    await reaction.member.add_roles(role, reason="Rules accepted...")
                    await logger.rules(user=reaction.member)
            # The member's reaction is always removed again so the screen stays clean.
            channel: discord.TextChannel = client.get_channel(id=reaction.channel_id)
            message: discord.Message = await channel.fetch_message(reaction.message_id)
            await message.remove_reaction(emoji=reaction.emoji, member=reaction.member)
            if reaction.emoji.name == "📯":
                await reaction.member.add_roles(news1)
            if reaction.emoji.name == "📰":
                await reaction.member.add_roles(news2)
            if reaction.emoji.name == "❌":
                await reaction.member.remove_roles(news1, news2)
            if reaction.emoji.name == "🚪":
                await reaction.member.kick(reason="No rules...")
            # Re-seed the full reaction set after the member's copy was removed.
            await message.add_reaction("✅")
            await message.add_reaction("📯")
            await message.add_reaction("📰")
            await message.add_reaction("❌")
            await message.add_reaction("🚪")
async def accepted(client: discord.Client):
    """Startup sweep: replay reactions left on the rules message while offline.

    Mirrors accept(): ✅ grants the rules role, 📯/📰 toggle the two news
    roles, ❌ clears them, 🚪 kicks the member.
    """
    logger = Utils.Logger(channel=await client.fetch_channel(Utils.DATA.IDs.Channels.Logs))
    guild: discord.Guild = client.get_guild(id=632526390113337346)
    role: discord.Role = discord.utils.get(guild.roles, id=Utils.DATA.IDs.Roles.Rules)
    news1: discord.Role = discord.utils.get(guild.roles, id=831831226146750484)  # 📯 news role
    news2: discord.Role = discord.utils.get(guild.roles, id=861649282910519328)  # 📰 news role
    channel: discord.TextChannel = await client.fetch_channel(Utils.DATA.IDs.Channels.Rules)
    message: discord.Message = await channel.fetch_message(Utils.DATA.IDs.Messages.Rules)
    for reaction in message.reactions:
        async for member in reaction.users():
            if member == client.user:
                continue  # keep the bot's seed reactions in place
            await message.remove_reaction(emoji=reaction.emoji, member=member)
            # NOTE(review): every branch is gated on the member NOT holding the
            # rules role yet — confirm the news/❌ branches really should skip
            # members who already accepted the rules.
            if role not in member.roles and reaction.emoji.name == "✅":
                await member.add_roles(role, reason="Rules accepted...")
                await logger.rules(user=member)
            if role not in member.roles and reaction.emoji.name == "📯":
                await member.add_roles(news1)
            if role not in member.roles and reaction.emoji.name == "📰":
                # FIX: 📰 previously added news1 a second time; accept() maps 📰 to news2.
                await member.add_roles(news2)
            if role not in member.roles and reaction.emoji.name == "❌":
                await member.remove_roles(news1, news2)
            if role not in member.roles and reaction.emoji.name == "🚪":
                await member.kick(reason="No rules...")
|
Alberto-X3/Alberto-X3 | Modules/kill.py | import discord
import Utils
# Shutdown command; gated behind the Admin.Bot.kill permission.
HELP = Utils.Help("~~kills the Bot...~~ really dangerous", "is very dangerous... so be careful with it\nrequires Admin.Bot.kill")
EVENTS = [Utils.EVENT.on_message]
ALIASES = ["most-dangerous-cmd"]
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Shut the bot down cleanly (perm: Admin.Bot.kill); DMs the owner first."""
    try:
        user_perms = Utils.perms(str(message.author.id))
        if not user_perms.Admin.Bot.kill:
            await message.channel.send(":x: requires Admin.Bot.kill")
            return
        coder = await client.fetch_user(Utils.DATA.Author_id)
        await coder.send(f"**__starting kill by {message.author}__**")
        await client.change_presence(status=discord.Status.offline)
        await client.close()
        # Keep the console window open until the operator confirms.
        input("Press <ENTER>...")
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/kick.py | <reponame>Alberto-X3/Alberto-X3
import discord
import Utils
# Kick command; gated behind the Admin.kick permission.
HELP = Utils.Help(f"kicks a user by iD", f"_{Utils.Prefix}kick iD (reason)_\nrequires Admin.kick")
EVENTS = [Utils.EVENT.on_message]
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Kick a user by iD with an optional reason (perm: Admin.kick)."""
    try:
        user_perms = Utils.perms(str(message.author.id))
        if not user_perms.Admin.kick:
            await message.channel.send(":x: requires Admin.kick")
            return
        parts = message.content.split()
        if len(parts) < 2 or not parts[1].isnumeric():
            await message.channel.send(":x: Please enter the User-iD")
            return
        reason = message.content.split(maxsplit=2)[2] if len(parts) > 2 else "No reason..."
        logger = Utils.Logger(channel=await client.fetch_channel(Utils.DATA.IDs.Channels.Logs))
        channel: discord.TextChannel = message.channel
        guild: discord.Guild = message.guild  # type: ignore
        user: discord.User = await client.fetch_user(int(parts[1]))
        handler: discord.User = message.author
        # Best effort: notify the target first (DMs may be closed).
        try:
            await user.send(f"You where kicked from the {guild} Server.\n_{reason}_")
        except discord.Forbidden:
            pass
        try:
            await guild.kick(user=user, reason=reason)
            await logger.kick(user=handler, target=user, reason=reason)
            await message.delete()
        except ValueError:
            await channel.send(":x: ERROR :x:")
        except discord.Forbidden:
            pass
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/join.py | import discord
import Utils
import datetime
EVENTS = [Utils.EVENT.on_member_join, Utils.EVENT.on_member_remove]
async def __main__(client: discord.Client, _event: int, member: discord.Member):
    """Post a welcome/goodbye embed and write a log entry on member join/leave."""
    try:
        channel = await client.fetch_channel(Utils.DATA.IDs.Channels.Welcome)
        if _event == Utils.EVENT.on_member_join:
            embed = discord.Embed(title=f"Member {member} joined us!",
                                  description=f"Here some Information about "
                                              f"{member.mention}:",
                                  color=Utils.DATA.colors.green)
            embed.set_thumbnail(url=member.avatar_url)
            # NOTE(review): datetime.now() is naive local time while created_at
            # is UTC — the day counts can be slightly off; confirm intent.
            embed.add_field(name="Account creation:",
                            value=f"{member.created_at}\n"
                                  f"{(datetime.datetime.now() - member.created_at).days} days ago",
                            inline=False)
            embed.add_field(name="Name:",
                            value=member.name,
                            inline=False)
            embed.add_field(name="ID:",
                            value=member.id,
                            inline=False)
            await Utils.Logger(channel=await client.fetch_channel(Utils.DATA.IDs.Channels.Logs)).join(member=member)
        else:
            embed = discord.Embed(title=f"(ex-) Member {member} left us!",
                                  description=f"Here some Information about "
                                              f"{member.mention}:",
                                  color=Utils.DATA.colors.red)
            embed.set_thumbnail(url=member.avatar_url)
            embed.add_field(name="Account creation:",
                            value=f"{member.created_at}\n"
                                  f"{(datetime.datetime.now() - member.created_at).days} days ago",
                            inline=False)
            embed.add_field(name="Server joined:",
                            value=f"{member.joined_at}\n"
                                  f"{(datetime.datetime.now() - member.joined_at).days} days ago",
                            inline=False)
            embed.add_field(name="Name:",
                            value=member.name,
                            inline=False)
            embed.add_field(name="ID:",
                            value=member.id,
                            inline=False)
            await Utils.Logger(channel=await client.fetch_channel(Utils.DATA.IDs.Channels.Logs)).left(member=member)
        # The embed is posted to the welcome channel for both branches.
        await channel.send(embed=embed)
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/unban.py | import discord
import Utils
from json import load
# Unban command; help declares the Admin.unban permission requirement.
HELP = Utils.Help("unbans a user with the given iD", f"_{Utils.Prefix}unban iD (reason)_\nrequires Admin.unban")
EVENTS = [Utils.EVENT.on_message]
ALIASES = []  # no alternative command names
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Unban a user by iD with an optional reason (perm: Admin.unban)."""
    try:
        user_perms = Utils.perms(str(message.author.id))
        DATA = Utils.AttrDict(load(open("Configs.json")))
        # FIX: this checked Admin.ban although the help text, the failure
        # message and the command itself are about unbanning.
        if user_perms.Admin.unban:
            if len(message.content.split()) >= 2:
                if message.content.split()[1].isnumeric():
                    if len(message.content.split()) > 2:
                        reason = message.content.split(maxsplit=2)[2]
                    else:
                        reason = "No reason..."
                    logger = Utils.Logger(channel=await client.fetch_channel(DATA.IDs.Channels.Logs))
                    channel: discord.TextChannel = message.channel
                    guild: discord.Guild = message.guild
                    try:
                        user: discord.User = await client.fetch_user(int(message.content.split()[1]))
                    except discord.NotFound:
                        await channel.send(":x: Invalid User-iD :x:")
                        return
                    handler: discord.User = message.author
                    try:
                        await guild.unban(user=user, reason=reason)
                        await logger.unban(user=handler, target=user, reason=reason)
                        # DMing the target is impossible once no guild is shared.
                        await message.delete()
                    except ValueError:
                        await channel.send(":x: ERROR :x:")
                    except discord.NotFound:
                        await channel.send(":x: This user isn't banned :x:")
                    except discord.Forbidden:
                        pass
                else:
                    await message.channel.send(":x: Please enter the User-iD")
            else:
                await message.channel.send(":x: Please enter the User-iD")
        else:
            await message.channel.send(":x: requires Admin.unban")
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/perms.py | <gh_stars>1-10
import discord
import Utils
HELP = Utils.Help(f"shows you your permissions", f"""
> *{Utils.Prefix}perms*
shows you your perms
> *{Utils.Prefix}perms [USER-ID|USER-MENTION]*
shows you perms from other
> *{Utils.Prefix}perms [USER-ID|USER-MENTION] [add|+] [PERMISSION]*
adds permissions for a user
> *{Utils.Prefix}perms [USER-ID|USER-MENTION] [remove|-] [PERMISSION]*
removes permissions from a user
""")
EVENTS = [Utils.EVENT.on_message]
ALIASES = ["p"]
# Module-level "globals" populated by __main__ on every invocation.
super_log: discord.TextChannel
_client: discord.Client
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Show or modify user permissions (see HELP for the four sub-forms)."""
    try:
        global super_log
        super_log = client.get_channel(Utils.DATA.IDs.Channels.Super_Log)
        global _client
        _client = client
        user_perms = Utils.perms(str(message.author.id))
        # not 'seeOwn'
        if len(message.content.split()) > 1:
            # The replace chain strips <@!…> mention decoration down to a bare id.
            if message.content.split()[1].replace("<", "").replace("@", "").replace("!", "").replace(">", "").isnumeric() and len(message.content.split()) == 2:
                # 'seeOther'
                # NOTE(review): this inner length check is redundant — the
                # branch condition above already required exactly two tokens.
                if len(message.content.split()) == 2:
                    if user_perms.User.Perms.seeOther:
                        await perms(message, message.content.split()[1].replace("<", "").replace("@", "").replace("!", "").replace(">", ""), Utils.perms(message.content.split()[1].replace("<", "").replace("@", "").replace("!", "").replace(">", "")))
                    else:
                        await message.channel.send(":x: requires User.Perms.seeOther")
            # 'set'
            elif len(message.content.split()) == 4:
                if message.content.split()[1].replace("<", "").replace("@", "").replace("!", "").replace(">", "").isnumeric():
                    if message.content.split()[2] == "add" or message.content.split()[2] == "+":
                        await set_(message, user_perms, True)
                    elif message.content.split()[2] == "remove" or message.content.split()[2] == "-":
                        await set_(message, user_perms, False)
                else:
                    await message.channel.send(":x: Please enter a ID or a MENTION")
            else:
                await message.channel.send(":x: Invalid Syntax")
        # 'seeOwn'
        else:
            if user_perms.User.Perms.seeOwn:
                await perms(message, str(message.author.id), user_perms)
            else:
                await message.channel.send(":x: requires User.Perms.seeOwn")
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
def finder(data: Utils.AttrDict, path="") -> list:
    """Recursively collect the dotted paths of all truthy leaf permissions."""
    hits = []
    for name in data.__dict__:
        full = f"{path}{'.' if path else ''}{name}"
        value = data[name]
        if isinstance(value, Utils.AttrDict):
            hits.extend(finder(value, full))
        elif value:
            hits.append(full)
    return hits
async def perms(message: discord.Message, _id: str, user_perms: Utils.AttrDict):
    """Post every truthy permission path of the given user to the channel."""
    listing = "\n".join(finder(user_perms))
    await message.channel.send(f"**__perms from <@{_id}>:__**\n{listing}")
def replace(root: Utils.Union[Utils.AttrDict, bool], keys: list, value: Utils.Any):
    """Set the leaf reached by *keys* (a dotted path split into parts) to *value*.

    Returns the (mutated) *root*; with an empty key list the value itself is
    returned, which is what terminates the recursion.
    """
    if not keys:
        return value
    head, *tail = keys
    root[head] = replace(root[head], tail, value)
    return root
def replaces(root: Utils.Union[Utils.AttrDict, bool], value: Utils.Optional[bool]):
    """Overwrite every boolean leaf under *root* with *value*, recursively."""
    for name in root:
        node = root[name]
        root[name] = value if isinstance(node, bool) else replaces(node, value)
    return root
async def set_(message: discord.Message, user_perms: Utils.AttrDict, new: bool):
    """Add (new=True) or remove (new=False) a permission path on another user.

    Only permissions the invoker holds can be granted; whole subtrees need
    User.Perms.removeMultiple (removal) or bot ownership (addition). Every
    change is persisted to perms.json and mirrored to the super log.
    """
    if user_perms.User.Perms.set or message.author.id == Utils.DATA.Author_id:
        try:
            # HACK: eval() on user-controlled text — the attribute path comes
            # straight from the message. Unknown paths raise KeyError, but this
            # should be replaced with a safe attribute walker.
            perm = eval(f"user_perms.{message.content.split()[-1]}")
        except KeyError:
            await message.channel.send(":x: This permission doesn't exist! :x:")
        else:
            # Strip <@!…> mention decoration down to a bare id.
            id_ = message.content.split()[1].replace("<", "").replace("@", "").replace("!", "").replace(">", "")
            if id_ == str(message.author.id) and not message.author.id == Utils.DATA.Author_id:
                await message.channel.send("**__:x: Please don't modify yourself! :x:__**\nhere is a 🃏 for you \\:)")
                return
            root = Utils.perms(id_)
            keys = message.content.split()[-1].split('.')
            if isinstance(perm, bool):
                # Single leaf: invoker must hold the permission to pass it on.
                if perm:
                    new_perms = replace(root, keys, new)
                else:
                    await message.channel.send(":x: You don't have this permission! :x:")
                    return
            else:
                # Subtree: bulk-set every boolean leaf beneath it.
                if new is False:
                    if user_perms.User.Perms.removeMultiple:
                        new_perms = replace(root, keys, replaces(eval(f"root.{message.content.split()[-1]}"), new))
                    else:
                        await message.channel.send(":x: You don't have permissions to remove multiple permissions at once! :x:")
                        return
                else:
                    if message.author.id == Utils.DATA.Author_id:
                        new_perms = replace(root, keys, replaces(eval(f"root.{message.content.split()[-1]}"), new))
                    else:
                        await message.channel.send(":x: You don't have permissions to add multiple permissions at once! :x:")
                        return
            from json import load, dump
            # Persist the whole permission table back to disk.
            permissions = Utils.AttrDict(load(open("perms.json")))
            permissions[id_] = new_perms
            dump(permissions, open("perms.json", "w"), indent=2)
            await message.channel.send(f"**Successfully {'added permissions to' if new is True else 'removed permissions from'} <@{id_}>**")
            embed: discord.Embed = discord.Embed(color=Utils.DATA.colors.green if new is True else Utils.DATA.colors.red)
            embed.title = "Permission Update"
            try:
                embed.set_author(name=f"{message.author} ({message.author.id}) | {await _client.fetch_user(int(id_))} ({id_})")
            except discord.NotFound:
                embed.set_author(name=f"{message.author} ({message.author.id}) | {id_}")
            embed.add_field(name="__Permission:__", value=message.content.split()[-1])
            await super_log.send(embed=embed)
    else:
        await message.channel.send(":x: requires User.Perms.set")
|
Alberto-X3/Alberto-X3 | Modules/event_cardgifter2020_cards.py | import discord
import Utils
# order_2004 -> dispatched for every prefixed message; vanish -> hidden from the help listing
HELP = Utils.Help(order_2004=True, vanish=True)
# this module only reacts to chat messages
EVENTS = [Utils.EVENT.on_message]
# no command aliases - the module is purely event-driven
ALIASES = []
async def __main__(client: discord.Client, _event: int, message: discord.Message):
    """Mirror every message from the card-event channel into the log channel
    and acknowledge it with a checkmark reaction."""
    try:
        # only the dedicated event channel is handled
        if message.channel.id == 821843251195412530:
            author = message.author
            target: discord.TextChannel = client.get_channel(821845250451439636)
            entry = (f"__{author.mention}__\n"
                     f"```\n{message.content}\n```\n"
                     f"{author.avatar_url}")
            await target.send(entry)
            await message.add_reaction("✅")
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/level.py | from discord import Embed, Client, Message, Role, NotFound, Member, User
from Utils import Help, EVENT, send_exception, Prefix, DATA
from sqlite3 import connect
from random import choice
from typing import Tuple, Dict, Union
from datetime import datetime, timedelta
HELP = Help("shows you your XP", f"{Prefix}level (iD/ping)", order_1793=True)
EVENTS = [EVENT.on_message]
ALIASES = ["lvl", "rank"]
# XP granted per counted message (weighted towards 3 and 4)
possible_xps = [2, 2,
                3, 3, 3, 3,
                4, 4, 4,
                5, 5]
# custom emojis used as the empty / filled cells of the progress bar
free = "<:XP0:831578582026813480>"
full = "<:XP1:831578621092691978>"
# number of cells in the rendered progress bar
len_bar = 20
# exponent of the growth curve: level is derived from xp ** (1/3)
formula = 1 / 3
# cooldown window between two XP grants for the same user
latency = timedelta(minutes=1)
# level (as string key) -> reward role id handed out on level-up
lvl_rewards = {
    "25": 831628612171071498,
    "20": 831628364803997757,
    "15": 831628201406627871,
    "10": 831628459222237227,
    "5": 831915216475914273
}
# per-user timestamp of the last counted message (anti-spam bookkeeping)
recent: Dict[int, datetime] = {}
# XP thresholds per level; commented entries kept for reference
needed = {
    # 1: 8,
    # 2: 27,
    # 3: 65,
    # 4: 125,
    5: 217,
    # 6: 344,
    # 7: 513,
    # 8: 730,
    # 9: 1001,
    10: 1332,
    # 11: 1729,
    # 12: 2198,
    # 13: 2745,
    # 14: 3376,
    15: 4097,
    # 16: 4914,
    # 17: 5833,
    # 18: 6860,
    # 19: 8001,
    20: 9262,
    # 21: 10649,
    # 22: 12168,
    # 23: 13825,
    # 24: 15626,
    25: 17577,
    # 26: 19684,
    # 27: 21953,
    # 28: 24390,
    # 29: 27001,
    30: 29792,
    # 31: 32769,
    # 32: 35938,
    # 33: 39305,
    # 34: 42876,
    35: 46657,
    # 36: 50654,
    # 37: 54873,
    # 38: 59320,
    # 39: 64001,
    40: 68922
}
needed_info = """```
Level: XP:
{}
```""".format("\n".join(f"{str(k):8}{needed[k]}" for k in list(needed)))
async def __main__(client: Client, _event: int, message: Message):
    """Leveling module: grant XP per message and answer the `level` command.

    Runs for every message (order_1793). With a bot prefix it replies with a
    rank embed; without one it counts XP and announces level-ups.
    """
    try:
        # leveling is guild-only and disabled while debugging
        if message.guild is None or DATA.debug:
            return
        try:
            # command form "level <id/ping>": strip the mention decorations
            user = int(message.content.split()[-1].replace("<", "")
                       .replace("@", "")
                       .replace("!", "")
                       .replace(">", ""))
            message.author = await client.fetch_user(user)
        except (ValueError, NotFound, IndexError):
            # no (valid) target given -> fall back to the sender
            user = message.author.id
        db = connect("levels.sqlite")
        cursor = db.cursor()
        # NOTE(review): `user` is always an int here so the f-string SQL cannot
        # inject, but parameterized queries would still be safer.
        cursor.execute(f"SELECT * FROM lvl WHERE user=={user}")
        fetched = cursor.fetchone()
        if fetched is None:
            # first time we see this user: create their row
            cursor.execute(f"INSERT INTO lvl VALUES ({user}, 0, 0)")
            db.commit()
        # row layout: (user_id, level, xp)
        data: Tuple[int, int, int] = fetched or (user, 0, 0)
        cursor.execute("SELECT * from lvl ORDER BY xp DESC LIMIT 10")
        rank = cursor.fetchall()
        # top-10 leaderboard, pre-built as an embed field
        ranking = {
            "inline": False,
            "name": f"__Ranking #{len(rank)}:__",
            "value": "\n".join(f"LVL **{l}**; "
                               f"XP **{x}**; "
                               f"<@{u}>" for u, l, x in rank)
        }
        cursor.execute("SELECT null from lvl")
        len_user = len(cursor.fetchall())
        xp = data[2]
        lvl = data[1]
        if message.content.startswith(("!!", "++", "..", "??")):
            """
            `!!` -> prefix for <@714477299042615361> (Josef#0189)
            `++` -> prefix for <@772085213987209226> (Red-Rainbow#0836)
            `..` -> prefix for <@751157545728606239> (Alberto-X3#9164)
            `??` -> prefix for <@756196727748296855> (CardGifter2020#2871)
            """
            # only answer to our own prefix ".."
            if not message.content.startswith(".."):
                return
            # two digits of the fractional part of xp**(1/3) -> % progress
            user_level = xp ** formula
            user_progress = int(str(user_level).split(".")[1][:2])
            len_filled = int(len_bar * user_progress / 100)
            # build the bar with placeholders, then swap in the custom emojis
            bar = f"{'#' * len_filled:-<{len_bar}}"
            bar = bar.replace("#", full)
            bar = bar.replace("-", free)
            # rank = 1 + number of users with strictly more XP
            cursor.execute(f"SELECT level FROM lvl where xp>{xp}")
            rank = len(cursor.fetchall()) + 1
            embed = Embed(color=0x275591,
                          description=f"You are the number __**#{rank}**__ {message.author.mention}!")
            embed.set_author(name=message.author.name,
                             icon_url=message.author.avatar_url)
            embed.set_footer(text=f"total {len_user} user in ranking")
            embed.add_field(inline=False,
                            name="__Your LVL:__", value=str(lvl))
            embed.add_field(inline=False,
                            name="__Your XP:__", value=f"{xp}\n{bar}")
            embed.add_field(**ranking)
            embed.add_field(name="__Info:__", value=needed_info, inline=False)
            await message.channel.send(embed=embed)
            return
        # below is only without prefix and just leveling
        if message.channel.slowmode_delay:
            return
        author: Member = message.guild.get_member(message.author.id)
        try:
            # anti-spam: normal messages earn XP at most every latency/2;
            # messages starting with a zero-width space wait the full latency
            if recent[user] + latency > datetime.utcnow():
                if not message.content.startswith("\u200B"):
                    if recent[user] + latency / 2 > datetime.utcnow():
                        return
                else:
                    return
        except KeyError:
            pass
        recent[user] = datetime.utcnow()
        xp += choice(possible_xps)
        old_lvl = lvl
        # cube-root growth curve (see the `needed` table above)
        lvl = int(xp ** formula) - 1
        cursor.execute(f"UPDATE lvl SET level={lvl} WHERE user=={user}")
        cursor.execute(f"UPDATE lvl SET xp={xp} WHERE user=={user}")
        db.commit()
        db.close()
        if lvl != old_lvl:
            # announce the level-up and hand out a reward role if configured
            await client.get_channel(831625194803298314).send(
                f"Congratulations __**{message.author.mention}**__!\n"
                f"You are now __*Level {lvl}*__ {lvl*'🥳'}\n")
            if str(lvl) in lvl_rewards:
                reward: Role = message.guild.get_role(lvl_rewards[str(lvl)])
                await author.add_roles(reward, reason="Leveling reward")
    except Exception as e:
        await send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Modules/on_ready.py | import discord
import Utils
from typing import List
from asyncio import sleep
from datetime import datetime, timedelta
from aiohttp import ClientSession
EVENTS = [Utils.EVENT.on_ready]
async def __main__(client: discord.Client, _event: int):
    """On ready: set the bot presence, then keep a COVID-19 statistics
    message up to date by scraping worldometers once per minute.

    BUG FIX: the message text used to be built only inside an ``if first:``
    guard, i.e. on the very first loop iteration - every later iteration
    edited the Discord message with identical stale content, so the numbers
    and the +/- diffs never changed. The text is now rebuilt each iteration.
    """
    try:
        print(f"Logged in as {client.user}")
        await client.change_presence(activity=discord.Activity(name=f"{Utils.Prefix}help", type=discord.ActivityType.listening), status=discord.Status.online)
        ...
        # the pre-existing message that gets edited with the latest numbers
        id_channel: int = 808742319066579014
        id_message: int = 809048308358184981
        channel: discord.TextChannel = client.get_channel(id_channel)
        message: discord.Message = await channel.fetch_message(id_message)
        # byte markers used to locate each figure in the raw HTML
        tag_cases = b"<h1>Coronavirus Cases:</h1>"
        tag_deaths = b"<h1>Deaths:</h1>"
        tag_recovered = b"<h1>Recovered:</h1>"
        tag_end = b"</span>"
        url = r"https://www.worldometers.info/coronavirus/"
        # fixed byte offsets from each marker to the value itself
        # NOTE(review): these break whenever the page markup changes - the
        # broad except below silently skips such iterations
        dis_cases = 86
        dis_deaths = 56
        dis_recovered = 82
        # timelines of recent readings (newest first) for the +/- diffs
        old_list_cases = []
        old_list_deaths = []
        old_list_recovered = []
        old_list_active = []
        while True:
            async with ClientSession() as session:
                # awaiting ClientSession.get directly yields the response
                data = await session.get(url)
                content = await data.read()
                try:
                    pos_cases = content.find(tag_cases)
                    pos_deaths = content.find(tag_deaths)
                    pos_recovered = content.find(tag_recovered)
                    cases = content[pos_cases+dis_cases:pos_cases+dis_cases+content[pos_cases+dis_cases:].find(tag_end)]
                    deaths = content[pos_deaths+dis_deaths:pos_deaths+dis_deaths+content[pos_deaths+dis_deaths:].find(tag_end)]
                    if deaths.startswith(b">"):
                        # offset landed one byte short - shift by one
                        deaths = content[pos_deaths+dis_deaths+1:pos_deaths+dis_deaths+1+content[pos_deaths+dis_deaths+1:].find(tag_end)]
                    recovered = content[pos_recovered+dis_recovered:pos_recovered+dis_recovered+content[pos_recovered+dis_recovered:].find(tag_end)]
                    cases = cases.replace(b" ", b"")
                    deaths = deaths.replace(b" ", b"")
                    recovered = recovered.replace(b" ", b"")
                    int_cases = int(cases.replace(b",", b""))
                    int_deaths = int(deaths.replace(b",", b""))
                    int_recovered = int(recovered.replace(b",", b""))
                    int_active = int_cases - int_deaths - int_recovered
                    # re-insert thousands separators into the derived number
                    active = ("".join(str(int_active)[::-1][i]+"," if i % 3 == 2 else str(int_active)[::-1][i] for i in range(len(str(int_active)))))[::-1]
                    if active.startswith(","):
                        active = active[1:]
                    # history length BEFORE the update = minutes covered by the diffs
                    history = len(old_list_cases)
                    old_list_cases = ut(old_list_cases, int_cases)
                    old_list_deaths = ut(old_list_deaths, int_deaths)
                    old_list_recovered = ut(old_list_recovered, int_recovered)
                    old_list_active = ut(old_list_active, int_active)
                    new_cases = dif(old_list_cases)
                    new_deaths = dif(old_list_deaths)
                    new_recovered = dif(old_list_recovered)
                    new_active = dif(old_list_active)
                    # rebuild the message text every iteration (see docstring)
                    msg = f"""
__**🌐 World Wide**__
```md
COVID-19 Cases
------------------------
{cases.decode():13}{
('-' if new_cases < 0 else '+')+str(abs(new_cases)):7}
Deaths
------------------------
{deaths.decode():13}{
('-' if new_deaths < 0 else '+') + str(abs(new_deaths)):7}
Recovered
------------------------
{recovered.decode():13}{
('-' if new_recovered < 0 else '+') + str(abs(new_recovered)):7}
Active
------------------------
{active:13}{
('-' if new_active < 0 else '+') + str(abs(new_active)):7}
> UTC {datetime.utcnow().date()} {datetime.utcnow().hour}:{"0"+str(datetime.utcnow().minute) if datetime.utcnow().minute < 10 else datetime.utcnow().minute}```
@here is the source: <{url}> :)
[`+`/`-` are the differences from the past {history} minutes]
"""
                    await message.edit(content=msg)
                    # sleep until the next full minute
                    await sleep((timedelta(minutes=1)-timedelta(seconds=datetime.utcnow().second, microseconds=datetime.utcnow().microsecond)).total_seconds())
                except (KeyError, IndexError, ValueError, TypeError,
                        AttributeError, RuntimeError):
                    # scraping failed (markup changed etc.) - try again next round
                    pass
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
def dif(obj: List[int]) -> int:
    """Return newest minus oldest entry (index 0 minus index -1);
    0 when the timeline holds fewer than two values."""
    return obj[0] - obj[-1] if len(obj) > 1 else 0
def update_timeline(obj: List[int], value: int,
                    max_len: int = 10) -> List[int]:
    """Prepend *value* to the timeline and return the newest *max_len* entries.

    BUG FIX: the previous version also mutated *obj* in place via
    ``obj.insert(0, value)`` while returning a fresh slice - a confusing dual
    contract. All callers rebind the return value, so this now leaves the
    input list untouched and only returns the new timeline.
    """
    return ([value] + obj)[:max_len]
ut = update_timeline
|
Alberto-X3/Alberto-X3 | Modules/__init__.py | from os import listdir
import importlib
import Utils
# Discover every module file, import it, drop modules that lack the
# mandatory attributes, and fill in defaults for the optional ones.
MODULES = [entry.replace(".py", "")
           for entry in listdir("./Modules")
           if entry.endswith(".py") and entry != "__init__.py"]
libs = {}
for name in MODULES.copy():
    mod = importlib.import_module(f"Modules.{name}")
    libs[name] = mod
    # `__main__` (entry point) and `EVENTS` (subscriptions) are mandatory
    if not hasattr(mod, "__main__") or not hasattr(mod, "EVENTS"):
        del libs[name]
        MODULES.remove(name)
        continue
    # optional attributes get sensible defaults
    if not hasattr(mod, "ALIASES"):
        mod.ALIASES = []
    if not hasattr(mod, "HELP"):
        mod.HELP = Utils.Help(_help="*Please contact the developer to add a help for this!*")
|
Alberto-X3/Alberto-X3 | Modules/mars.py | from aiohttp import ClientSession, ClientResponse
from discord import Client, Message, Embed
from datetime import datetime, timedelta
from asyncio import sleep as asleep
import Utils
HELP = Utils.Help("shows recent pictures from Mars")
EVENTS = [Utils.EVENT.on_message]
ALIASES = ["m"]
# only got better reading
# aliases so the annotations below read as units, not just `int`
days = seconds = int
# CONSTANTS
LOADING_MSG = "<a:loading:832383867700772866> loading `api.nasa.gov`..."
API_KEY = Utils.DATA.CONSTANTS.KEY_01
BASE_URL = "https://api.nasa.gov/mars-photos/api/v1/rovers/curiosity/photos" \
           "?earth_date={date}&api_key=" + API_KEY
# how many past earth days of photos to fetch
MAX_HISTORY: days = 4
# cap on the number of images shown in one slide-show
MAX_IMAGES: int = 90
# pause between two slides
TIME_TO_SLEEP: seconds = 10
del days, seconds
def get_date(history) -> str:
    """Return the UTC date *history* days in the past, formatted YYYY-MM-DD."""
    target = datetime.utcnow() - timedelta(days=history)
    return target.strftime("%Y-%m-%d")
async def __main__(client: Client, _event: int, message: Message):
    """Show a slide-show of recent Mars (Curiosity) navigation-camera images.

    Fetches the photos of the last MAX_HISTORY earth days from the NASA API,
    de-duplicates them, and edits one status message per slide, pausing
    TIME_TO_SLEEP seconds between slides.
    """
    try:
        # the command message is replaced by our own editable status message
        message: Message = await message.channel.send(LOADING_MSG)
        data = []
        async with ClientSession() as session:
            for history in range(1, MAX_HISTORY+1):
                url = BASE_URL.format(date=get_date(history))
                resp: ClientResponse = await (session.get(url))
                data += (await resp.json())["photos"]
        # I convert the images and sols to a set to prevent multiplying
        images_set = {(i["img_src"], i["sol"], i["rover"]["name"])
                      for i in data if "/ncam/" in i["img_src"]}
        del data
        # newest sol first
        images = [*sorted(images_set, key=lambda t: t[1], reverse=True)]
        del images_set
        if len(images) > MAX_IMAGES:
            images = images[:MAX_IMAGES]
        for image in images:
            embed: Embed = Embed(title=f"Mars Sol {image[1]}",
                                 description="Recent images from Mars!")
            embed.set_image(url=image[0])
            embed.set_footer(text=f"Image from rover {image[2]} taken with "
                                  f"the Navigation Camera.")
            await message.edit(embed=embed, content="")
            # FIX: was a hard-coded `10`; use the declared pacing constant
            await asleep(TIME_TO_SLEEP)
        del images
        await message.edit(content=f"To restart the slide-show type "
                                   f"`{Utils.Prefix}{__name__.split('.')[-1]}`")
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
|
Alberto-X3/Alberto-X3 | Bot.py | <gh_stars>1-10
from typing import *
from asyncio import create_task
import datetime
import discord
import Modules
import Utils
# to be sure, that there is a DB
import levels_config_sqlite
del levels_config_sqlite
# all gateway intents are requested so every handler below receives its data
intents = discord.Intents.all()
client = discord.Client(intents=intents)
TOKEN = Utils.DATA.CONSTANTS.Token
Prefix = Utils.Prefix
exceptions = True
# this boolean tells you, if your code will actually throw a error (what isn't so mad...) or
# that you get the errors in your cmd (this is my favorite, because you know exactly what for
# a error it exactly is (and where you can find it in your code))
#
# `True`  normal errors in the cmd (default)
# `False` just a feedback, in what for a event a error was raised
'''
You can see all the events in the following URL:
https://discordpy.readthedocs.io/en/latest/api.html#event-reference
'''
# --- gateway lifecycle events -------------------------------------------
# Each handler fans the event out to every module that subscribed to it in
# its EVENTS list; each module entry point runs as an independent task.
@client.event
async def on_connect():
    for module in Modules.MODULES:
        if Utils.EVENT.on_connect in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_connect))
@client.event
async def on_shard_connect(shard_id: int):
    for module in Modules.MODULES:
        if Utils.EVENT.on_shard_connect in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_shard_connect, shard_id))
@client.event
async def on_disconnect():
    for module in Modules.MODULES:
        if Utils.EVENT.on_disconnect in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_disconnect))
@client.event
async def on_shard_disconnect(shard_id: int):
    for module in Modules.MODULES:
        if Utils.EVENT.on_shard_disconnect in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_shard_disconnect, shard_id))
@client.event
async def on_ready():
    for module in Modules.MODULES:
        if Utils.EVENT.on_ready in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_ready))
@client.event
async def on_shard_ready(shard_id: int):
    for module in Modules.MODULES:
        if Utils.EVENT.on_shard_ready in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_shard_ready, shard_id))
@client.event
async def on_resumed():
    for module in Modules.MODULES:
        if Utils.EVENT.on_resumed in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_resumed))
@client.event
async def on_shard_resumed(shard_id: int):
    for module in Modules.MODULES:
        if Utils.EVENT.on_shard_resumed in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_shard_resumed, shard_id))
# custom error dump only when `exceptions` is False; otherwise discord.py's
# default traceback printing stays in effect (see the comment on `exceptions`)
if not exceptions:
    @client.event
    async def on_error(event: str, *args, **kwargs):
        print("ERROR BY DC!!!")
        print(f"{event=}")
        print(f"{args=}")
        print(f"{kwargs=}")
        for module in Modules.MODULES:
            if Utils.EVENT.on_error in Modules.libs[module].EVENTS:
                create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_error, event, *args, **kwargs))
@client.event
async def on_socket_raw_receive(msg: Union[bytes, str]):
    for module in Modules.MODULES:
        if Utils.EVENT.on_socket_raw_receive in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_socket_raw_receive, msg))
@client.event
async def on_socket_raw_send(payload: Union[bytes, str]):
    for module in Modules.MODULES:
        if Utils.EVENT.on_socket_raw_send in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_socket_raw_send, payload))
@client.event
async def on_typing(channel: discord.abc.Messageable, user: Union[discord.User, discord.Member], when: datetime.datetime):
    for module in Modules.MODULES:
        if Utils.EVENT.on_typing in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_typing, channel, user, when))
@client.event
async def on_message(message: discord.Message):
    """Dispatch chat messages: built-in help command, prefix commands, and
    the always-on modules (order_1793 / order_2004)."""
    if message.content.startswith(Prefix):
        if message.content.split()[0] == f"{Prefix}help" or message.content.split()[0] == f"{Prefix}h":
            embed = discord.Embed()
            embed.set_footer(text=f"requested by {message.author}", icon_url=message.author.avatar_url)
            _module = ""
            if len(message.content.split()) == 2:
                # look the argument up as a module name first, then as an alias
                found = False
                for module in Modules.MODULES:
                    if message.content.split()[1] == module:
                        found = True
                        _module = module
                        break
                    else:
                        for alias in Modules.libs[module].ALIASES:
                            if message.content.split()[1] == alias:
                                found = True
                                _module = module
                                break
                        if found:
                            break
                if not found:
                    embed.title = "**__:x: invalid cmd__**"
                    # BUG FIX: Discord rejects embed fields with an empty name
                    # (the send would fail with an HTTP 400); a zero-width
                    # space renders as a name-less field instead.
                    embed.add_field(name="\u200B", value=f"`{message.content.split()[1]}` isn't a cmd...")
                else:
                    embed.title = f"**__help: {message.content.split()[1]}__**"
                    embed.add_field(name=_module, value=f"""
Aliases: {Modules.libs[_module].ALIASES}
~~--------~~
{Modules.libs[_module].HELP.direct_help}
""")
            else:
                # no argument: list every visible command with its aliases
                embed.title = "**__help | h__**"
                embed.add_field(name="better help:", value=f"> {Prefix}help NAME", inline=False)
                for module in Modules.MODULES:
                    if Utils.EVENT.on_message in Modules.libs[module].EVENTS:
                        if not Modules.libs[module].HELP.vanish:
                            _ = ""
                            for alias in Modules.libs[module].ALIASES:
                                _ += f" **|** _{Prefix}{alias}_"
                            embed.add_field(name=f"_{Prefix}{module}_{_}", value=f"{Modules.libs[module].HELP}\n\n")
            create_task(message.channel.send(embed=embed))
        else:
            # prefixed command: dispatch by module name, order_2004 flag, or alias
            for module in Modules.MODULES:
                if Utils.EVENT.on_message in Modules.libs[module].EVENTS:
                    if message.content.split()[0] == f"{Prefix}{module}" or Modules.libs[module].HELP.order_2004:
                        create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_message, message))
                    else:
                        for alias in Modules.libs[module].ALIASES:
                            if message.content.split()[0] == f"{Prefix}{alias}":
                                create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_message, message))
                                break
    elif message.content.replace("!", "") == client.user.mention:
        # a bare @mention of the bot answers with the prefix
        create_task(message.channel.send(f"My Prefix is `{Prefix}`."))
    else:
        # modules flagged order_1793/order_2004 also see unprefixed messages
        for module in Modules.MODULES:
            if Modules.libs[module].HELP.order_1793 or Modules.libs[module].HELP.order_2004:
                create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_message, message))
# --- remaining gateway events -------------------------------------------
# Same fan-out pattern as above: forward each event to every module that
# listed it in its EVENTS, one task per module.
@client.event
async def on_message_delete(message: discord.Message):
    for module in Modules.MODULES:
        if Utils.EVENT.on_message_delete in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_message_delete, message))
@client.event
async def on_bulk_message_delete(messages: List[discord.Message]):
    for module in Modules.MODULES:
        if Utils.EVENT.on_bulk_message_delete in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_bulk_message_delete, messages))
@client.event
async def on_raw_message_delete(payload: discord.RawMessageDeleteEvent):
    for module in Modules.MODULES:
        if Utils.EVENT.on_raw_message_delete in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_raw_message_delete, payload))
@client.event
async def on_message_edit(before: discord.Message, after: discord.Message):
    for module in Modules.MODULES:
        if Utils.EVENT.on_message_edit in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_message_edit, before, after))
@client.event
async def on_raw_message_edit(payload: discord.RawMessageUpdateEvent):
    for module in Modules.MODULES:
        if Utils.EVENT.on_raw_message_edit in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_raw_message_edit, payload))
@client.event
async def on_reaction_add(reaction: discord.Reaction, user: Union[discord.Member, discord.User]):
    for module in Modules.MODULES:
        if Utils.EVENT.on_reaction_add in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_reaction_add, reaction, user))
@client.event
async def on_raw_reaction_add(payload: discord.RawReactionActionEvent):
    for module in Modules.MODULES:
        if Utils.EVENT.on_raw_reaction_add in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_raw_reaction_add, payload))
@client.event
async def on_reaction_remove(reaction: discord.Reaction, user: Union[discord.Member, discord.User]):
    for module in Modules.MODULES:
        if Utils.EVENT.on_reaction_remove in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_reaction_remove, reaction, user))
@client.event
async def on_raw_reaction_remove(payload: discord.RawReactionActionEvent):
    for module in Modules.MODULES:
        if Utils.EVENT.on_raw_reaction_remove in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_raw_reaction_remove, payload))
@client.event
async def on_reaction_clear(message: discord.Message, reactions: List[discord.Reaction]):
    for module in Modules.MODULES:
        if Utils.EVENT.on_reaction_clear in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_reaction_clear, message, reactions))
@client.event
async def on_raw_reaction_clear(payload: discord.RawReactionClearEvent):
    for module in Modules.MODULES:
        if Utils.EVENT.on_raw_reaction_clear in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_raw_reaction_clear, payload))
@client.event
async def on_reaction_clear_emoji(reaction: discord.Reaction):
    for module in Modules.MODULES:
        if Utils.EVENT.on_reaction_clear_emoji in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_reaction_clear_emoji, reaction))
@client.event
async def on_raw_reaction_clear_emoji(payload: discord.RawReactionClearEmojiEvent):
    for module in Modules.MODULES:
        if Utils.EVENT.on_raw_reaction_clear_emoji in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_raw_reaction_clear_emoji, payload))
@client.event
async def on_private_channel_delete(channel: discord.abc.PrivateChannel):
    for module in Modules.MODULES:
        if Utils.EVENT.on_private_channel_delete in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_private_channel_delete, channel))
@client.event
async def on_private_channel_create(channel: discord.abc.PrivateChannel):
    for module in Modules.MODULES:
        if Utils.EVENT.on_private_channel_create in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_private_channel_create, channel))
@client.event
async def on_private_channel_update(before: discord.GroupChannel, after: discord.GroupChannel):
    for module in Modules.MODULES:
        if Utils.EVENT.on_private_channel_update in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_private_channel_update, before, after))
@client.event
async def on_private_channel_pins_update(channel: discord.abc.PrivateChannel, last_pin: Optional[datetime.datetime]):
    for module in Modules.MODULES:
        if Utils.EVENT.on_private_channel_pins_update in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_private_channel_pins_update, channel, last_pin))
@client.event
async def on_guild_channel_delete(channel: discord.abc.GuildChannel):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_channel_delete in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_channel_delete, channel))
@client.event
async def on_guild_channel_create(channel: discord.abc.GuildChannel):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_channel_create in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_channel_create, channel))
@client.event
async def on_guild_channel_update(before: discord.GroupChannel, after: discord.GroupChannel):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_channel_update in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_channel_update, before, after))
@client.event
async def on_guild_channel_pins_update(channel: discord.abc.PrivateChannel, last_pin: Optional[datetime.datetime]):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_channel_pins_update in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_channel_pins_update, channel, last_pin))
@client.event
async def on_guild_integrations_update(guild: discord.Guild):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_integrations_update in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_integrations_update, guild))
@client.event
async def on_webhooks_update(channel: discord.abc.GuildChannel):
    for module in Modules.MODULES:
        if Utils.EVENT.on_webhooks_update in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_webhooks_update, channel))
@client.event
async def on_member_join(member: discord.Member):
    for module in Modules.MODULES:
        if Utils.EVENT.on_member_join in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_member_join, member))
@client.event
async def on_member_remove(member: discord.Member):
    for module in Modules.MODULES:
        if Utils.EVENT.on_member_remove in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_member_remove, member))
@client.event
async def on_member_update(before: discord.Member, after: discord.Member):
    for module in Modules.MODULES:
        if Utils.EVENT.on_member_update in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_member_update, before, after))
@client.event
async def on_user_update(before: discord.User, after: discord.User):
    for module in Modules.MODULES:
        if Utils.EVENT.on_user_update in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_user_update, before, after))
@client.event
async def on_guild_join(guild: discord.Guild):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_join in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_join, guild))
@client.event
async def on_guild_remove(guild: discord.Guild):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_remove in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_remove, guild))
@client.event
async def on_guild_update(before: discord.Guild, after: discord.Guild):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_update in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_update, before, after))
@client.event
async def on_guild_role_create(role: discord.Role):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_role_create in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_role_create, role))
@client.event
async def on_guild_role_delete(role: discord.Role):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_role_delete in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_role_delete, role))
@client.event
async def on_guild_role_update(before: discord.Role, after: discord.Role):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_role_update in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_role_update, before, after))
@client.event
async def on_guild_emojis_update(guild: discord.Guild, before: Sequence[discord.Emoji], after: Sequence[discord.Emoji]):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_emojis_update in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_emojis_update, guild, before, after))
@client.event
async def on_guild_available(guild: discord.Guild):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_available in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_available, guild))
@client.event
async def on_guild_unavailable(guild: discord.Guild):
    for module in Modules.MODULES:
        if Utils.EVENT.on_guild_unavailable in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_guild_unavailable, guild))
@client.event
async def on_voice_state_update(member: discord.Member, before: discord.VoiceState, after: discord.VoiceState):
    for module in Modules.MODULES:
        if Utils.EVENT.on_voice_state_update in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_voice_state_update, member, before, after))
@client.event
async def on_member_ban(guild: discord.Guild, user: Union[discord.User, discord.Member]):
    for module in Modules.MODULES:
        if Utils.EVENT.on_member_ban in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_member_ban, guild, user))
@client.event
async def on_member_unban(guild: discord.Guild, user: discord.User):
    for module in Modules.MODULES:
        if Utils.EVENT.on_member_unban in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_member_unban, guild, user))
@client.event
async def on_invite_create(invite: discord.Invite):
    for module in Modules.MODULES:
        if Utils.EVENT.on_invite_create in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_invite_create, invite))
@client.event
async def on_invite_delete(invite: discord.Invite):
    for module in Modules.MODULES:
        if Utils.EVENT.on_invite_delete in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_invite_delete, invite))
@client.event
async def on_group_join(channel: discord.GroupChannel, user: discord.User):
    for module in Modules.MODULES:
        if Utils.EVENT.on_group_join in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_group_join, channel, user))
@client.event
async def on_group_remove(channel: discord.GroupChannel, user: discord.User):
    for module in Modules.MODULES:
        if Utils.EVENT.on_group_remove in Modules.libs[module].EVENTS:
            create_task(Modules.libs[module].__main__(client, Utils.EVENT.on_group_remove, channel, user))
# blocks until the client disconnects
client.run(TOKEN)
|
Alberto-X3/Alberto-X3 | Modules/corona_status.py | <filename>Modules/corona_status.py
import discord
import Utils
from datetime import datetime, timedelta
from asyncio import sleep
from aiohttp import ClientSession, ClientResponse
# Bot-core events this module subscribes to.
EVENTS = [Utils.EVENT.on_raw_reaction_add, Utils.EVENT.on_ready]
# Channel hosting the status message and the flag-reaction menu messages.
id_channel: int = 808742319066579014
# The single message that gets edited with live COVID-19 statistics.
id_message: int = 809048491186716734
# Messages carrying the country-flag reactions (presumably 20 flags per
# message -- see the commented-out reset snippet in __main__).
msg_ids = [809048529246617630,
           809048553640165376,
           809048671080939550,
           809048677951602688,
           809048681420029982,
           809048685329252372,
           809048691264454676,
           809048696078991400,
           809048707353935952,
           809048712051163197,
           809048718082048061,
           809049260527452161,
           809049436340224081,
           809049491273023519]
supported = Utils.AttrDict({
"USA": {"reaction": "🇺🇸", "tag": b"USA"},
"India": {"reaction": "🇮🇳", "tag": b"India"},
"Brazil": {"reaction": "🇧🇷", "tag": b"Brazil"},
"Russia": {"reaction": "🇷🇺", "tag": b"Russia"},
"UK": {"reaction": "🇬🇧", "tag": b"UK"},
"France": {"reaction": "🇫🇷", "tag": b"France"},
"Spain": {"reaction": "🇪🇸", "tag": b"Spain"},
"Italy": {"reaction": "🇮🇹", "tag": b"Italy"},
"Turkey": {"reaction": "🇹🇷", "tag": b"Turkey"},
"Germany": {"reaction": "🇩🇪", "tag": b"Germany"},
"Colombia": {"reaction": "🇨🇴", "tag": b"Colombia"},
"Argentina": {"reaction": "🇦🇷", "tag": b"Argentina"},
"Mexico": {"reaction": "🇲🇽", "tag": b"Mexico"},
"Poland": {"reaction": "🇵🇱", "tag": b"Poland"},
"Iran": {"reaction": "🇮🇷", "tag": b"Iran"},
"South Africa": {"reaction": "🇿🇦", "tag": b"South Africa"},
"Ukraine": {"reaction": "🇺🇦", "tag": b"Ukraine"},
"Peru": {"reaction": "🇵🇪", "tag": b"Peru"},
"Indonesia": {"reaction": "🇮🇩", "tag": b"Indonesia"},
"Czech Republic": {"reaction": "🇨🇿", "tag": b"Czechia"},
"Netherlands": {"reaction": "🇳🇱", "tag": b"Netherlands"},
"Canada": {"reaction": "🇨🇦", "tag": b"Canada"},
"Portugal": {"reaction": "🇵🇹", "tag": b"Portugal"},
"Chile": {"reaction": "🇨🇱", "tag": b"Chile"},
"Romania": {"reaction": "🇷🇴", "tag": b"Romania"},
"Belgium": {"reaction": "🇧🇪", "tag": b"Belgium"},
"Israel": {"reaction": "🇮🇱", "tag": b"Israel"},
"Iraq": {"reaction": "🇮🇶", "tag": b"Iraq"},
"Sweden": {"reaction": "🇸🇪", "tag": b"Sweden"},
"Pakistan": {"reaction": "🇵🇰", "tag": b"Pakistan"},
"Philippines": {"reaction": "🇵🇭", "tag": b"Philippines"},
"Bangladesh": {"reaction": "🇧🇩", "tag": b"Bangladesh"},
"Switzerland": {"reaction": "🇨🇭", "tag": b"Switzerland"},
"Morocco": {"reaction": "🇲🇦", "tag": b"Morocco"},
"Austria": {"reaction": "🇦🇹", "tag": b"Austria"},
"Serbia": {"reaction": "🇷🇸", "tag": b"Serbia"},
"Japan": {"reaction": "🇯🇵", "tag": b"Japan"},
"Hungary": {"reaction": "🇭🇺", "tag": b"Hungary"},
"Saudi Arabia": {"reaction": "🇸🇦", "tag": b"Saudi Arabia"},
"Jordan": {"reaction": "🇯🇴", "tag": b"Jordan"},
"United Arabic Emirates": {"reaction": "🇦🇪", "tag": b"UAE"},
"Panama": {"reaction": "🇵🇦", "tag": b"Panama"},
"Lebanon": {"reaction": "🇱🇧", "tag": b"Lebanon"},
"Nepal": {"reaction": "🇳🇵", "tag": b"Nepal"},
"Slovakia": {"reaction": "🇸🇰", "tag": b"Slovakia"},
"Georgia": {"reaction": "🇬🇪", "tag": b"Georgia"},
"Belarus": {"reaction": "🇧🇾", "tag": b"Belarus"},
"Ecuador": {"reaction": "🇪🇨", "tag": b"Ecuador"},
"Malaysia": {"reaction": "🇲🇾", "tag": b"Malaysia"},
"Croatia": {"reaction": "🇭🇷", "tag": b"Croatia"},
"Azerbaijan": {"reaction": "🇦🇿", "tag": b"Azerbaijan"},
"Bolivia": {"reaction": "🇧🇴", "tag": b"Bolivia"},
"Bulgaria": {"reaction": "🇧🇬", "tag": b"Bulgaria"},
"Dominican Republic": {"reaction": "🇩🇴", "tag": b"Dominican Republic"},
"Tunisia": {"reaction": "🇹🇳", "tag": b"Tunisia"},
"Ireland": {"reaction": "🇮🇪", "tag": b"Ireland"},
"Denmark": {"reaction": "🇩🇰", "tag": b"Denmark"},
"Kazakhstan": {"reaction": "🇰🇿", "tag": b"Kazakhstan"},
"Costa Rica": {"reaction": "🇨🇷", "tag": b"Costa Rica"},
"Lithuania": {"reaction": "🇱🇹", "tag": b"Lithuania"},
"Slovenia": {"reaction": "🇸🇮", "tag": b"Slovenia"},
"Kuwait": {"reaction": "🇰🇼", "tag": b"Kuwait"},
"Egypt": {"reaction": "🇪🇬", "tag": b"Egypt"},
"Armenia": {"reaction": "🇦🇲", "tag": b"Armenia"},
"Greece": {"reaction": "🇬🇷", "tag": b"Greece"},
"Moldova": {"reaction": "🇲🇩", "tag": b"Moldova"},
"Guatemala": {"reaction": "🇬🇹", "tag": b"Guatemala"},
"Palestine": {"reaction": "🇵🇸", "tag": b"Palestine"},
"Honduras": {"reaction": "🇭🇳", "tag": b"Honduras"},
"Qatar": {"reaction": "🇶🇦", "tag": b"Qatar"},
"Ethiopia": {"reaction": "🇪🇹", "tag": b"Ethiopia"},
"Myanmar": {"reaction": "🇲🇲", "tag": b"Myanmar"},
"Nigeria": {"reaction": "🇳🇬", "tag": b"Nigeria"},
"Paraguay": {"reaction": "🇵🇾", "tag": b"Paraguay"},
"Oman": {"reaction": "🇴🇲", "tag": b"Oman"},
"Venezuela": {"reaction": "🇻🇪", "tag": b"Venezuela"},
"Libya": {"reaction": "🇱🇾", "tag": b"Libya"},
"Bosnia and Herzegovina": {"reaction": "🇧🇦", "tag": b"Bosnia and Herzegovina"},
"Algeria": {"reaction": "🇩🇿", "tag": b"Algeria"},
"Bahrain": {"reaction": "🇧🇭", "tag": b"Bahrain"},
"Kenya": {"reaction": "🇰🇪", "tag": b"Kenya"},
"North Macedonia": {"reaction": "🇲🇰", "tag": b"North Macedonia"},
"China": {"reaction": "🇨🇳", "tag": b"China"},
"Albania": {"reaction": "🇦🇱", "tag": b"Albania"},
"Kyrgyzstan": {"reaction": "🇰🇬", "tag": b"Kyrgyzstan"},
"South Korea": {"reaction": "🇰🇷", "tag": b"S. Korea"},
"Uzbekistan": {"reaction": "🇺🇿", "tag": b"Uzbekistan"},
"Latvia": {"reaction": "🇱🇻", "tag": b"Latvia"},
"Ghana": {"reaction": "🇬🇭", "tag": b"Ghana"},
"Sri Lanka": {"reaction": "🇱🇰", "tag": b"Sri Lanka"},
"Montenegro": {"reaction": "🇲🇪", "tag": b"Montenegro"},
"Norway": {"reaction": "🇳🇴", "tag": b"Norway"},
"Zambia": {"reaction": "🇿🇲", "tag": b"Zambia"},
"Singapore": {"reaction": "🇸🇬", "tag": b"Singapore"},
"El Salvador": {"reaction": "🇸🇻", "tag": b"El Salvador"},
"Afghanistan": {"reaction": "🇦🇫", "tag": b"Afghanistan"},
"Luxembourg": {"reaction": "🇱🇺", "tag": b"Luxembourg"},
"Estonia": {"reaction": "🇪🇪", "tag": b"Estonia"},
"Finland": {"reaction": "🇫🇮", "tag": b"Finland"},
"Uruguay": {"reaction": "🇺🇾", "tag": b"Uruguay"},
"Mozambique": {"reaction": "🇲🇿", "tag": b"Mozambique"},
"Uganda": {"reaction": "🇺🇬", "tag": b"Uganda"},
"Namibia": {"reaction": "🇳🇦", "tag": b"Namibia"},
"Zimbabwe": {"reaction": "🇿🇼", "tag": b"Zimbabwe"},
"Cuba": {"reaction": "🇨🇺", "tag": b"Cuba"},
"Cyprus": {"reaction": "🇨🇾", "tag": b"Cyprus"},
"Cameroon": {"reaction": "🇨🇲", "tag": b"Cameroon"},
"Ivory Coast": {"reaction": "🇨🇮", "tag": b"Ivory Coast"},
"Senegal": {"reaction": "🇸🇳", "tag": b"Senegal"},
"Australia": {"reaction": "🇦🇺", "tag": b"Australia"},
"Malawi": {"reaction": "🇲🇼", "tag": b"Malawi"},
"Sudan": {"reaction": "🇸🇩", "tag": b"Sudan"},
"Botswana": {"reaction": "🇧🇼", "tag": b"Botswana"},
"Thailand": {"reaction": "🇹🇭", "tag": b"Thailand"},
"Democratic Republic of Congo": {"reaction": "🇨🇬", "tag": b"DRC"},
"Angola": {"reaction": "🇦🇴", "tag": b"Angola"},
"Madagascar": {"reaction": "🇲🇬", "tag": b"Madagascar"},
"Malta": {"reaction": "🇲🇹", "tag": b"Malta"},
"French Polynesia": {"reaction": "🇵🇫", "tag": b"French Polynesia"},
"Jamaica": {"reaction": "🇯🇲", "tag": b"Jamaica"},
"Maldives": {"reaction": "🇲🇻", "tag": b"Maldives"},
"Mauritania": {"reaction": "🇲🇷", "tag": b"Mauritania"},
"Rwanda": {"reaction": "🇷🇼", "tag": b"Rwanda"},
"French Guiana": {"reaction": "🇬🇫", "tag": b"French Guiana"},
"Swaziland": {"reaction": "🇸🇿", "tag": b"Eswatini"},
"Guinea": {"reaction": "🇬🇳", "tag": b"Guinea"},
"Syria": {"reaction": "🇸🇾", "tag": b"Syria"},
"Cape Verde": {"reaction": "🇨🇻", "tag": b"Cabo Verde"},
"Tajikistan": {"reaction": "🇹🇯", "tag": b"Tajikistan"},
"Belize": {"reaction": "🇧🇿", "tag": b"Belize"},
"Haiti": {"reaction": "🇭🇹", "tag": b"Haiti"},
"Gabon": {"reaction": "🇬🇦", "tag": b"Gabon"},
"Mayotte": {"reaction": "🇾🇹", "tag": b"Mayotte"},
"Burkina Faso": {"reaction": "🇧🇫", "tag": b"Burkina Faso"},
"Hong Kong": {"reaction": "🇭🇰", "tag": b"Hong Kong"},
"Réunion": {"reaction": "🇷🇪", "tag": b"Réunion"},
"Andorra": {"reaction": "🇦🇩", "tag": b"Andorra"},
"Lesotho": {"reaction": "🇱🇸", "tag": b"Lesotho"},
"Guadeloupe": {"reaction": "🇬🇵", "tag": b"Guadeloupe"},
"Suriname": {"reaction": "🇸🇷", "tag": b"Suriname"},
"Bahamas": {"reaction": "🇧🇸", "tag": b"Bahamas"},
"Mali": {"reaction": "🇲🇱", "tag": b"Mali"},
"Congo": {"reaction": "🇨🇩", "tag": b"Congo"},
"Guyana": {"reaction": "🇬🇾", "tag": b"Guyana"},
"Trinidad & Tobago": {"reaction": "🇹🇹", "tag": b"Trinidad and Tobago"},
"Aruba": {"reaction": "🇦🇼", "tag": b"Aruba"},
"Martinique": {"reaction": "🇲🇶", "tag": b"Martinique"},
"Nicaragua": {"reaction": "🇳🇮", "tag": b"Nicaragua"},
"Iceland": {"reaction": "🇮🇸", "tag": b"Iceland"},
"Djibouti": {"reaction": "🇩🇯", "tag": b"Djibouti"},
"Equatorial Guinea": {"reaction": "🇬🇶", "tag": b"Equatorial Guinea"},
"Togo": {"reaction": "🇹🇬", "tag": b"Togo"},
"Central African Republic": {"reaction": "🇨🇫", "tag": b"CAR"},
"Somalia": {"reaction": "🇸🇴", "tag": b"Somalia"},
"South Sudan": {"reaction": "🇸🇸", "tag": b"South Sudan"},
"Niger": {"reaction": "🇳🇪", "tag": b"Niger"},
"Curaçao": {"reaction": "🇨🇼", "tag": b"Curaçao"},
"Gambia": {"reaction": "🇬🇲", "tag": b"Gambia"},
"Benin": {"reaction": "🇧🇯", "tag": b"Benin"},
"Gibraltar": {"reaction": "🇬🇮", "tag": b"Gibraltar"},
"Jersey": {"reaction": "🇯🇪", "tag": b"Channel Islands"},
"Sierra Leone": {"reaction": "🇸🇱", "tag": b"Sierra Leone"},
"Chad": {"reaction": "🇹🇩", "tag": b"Chad"},
"San Marino": {"reaction": "🇸🇲", "tag": b"San Marino"},
"Comoros": {"reaction": "🇰🇲", "tag": b"Comoros"},
"Guinea-Bissau": {"reaction": "🇬🇼", "tag": b"Guinea-Bissau"},
"Liechtenstein": {"reaction": "🇱🇮", "tag": b"Liechtenstein"},
"Eritrea": {"reaction": "🇪🇷", "tag": b"Eritrea"},
"New Zealand": {"reaction": "🇳🇿", "tag": b"New Zealand"},
"Mongolia": {"reaction": "🇲🇳", "tag": b"Mongolia"},
"Yemen": {"reaction": "🇾🇪", "tag": b"Yemen"},
"Vietnam": {"reaction": "🇻🇳", "tag": b"Vietnam"},
"Saint Lucia": {"reaction": "🇱🇨", "tag": b"Saint Lucia"},
"Liberia": {"reaction": "🇱🇷", "tag": b"Liberia"},
"Sint Maarten": {"reaction": "🇸🇽", "tag": b"Sint Maarten"},
"Barbados": {"reaction": "🇧🇧", "tag": b"Barbados"},
"Turks & Caicos Islands": {"reaction": "🇹🇨", "tag": b"Turks and Caicos"},
"Burundi": {"reaction": "🇧🇮", "tag": b"Burundi"},
"Monaco": {"reaction": "🇲🇨", "tag": b"Monaco"},
"Seychelles": {"reaction": "🇸🇨", "tag": b"Seychelles"},
"São Tomé & Príncipe": {"reaction": "🇸🇹", "tag": b"Sao Tome and Principe"},
"St. Vincent & Grenadines": {"reaction": "🇻🇨", "tag": b"St. Vincent Grenadines"},
"Saint Martin": {"reaction": "🇲🇫", "tag": b"Saint Martin"},
"Taiwan": {"reaction": "🇹🇼", "tag": b"Taiwan"},
"Papua New Guinea": {"reaction": "🇵🇬", "tag": b"Papua New Guinea"},
"Bhutan": {"reaction": "🇧🇹", "tag": b"Bhutan"},
"Diamond Princess": {"reaction": "🚢", "tag": b"Diamond Princess"},
"Bermuda": {"reaction": "🇧🇲", "tag": b"Bermuda"},
"Faroe Islands": {"reaction": "🇫🇴", "tag": b"Faeroe Islands"},
"Mauritius": {"reaction": "🇲🇺", "tag": b"Mauritius"},
"Tanzania": {"reaction": "🇹🇿", "tag": b"Tanzania"},
"Cambodia": {"reaction": "🇰🇭", "tag": b"Cambodia"},
"Isle of Man": {"reaction": "🇮🇲", "tag": b"Isle of Man"},
"Cayman Islands": {"reaction": "🇰🇾", "tag": b"Cayman Islands"},
"Caribbean Netherlands": {"reaction": "🇧🇶", "tag": b"Caribbean Netherlands"},
"St. Barthélemy": {"reaction": "🇧🇱", "tag": b"St. Barth"},
"Antigua and Barbuda": {"reaction": "🇦🇬", "tag": b"Antigua and Barbuda"},
"Brunei": {"reaction": "🇧🇳", "tag": b"Brunei "},
"Grenada": {"reaction": "🇬🇩", "tag": b"Grenada"},
"Dominica": {"reaction": "🇩🇲", "tag": b"Dominica"},
"British Virgin Islands": {"reaction": "🇻🇬", "tag": b"British Virgin Islands"},
"Timor-Leste": {"reaction": "🇹🇱", "tag": b"Timor-Leste"},
"Fiji": {"reaction": "🇫🇯", "tag": b"Fiji"},
"Falkland Islands": {"reaction": "🇫🇰", "tag": b"Falkland Islands"},
"New Caledonia": {"reaction": "🇳🇨", "tag": b"New Caledonia"},
"Macao Sar China": {"reaction": "🇲🇴", "tag": b"Macao"},
"Laos": {"reaction": "🇱🇦", "tag": b"Laos"},
"Saint Kitts and Nevis": {"reaction": "🇰🇳", "tag": b"Saint Kitts and Nevis"},
"Greenland": {"reaction": "🇬🇱", "tag": b"Greenland"},
"Vatican City": {"reaction": "🇻🇦", "tag": b"Vatican City"},
"Saint Pierre Miquelon": {"reaction": "🇵🇲", "tag": b"Saint Pierre Miquelon"},
"Montserrat": {"reaction": "🇲🇸", "tag": b"Montserrat"},
"Anguilla": {"reaction": "🇦🇮", "tag": b"Anguilla"},
"Solomon Islands": {"reaction": "🇸🇧", "tag": b"Solomon Islands"},
"Western Sahara": {"reaction": "🇪🇭", "tag": b"Western Sahara"},
"MS Zaandam": {"reaction": "🛳️", "tag": b"MS Zaandam"},
"Wallis and Futuna": {"reaction": "🇼🇫", "tag": b"Wallis and Futuna"},
"Marshall Islands": {"reaction": "🇲🇭", "tag": b"Marshall Islands"},
"Samoa": {"reaction": "🇼🇸", "tag": b"Samoa"},
"Micronesia": {"reaction": "🇫🇲", "tag": b"Micronesia"},
"Vanuatu": {"reaction": "🇻🇺", "tag": b"Vanuatu"}
})
# Cell delimiter in worldometers' HTML statistics table (raw bytes).
sep = b"</td>"
# Scrape source for all statistics.
url = r"https://www.worldometers.info/coronavirus/"
async def __main__(client: discord.Client, _event: int, reaction: discord.RawReactionActionEvent = None):
    """Module entry point: react to flag clicks and keep the stats fresh.

    on_raw_reaction_add: switch the status message to the clicked country.
    on_ready: loop forever, re-scraping once per minute.
    """
    try:
        channel: discord.TextChannel = client.get_channel(id_channel)
        try:
            if _event == Utils.EVENT.on_raw_reaction_add:
                message: discord.Message = await channel.fetch_message(id_message)
                # Ignore reactions placed by the bot itself.
                if reaction.member == client.user:
                    return
                ''' # To reset the reactions
                msgs = [await channel.fetch_message(_id) for _id in msg_ids]
                for msg in msgs:
                    await msg.clear_reactions()
                _ = 0
                for key in list(supported.keys()):
                    await msgs[_//20].add_reaction(discord.PartialEmoji(name=supported[key].reaction))
                    _ += 1
                '''
                key = ""
                if reaction.channel_id == id_channel:
                    msg = await channel.fetch_message(reaction.message_id)
                    # Identify which country flag was clicked on which menu
                    # message; `key` keeps the last value of the inner loop.
                    for _id in msg_ids:
                        if reaction.message_id == _id:
                            for key in list(supported.keys()):
                                if reaction.emoji == discord.PartialEmoji(name=supported[key].reaction):
                                    break
                            break
                    # Pop the user's reaction so the menu stays clickable.
                    await msg.remove_reaction(reaction.emoji, reaction.member)
                    await update(key, message, True)
            elif _event == Utils.EVENT.on_ready:
                while True:
                    try:
                        message: discord.Message = await channel.fetch_message(id_message)
                        # Parse the current country back out of the message
                        # header line.
                        # NOTE(review): split()[1] keeps only the first word
                        # after the emoji -- multi-word countries such as
                        # "South Africa" may not round-trip; confirm.
                        key = message.content.splitlines()[0].split()[1].replace("*", "")[:-2]
                        await update(key, message)
                        # Sleep to the top of the next minute.
                        await sleep((timedelta(minutes=1) - timedelta(seconds=datetime.utcnow().second,
                                                                      microseconds=datetime.utcnow().microsecond)).total_seconds())
                    except (KeyError, IndexError, ValueError, TypeError,
                            AttributeError, RuntimeError):
                        # Transient parse/fetch failures: skip this cycle.
                        pass
        except (KeyError, IndexError, ValueError, TypeError,
                AttributeError, RuntimeError):
            pass
    except Exception as e:
        await Utils.send_exception(client=client, exception=e, source_name=__name__)
async def update(key: str, message: discord.Message, new=False):
    """Scrape worldometers and rewrite *message* with stats for *key*.

    When *new* is True a loading placeholder is shown first (used right
    after the user picks a different country).
    """
    if new:
        msg = f"""
__**<a:loading:832383867700772866> {key}**__
```md
COVID-19 Cases
------------------------
loading...
Deaths
------------------------
loading...
Recovered
------------------------
loading...
Active
------------------------
loading...
> loading...```
@here is the source: <{url}> :)
"""
        await message.edit(content=msg)
    async with ClientSession() as session:
        resp = session.get(url)
        data: ClientResponse = await resp
        content = await data.read()
    # Locate this country's table row and split it on </td> boundaries;
    # each cell's payload is whatever follows the last ">" in the chunk.
    pos = content.find(supported[key].tag)
    data: Utils.List[bytes] = content[pos:].split(sep)
    cases: bytes = data[1].split(b">")[-1]
    deaths: bytes = data[3].split(b">")[-1]
    recovered: bytes = data[5].split(b">")[-1]
    active: bytes = data[7].split(b">")[-1]
    msg = f"""
__**{supported[key].reaction} {key}**__
```md
COVID-19 Cases
------------------------
{cases.decode():11}
Deaths
------------------------
{deaths.decode():11}
Recovered
------------------------
{recovered.decode():11}
Active
------------------------
{active.decode():11}
> UTC {datetime.utcnow().date()} {datetime.utcnow().hour}:{"0" + str(datetime.utcnow().minute) if datetime.utcnow().minute < 10 else datetime.utcnow().minute}```
@here is the source: <{url}> :)
"""
    await message.edit(content=msg)
|
wolcomm/eos-prefix-list-agent | prefix_list_agent/worker.py | <filename>prefix_list_agent/worker.py<gh_stars>1-10
# Copyright (c) 2019 <NAME>. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""prefix_list_agent worker functions."""
from __future__ import print_function
import collections
import json
import multiprocessing
import os
import re
import signal
import sys
import time
import urllib2
from prefix_list_agent.base import PrefixListBase
from prefix_list_agent.exceptions import handle_sigterm, TermException
class PrefixListWorker(multiprocessing.Process, PrefixListBase):
    """Worker to fetch and process IRR data.

    Runs as a child process; results and errors are reported back to the
    parent through one-way pipes (see the p_*/c_* properties).
    """
    def __init__(self, endpoint, path, eapi, update_delay, *args, **kwargs):
        """Initialise a PrefixListWorker instance.

        endpoint: base URL of the RPTK web service.
        path: directory under which prefix-list files are written.
        eapi: EOS SDK eAPI manager used to run show commands.
        update_delay: seconds to pause between per-list refreshes (or None).
        """
        super(PrefixListWorker, self).__init__(*args, **kwargs)
        PrefixListBase.__init__(self)
        self.endpoint = endpoint
        self.path = path
        self.eapi = eapi
        self.update_delay = update_delay
        # Matches "file:<path>/<policy>/<file>" prefix-list sources that
        # this agent manages.
        self.path_re = re.compile(r"^file:{}/(?P<policy>\w+)/(?P<file>[-.:\w]+)$"  # noqa: E501
                                  .format(self.path.rstrip("/")))
        # One-way pipes: the child sends errors/results, the parent reads.
        self._p_err, self._c_err = multiprocessing.Pipe(duplex=False)
        self._p_data, self._c_data = multiprocessing.Pipe(duplex=False)
    @property
    def p_err(self):
        """Get 'p_err' property: parent-side (read) end of the error pipe."""
        return self._p_err

    @property
    def c_err(self):
        """Get 'c_err' property: child-side (write) end of the error pipe."""
        return self._c_err

    @property
    def p_data(self):
        """Get 'p_data' property: parent-side (read) end of the result pipe."""
        return self._p_data

    @property
    def c_data(self):
        """Get 'c_data' property: child-side (write) end of the result pipe."""
        return self._c_data
    def run(self):
        """Run the worker process: fetch IRR data, write files, refresh."""
        self.info("Worker started")
        # Convert SIGTERM into TermException so we can unwind cleanly.
        signal.signal(signal.SIGTERM, handle_sigterm)
        try:
            policies = self.get_policies()
            configured = self.get_configured(policies)
            data = self.get_data(configured)
            stats, written_objs = self.write_results(configured, data)
            self.refresh_all(written_objs)
            # Report summary counters back to the parent.
            self.c_data.send(stats)
        except TermException:
            self.notice("Got SIGTERM signal: exiting.")
            # NOTE(review): pid check presumably guards against the handler
            # firing outside the worker process itself -- confirm.
            if os.getpid() == self.pid:
                sys.exit(127 + signal.SIGTERM)
        except Exception as e:
            self.err(e)
            try:
                # Exceptions must be picklable to cross the pipe; fall back
                # to a plain Exception carrying just the message.
                self.c_err.send(e)
            except TypeError:  # pragma: no cover
                self.c_err.send(Exception(str(e)))
        finally:
            self.c_err.close()
            self.c_data.close()
    def get_configured(self, policies):
        """Get the prefix-lists in running-config.

        Returns {policy: {prefix_list_name: {afi: file_name}}} for every
        prefix-list whose source matches this agent's managed path.
        """
        configured = {p: collections.defaultdict(dict) for p in policies}
        self.info("Searching for prefix-lists with source matching {}"
                  .format(self.path_re.pattern))
        for afi, cmd in (("ipv4", "show ip prefix-list"),
                         ("ipv6", "show ipv6 prefix-list")):
            data = self.eapi_request(cmd, result_node="ipPrefixLists")
            self.debug("Got response: {}".format(data))
            for name, config in data.items():
                try:
                    source = config["ipPrefixListSource"]
                except KeyError:
                    # Not file-sourced: not ours to manage.
                    continue
                self.debug("Testing {}, source {}".format(name, source))
                m = self.path_re.match(source)
                if m:
                    self.debug("Source matched")
                    (policy, file) = m.groups()
                    if policy in policies:
                        configured[policy][name][afi] = file
                    else:
                        self.warning("Ignoring unknown policy {}"
                                     .format(policy))
                else:
                    self.debug("No match")
        return configured
def refresh_prefix_list(self, afi, prefix_list=None):
"""Refresh all prefix-lists or a single named prefix-list."""
cmd = "refresh {} prefix-list".format(afi)
if prefix_list is not None:
cmd += " {}".format(prefix_list)
messages = self.eapi_request(cmd, result_node="messages",
allow_empty=True)
for msg in messages:
for submsg in msg.replace("\nNum", " -").rstrip().split("\n"):
self.info(submsg)
    def refresh_all(self, written_objs):
        """Refresh prefix-lists.

        With no update_delay configured, refresh everything per AFI in one
        shot; otherwise refresh each written prefix-list individually,
        pausing update_delay seconds between refreshes.
        """
        self.info("Refreshing source-based prefix-lists")
        for afi in ("ip", "ipv6"):
            if self.update_delay is None:
                self.refresh_prefix_list(afi)
            else:
                for prefix_list in written_objs:
                    self.refresh_prefix_list(afi, prefix_list)
                    time.sleep(self.update_delay)
        self.notice("Prefix-lists refreshed successfully")
def get_policies(self):
"""Get the list of valid policy names from RPTK."""
url_path = "/policies"
self.info("Trying to get policy data from {}".format(url_path))
policies = self.rptk_request(url_path)
self.debug("Got policies: {}".format(policies.keys()))
return policies
def get_data(self, configured):
"""Get IRR data for the configured prefix-list objects."""
data = dict()
self.info("Querying for IRR data")
for policy, objs in configured.items():
if not objs:
continue
self.info("Trying bulk query")
try:
result = self.get_data_bulk(policy, objs)
data.update({policy: result})
continue
except Exception as e:
self.err(e)
self.info("Failing back to indiviual queries")
data[policy] = dict()
for obj in objs:
try:
result = self.get_data_obj(policy, obj)
except Exception as e:
self.err(e)
continue
data[policy].update(result)
return data
def get_data_bulk(self, policy, objs):
"""Get IRR data in bulk."""
url_path = "/json/query?policy={}&".format(policy) + \
"&".join(["objects={}".format(obj) for obj in objs])
self.info("Trying to get prefix data from {}".format(url_path))
result = self.rptk_request(url_path)
self.debug("Got prefix data")
return result
def get_data_obj(self, policy, obj):
"""Get IRR data for a single object."""
url_path = "/json/{}/{}".format(obj, policy)
self.info("Trying to get prefix data from {}".format(url_path))
result = self.rptk_request(url_path)
self.debug("Got prefix data")
return result
    def write_results(self, configured, data):
        """Write prefix-list data to files.

        Returns (stats, written_objs): per-file succeeded/failed counters
        and the set of object names whose files were written, consumed by
        'refresh_all()'.
        """
        stats = {"succeeded": 0, "failed": 0}
        written_objs = set()
        for policy, objs in configured.items():
            self.info("Writing files for policy {}".format(policy))
            if not objs:
                self.info("No objects with policy: {}".format(policy))
                continue
            policy_dir = os.path.join(self.path, policy)
            if not os.path.isdir(policy_dir):
                self.info("Creating directory {}".format(policy_dir))
                os.makedirs(policy_dir)
            for obj, config in objs.items():
                self.info("Trying to write files for {}/{}"
                          .format(obj, policy))
                if obj in data[policy]:
                    for afi, file in config.items():
                        path = os.path.join(policy_dir, file)
                        entries = data[policy][obj][afi]
                        try:
                            self.write_prefix_list(path, entries, afi)
                        except Exception:  # pragma: no cover
                            stats["failed"] += 1
                            continue
                        stats["succeeded"] += 1
                        written_objs.add(obj)
                else:
                    self.warning("No prefix data for {}/{}"
                                 .format(obj, policy))
                    # All of this object's files count as failed.
                    stats["failed"] += len(config)
        return stats, written_objs
def write_prefix_list(self, path, entries, afi):
"""Write prefix-list to file."""
self.info("Trying to write {}".format(path))
try:
with open(path, "w") as f:
for i, p in enumerate(entries):
f.write(self.prefix_list_line(i, p))
except Exception as e:
self.err("Failed to write {}: {}".format(path, e))
raise e
def prefix_list_line(self, index, entry):
"""Generate a line in a prefix-list."""
line = "seq {} permit {}".format(index + 1, entry["prefix"])
if not entry["exact"]:
if "greater-equal" in entry:
line += " ge {}".format(entry["greater-equal"])
if "less-equal" in entry:
line += " le {}".format(entry["less-equal"])
line += "\n"
return line
    def eapi_request(self, cmd, result_node, allow_empty=False):
        """Run an enable-mode eAPI command and return *result_node*.

        Raises RuntimeError if eAPI reports failure, or re-raises KeyError
        if the node is missing and allow_empty is False.
        """
        self.debug("Calling eAPI command {}".format(cmd))
        try:
            resp = self.eapi.run_show_cmd(cmd)
        except Exception as e:
            self.err("eAPI request failed: {}".format(e))
            raise e
        if resp.success():
            data = self.json_load(resp.responses()[0])
        else:
            e = RuntimeError("eAPI request failed: {} ({})"
                             .format(resp.error_message(),
                                     resp.error_code()))
            self.err(e)
            raise e
        try:
            result = data[result_node]
        except KeyError as e:
            if allow_empty:
                # Some commands legitimately return no result node.
                result = {}
            else:
                self.err("Failed to get result data: {}".format(e))
                raise e
        self.debug("eAPI request successful")
        return result
    def rptk_request(self, url_path):
        """Perform a query against the RPTK endpoint.

        Returns the deserialised JSON response body.
        Note: uses urllib2, i.e. this module targets Python 2.
        """
        url = "{}/{}".format(self.endpoint.rstrip("/"),
                             url_path.lstrip("/"))
        self.debug("Querying RPTK endpoint at {}".format(url))
        try:
            resp = urllib2.urlopen(url)
        except urllib2.HTTPError as e:
            self.err("Request failed: {} {}".format(e.code, e.reason))
            raise e
        except urllib2.URLError as e:
            self.err("Request failed: {}".format(e))
            raise e
        self.debug("Request successful: {}".format(resp.getcode()))
        result = self.json_load(resp)
        return result
def json_load(self, obj):
"""Deserialise JSON from a string or file-like object."""
def fail(e):
self.err("Failed to deserialise response: {}".format(e))
raise e
self.debug("Deserialising JSON response")
try:
self.debug("Trying 'json.load' method")
result = json.load(obj)
except AttributeError:
self.debug("Object has no 'read' method")
self.debug("Trying 'json.loads' method")
try:
result = json.loads(obj)
except Exception as e:
fail(e)
except Exception as e:
fail(e)
self.debug("Successfully deserialised JSON")
return result
    @property
    def data(self):
        """Get data from the worker.

        Non-blocking: returns None if nothing has been reported yet.
        """
        if self.p_data.poll():
            return self.p_data.recv()

    @property
    def error(self):
        """Get exception raised by worker.

        Non-blocking: returns None when no error has been sent.
        """
        if self.p_err.poll():
            return self.p_err.recv()
|
wolcomm/eos-prefix-list-agent | tests/00_unit/test_exceptions.py | <reponame>wolcomm/eos-prefix-list-agent
# Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Tests for prefix_list_agent.exceptions module."""
from __future__ import print_function
import signal
import pytest
from prefix_list_agent.exceptions import TermException, handle_sigterm
class TestExceptions(object):
    """Test cases for exceptions module."""

    def test_term(self):
        """The SIGTERM handler must raise TermException."""
        with pytest.raises(TermException) as excinfo:
            handle_sigterm(signal.SIGTERM, None)
        assert excinfo.type is TermException
|
wolcomm/eos-prefix-list-agent | tests/01_integration/conftest.py | <reponame>wolcomm/eos-prefix-list-agent<gh_stars>1-10
# Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Fixtures for PrefixListAgent integration tests."""
from __future__ import print_function
import time
import pytest
from rptk_stub import RptkStubProcess
@pytest.fixture(scope="module")
def node():
    """Provide a pyeapi node connected to the local unix socket.

    Retries for up to ~3 minutes while the eAPI socket comes up.
    """
    last_exc = None
    for _ in range(60):
        try:
            import pyeapi
            conn = pyeapi.connect(transport="socket")
            node = pyeapi.client.Node(conn)
            assert node.version
            return node
        except Exception as e:
            # Bug fix: the original re-raised `e` in the for-else clause,
            # but Python 3 unbinds the except target after the block, so
            # the failure path raised NameError instead of the real error.
            last_exc = e
            time.sleep(3)
    raise RuntimeError("could not connect to eAPI: {}".format(last_exc))
@pytest.fixture(scope="module")
def configure_daemon(node):
    """Configure the agent as an EOS ProcMgr daemon.

    Depends on the `node` fixture for an eAPI connection. The agent
    binary is expected at /root/bin/PrefixListAgent in the test image.
    """
    agent_config = [
        "trace PrefixListAgent-PrefixListAgent setting PrefixList*/*",
        "daemon PrefixListAgent",
        "exec /root/bin/PrefixListAgent",
        "option rptk_endpoint value http://127.0.0.1:8000/",
        "option refresh_interval value 10",
        "option update_delay value 1",
        "no shutdown"
    ]
    node.config(agent_config)
    # Give ProcMgr a moment to launch the agent before tests run.
    time.sleep(3)
    yield
@pytest.fixture(scope="module")
def rptk_stub():
    """Launch a stub version of an rptk web application.

    The stub runs in a separate process for the duration of the module.
    """
    process = RptkStubProcess()
    process.start()
    # Robustness fix: guarantee the stub is torn down even when an
    # exception propagates into the fixture through the yield.
    try:
        yield
    finally:
        process.terminate()
        process.join()
|
wolcomm/eos-prefix-list-agent | prefix_list_agent/agent.py | # Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""prefix_list_agent agent implementation."""
from __future__ import print_function
import collections
import datetime
import filecmp
import os
import platform
import shutil
import signal
import tempfile
import eossdk
from prefix_list_agent.base import PrefixListBase
from prefix_list_agent.worker import PrefixListWorker
class PrefixListAgent(PrefixListBase, eossdk.AgentHandler,
                      eossdk.TimeoutHandler, eossdk.FdHandler):
    """An EOS SDK based agent that creates prefix-list policy objects."""

    # Sysdb mount profiles the agent needs (see set_sysdb_mp).
    sysdb_mounts = ("agent",)
    # Option names accepted from daemon `option <name> value <v>` config.
    agent_options = ("rptk_endpoint", "source_dir", "refresh_interval",
                     "update_delay")
    @classmethod
    def set_sysdb_mp(cls, name):
        """Create the SysdbMountProfiles file for the agent.

        Returns True if a new profile file was written, False when an
        identical file was already in place.
        """
        # set the path (lib dir depends on platform word size)
        arch = platform.architecture()[0]
        if arch == "32bit":  # pragma: no cover
            lib_dir = "/usr/lib"
        elif arch == "64bit":
            lib_dir = "/usr/lib64"
        else:  # pragma: no cover
            raise RuntimeError("Unknown architecture '{}'".format(arch))
        profile_path = os.path.join(lib_dir, "SysdbMountProfiles", name)
        # get a tempfile for writing the profile to
        with tempfile.NamedTemporaryFile() as tmp:
            # write the profile file
            # NOTE(review): writes str to a binary-mode tempfile -- fine on
            # Python 2, would need bytes/encoding if ported to Python 3.
            tmp.write("agentName:{}-%sliceId\n\n".format(name))
            for profile in cls.sysdb_mounts:
                tmp.write("Include: EosSdk_{}.include\n".format(profile))
            tmp.flush()
            # check whether an existing file matches and bail out
            if os.path.isfile(profile_path):
                if filecmp.cmp(profile_path, tmp.name, shallow=False):
                    return False
            # copy the tempfile into place
            shutil.copy(tmp.name, profile_path)
        return True
    def __init__(self, sdk):
        """Initialise the agent instance."""
        # Set up tracing
        PrefixListBase.__init__(self)
        # get sdk managers
        self.agent_mgr = sdk.get_agent_mgr()
        self.timeout_mgr = sdk.get_timeout_mgr()
        self.eapi_mgr = sdk.get_eapi_mgr()
        # init sdk handlers
        eossdk.AgentHandler.__init__(self, self.agent_mgr)
        eossdk.TimeoutHandler.__init__(self, self.timeout_mgr)
        eossdk.FdHandler.__init__(self)
        # set worker process to None until run() spawns one
        self.worker = None
        # pipe ends currently being watched for worker output
        self.watching = set()
        # set default config options (overridden by configure())
        self._rptk_endpoint = None
        self._source_dir = "/tmp/prefix-lists"
        self._refresh_interval = 3600
        self._update_delay = None
        # create state containers published via the status properties
        self._status = None
        self._last_start = None
        self._last_end = None
        self._result = None
    @property
    def rptk_endpoint(self):
        """Get 'rptk_endpoint' property."""
        return self._rptk_endpoint

    @rptk_endpoint.setter
    def rptk_endpoint(self, url):
        """Set 'rptk_endpoint' property: base URL of the RPTK service."""
        self._rptk_endpoint = url

    @property
    def source_dir(self):
        """Get 'source_dir' property."""
        return self._source_dir

    @source_dir.setter
    def source_dir(self, path):
        """Set 'source_dir' property: directory for prefix-list files."""
        self._source_dir = path
@property
def refresh_interval(self):
"""Get 'refresh_interval' property."""
return self._refresh_interval
@refresh_interval.setter
def refresh_interval(self, i):
"""Set 'refresh_interval' property."""
i = int(i)
if i in range(10, 86400):
self._refresh_interval = i
else:
raise ValueError("refresh_interval must be in range 1 - 86399")
@property
def update_delay(self):
"""Get 'update_delay' property."""
return self._update_delay
@update_delay.setter
def update_delay(self, i):
"""Set 'update_delay' property."""
if i is not None:
i = int(i)
if i not in range(1, 121):
raise ValueError("update_delay must be in range 1 - 120")
self._update_delay = i
    @property
    def status(self):
        """Get 'status' property."""
        return self._status

    @status.setter
    def status(self, s):
        """Set 'status' property and publish it to the agent status table."""
        self._status = s
        self.agent_mgr.status_set("status", self.status)
        self.info("Status: {}".format(self.status))

    @property
    def result(self):
        """Get 'result' property."""
        return self._result

    @result.setter
    def result(self, r):
        """Set 'result' property and publish it to the agent status table."""
        self._result = r
        self.agent_mgr.status_set("result", self.result)
        self.notice("Result: {}".format(self.result))
@property
def last_start(self):
    """Get the 'last_start' timestamp."""
    # (docstring previously said "Set"; this is the getter)
    return self._last_start

@last_start.setter
def last_start(self, ts):
    """Set the 'last_start' timestamp.

    Publishes the stringified timestamp via the agent manager.

    Raises:
        TypeError: if 'ts' is not a datetime.datetime instance.
    """
    if not isinstance(ts, datetime.datetime):
        raise TypeError("Expected datetime.datetime, got {}".format(ts))
    self._last_start = ts
    self.agent_mgr.status_set("last_start", str(self.last_start))
    self.info("Last start: {}".format(ts))
@property
def last_end(self):
    """Get the 'last_end' timestamp."""
    # (docstring previously said "Set"; this is the getter)
    return self._last_end

@last_end.setter
def last_end(self, ts):
    """Set the 'last_end' timestamp.

    Publishes the stringified timestamp via the agent manager.

    Raises:
        TypeError: if 'ts' is not a datetime.datetime instance.
    """
    if not isinstance(ts, datetime.datetime):
        raise TypeError("Expected datetime.datetime, got {}".format(ts))
    self._last_end = ts
    self.agent_mgr.status_set("last_end", str(self.last_end))
    self.info("Last end: {}".format(ts))
def configure(self):
    """Read and set all configuration options.

    Iterates the options known to the agent manager and applies each
    one through 'set' (which ignores unknown keys).
    """
    self.info("Reading configuration options")
    for key in self.agent_mgr.agent_option_iter():
        self.set(key, self.agent_mgr.agent_option(key))
def set(self, key, value):
    """Set a configuration option.

    Falsy values (e.g. the empty string) are normalised to None.
    Keys not listed in 'self.agent_options' are logged and ignored.
    """
    if not value:
        value = None
    self.info("Setting configuration '{}'='{}'".format(key, value))
    if key not in self.agent_options:
        self.warning("Ignoring unknown option '{}'".format(key))
        return
    setattr(self, key, value)
def start(self):
    """Start up the agent.

    Reads configuration, runs the one-time 'init' hook, then kicks
    off the first refresh cycle via 'run'.
    """
    self.status = "init"
    self.configure()
    self.init()
    self.run()
def init(self):  # pragma: no cover
    """Perform one-time start actions.

    No-op placeholder called once from 'start'.
    """
    pass
def init_worker(self):
    """Create a worker instance.

    Builds a PrefixListWorker configured from the agent's
    'rptk_endpoint', 'source_dir' and 'update_delay' options and the
    eAPI manager; the worker process is started later in 'run'.
    """
    self.info("Initialising worker")
    self.worker = PrefixListWorker(endpoint=self.rptk_endpoint,
                                   path=self.source_dir,
                                   eapi=self.eapi_mgr,
                                   update_delay=self.update_delay)
def run(self):
    """Spawn worker process.

    If 'rptk_endpoint' is unset, logs a warning and just re-arms the
    refresh timer. Otherwise records the start time, creates the
    worker, watches its result/error pipes, and starts it; any
    exception during startup is routed through 'failure'.
    """
    self.status = "running"
    if self.rptk_endpoint is not None:
        self.last_start = datetime.datetime.now()
        try:
            self.init_worker()
            # Watch the worker's pipes so 'on_readable' fires when
            # either results (p_data) or an exception (p_err) arrive.
            self.watch(self.worker.p_data, "result")
            self.watch(self.worker.p_err, "error")
            self.info("Starting worker")
            self.worker.start()
            self.info("Worker started: pid {}".format(self.worker.pid))
        except Exception as e:
            self.err("Starting worker failed: {}".format(e))
            self.failure(err=e)
    else:
        self.warning("'rptk_endpoint' is not set")
        self.sleep()
def watch(self, conn, type):
    """Watch a Connection for new data.

    Registers the connection's file descriptor with the SDK readable
    notifier and remembers the connection in 'self.watching'.
    """
    self.info("Trying to watch for {} data on {}".format(type, conn))
    self.watching.add(conn)
    self.watch_readable(conn.fileno(), True)
    self.info("Watching {} for {} data".format(conn, type))
def unwatch(self, conn, close=False):
    """Stop watching a Connection for new data.

    De-registers the file descriptor from the SDK readable notifier,
    forgets the connection, and optionally closes it.
    """
    self.info("Trying to remove watch on {}".format(conn))
    self.watch_readable(conn.fileno(), False)
    # discard() is a no-op when the connection was never watched,
    # matching the original membership-test-then-remove behaviour.
    self.watching.discard(conn)
    self.info("Stopped watching {}".format(conn))
    if close:
        self.info("Closing connection {}".format(conn))
        conn.close()
def success(self):
    """Report statistics and restart refresh_interval timer.

    Reads the worker's result statistics from its data pipe, publishes
    them, marks the run "ok", reaps the worker process, and re-arms
    the refresh timer.
    """
    self.status = "finalising"
    self.info("Receiving results from worker")
    # Reading 'worker.data' drains the result pipe.
    stats = self.worker.data
    self.report(**stats)
    self.result = "ok"
    self.last_end = datetime.datetime.now()
    self.cleanup(process=self.worker)
    self.sleep()
def failure(self, err=None, process=None, restart=False):
    """Handle worker exception.

    Args:
        err: exception to report; if None, it is retrieved from
            'process.error' (falling back to the retrieval error
            itself if that fails).
        process: the worker process that failed, if any.
        restart: when True, restart the whole agent instead of only
            cleaning up the failed process.
    """
    self.status = "error"
    if err is None:
        try:
            err = process.error
        except Exception as e:
            # Typo fix: message previously read "Retreiving".
            self.err("Retrieving exception from {} failed"
                     .format(process.__class__.__name__))
            err = e
    self.err(err)
    self.result = "failed"
    self.last_end = datetime.datetime.now()
    if restart:
        self.restart()
    else:
        self.cleanup(process=process)
    # Always re-arm the refresh timer, whichever branch was taken.
    self.sleep()
def report(self, **stats):
    """Report statistics to the agent manager.

    Each keyword argument is logged and published (stringified) as a
    status key/value pair.
    """
    for name, value in stats.items():
        self.info("{}: {}".format(name, value))
        self.agent_mgr.status_set(name, str(value))
def cleanup(self, process):
    """Kill the process if it is still running.

    Closes any of the process's pipe connections that are being
    watched, then waits for the process to exit with escalating
    force: plain join, SIGTERM + join on retries, and finally
    SIGKILL. A None process is tolerated (only the log lines run).
    """
    self.status = "cleanup"
    process_name = process.__class__.__name__
    self.info("Cleaning up {} process".format(process_name))
    if process is not None:
        self.info("Closing connections from {}".format(process_name))
        # Collect the process's public attributes that are hashable
        # and currently watched (i.e. its pipe Connections), and
        # unwatch/close each. NOTE(review): 'collections.Hashable'
        # is the Python 2 location (collections.abc on Python 3).
        for conn in [c for c in
                     [getattr(process, k) for k in dir(process)
                      if not k.startswith("_")]
                     if isinstance(c, collections.Hashable)
                     and c in self.watching]:
            try:
                self.unwatch(conn, close=True)
            except Exception as e:
                # Best effort: log and keep closing the rest.
                self.err(e)
        # Escalating join: timeouts of 1s, 6s, 11s; SIGTERM is sent
        # before the second and third waits.
        for retry in range(3):
            if process.is_alive():
                if retry:
                    self.info("Sending {} SIGTERM".format(process_name))
                    process.terminate()
                timeout = retry * 5 + 1
                self.info("Waiting {} seconds for {} (pid: {}) to join"
                          .format(timeout, process_name, process.pid))
                process.join(timeout)
        # Last resort: force-kill if it still refuses to die.
        if process.is_alive():
            self.notice("Timeout waiting for {}. Sending SIGKILL"
                        .format(process_name))
            os.kill(process.pid, signal.SIGKILL)
    self.info("Cleanup complete")
def sleep(self):
    """Go to sleep for 'refresh_interval' seconds.

    Arms the EOS SDK timeout; 'on_timeout' fires when it expires and
    starts the next refresh run.
    """
    self.status = "sleeping"
    self.timeout_time_is(eossdk.now() + self.refresh_interval)
def shutdown(self):
    """Shutdown the agent gracefully.

    Best-effort cleanup of the worker (errors are logged, not
    raised), then signals shutdown completion to the agent manager.
    """
    self.notice("Shutting down")
    try:
        self.cleanup(process=self.worker)
    except Exception as e:
        self.err(e)
    self.status = "shutdown"
    self.agent_mgr.agent_shutdown_complete_is(True)
def restart(self):
    """Restart the agent.

    Best-effort cleanup of the worker (errors are logged, not
    raised), then runs the full 'start' sequence again.
    """
    self.notice("Restarting")
    self.status = "restarting"
    try:
        self.cleanup(process=self.worker)
    except Exception as e:
        self.err(e)
    self.start()
def on_initialized(self):
    """Start the agent after initialisation.

    EOS SDK callback: fired once the agent framework is ready.
    """
    self.start()
def on_agent_option(self, key, value):
    """Handle a change to a configuration option.

    EOS SDK callback: applies the changed option via 'set'.
    """
    self.set(key, value)
def on_agent_enabled(self, enabled):
    """Handle a change in the admin state of the agent.

    Logs the transition; a disable additionally triggers a graceful
    shutdown.
    """
    if not enabled:
        self.notice("Agent disabled")
        self.shutdown()
    else:
        self.notice("Agent enabled")
def on_timeout(self):
    """Handle a 'refresh_interval' timeout.

    EOS SDK callback: the timer armed in 'sleep' expired, so kick
    off the next refresh run.
    """
    self.run()
def on_readable(self, fd):
    """Handle a watched file descriptor becoming readable.

    EOS SDK callback: dispatches on which worker pipe became
    readable — p_data carries results, p_err carries an exception.
    """
    self.info("Watched file descriptor {} is readable".format(fd))
    if fd == self.worker.p_data.fileno():
        self.info("Data channel is ready")
        return self.success()
    elif fd == self.worker.p_err.fileno():
        self.info("Exception received from worker")
        return self.failure(process=self.worker)
    else:
        self.warning("Unknown file descriptor: ignoring")
|
wolcomm/eos-prefix-list-agent | tests/01_integration/test_agent_daemon.py | # Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Integration tests for PrefixListAgent."""
from __future__ import print_function
import re
import time
import pytest
NAME = "PrefixListAgent"
@pytest.mark.usefixtures("rptk_stub", "configure_daemon")
class TestPrefixListAgentDaemon(object):
    """Integration test cases for PrefixListAgent.

    Runs against a live EOS node ('node' fixture) with the agent
    configured as a daemon and a stubbed RPTK backend.
    """

    def test_running(self, node):
        """Test that the agent is running."""
        resp = node.enable("show daemon {}".format(NAME))
        status = resp[0]["result"]["daemons"]
        assert NAME in status
        assert status[NAME]["isSdkAgent"]
        assert status[NAME]["enabled"]
        assert status[NAME]["running"]

    def test_prefix_lists(self, node):
        """Test prefix-list creation."""
        # Expected objects/entries as served by the rptk_stub fixture.
        objects = {
            "AS-FOO": {
                "ipv4": [
                    {"prefix": "192.0.2.0/24", "exact": True}
                ],
                "ipv6": [
                    {"prefix": "2001:db8::/32", "exact": False,
                     "greater-equal": 40, "less-equal": 48}
                ]
            },
            "AS-BAR": {
                "ipv4": [
                    {"prefix": "198.51.100.0/24", "exact": True},
                    {"prefix": "203.0.113.0/24", "exact": True}
                ],
                "ipv6": []
            }
        }
        # Give the agent time to complete a refresh cycle before
        # inspecting the configured prefix-lists.
        time.sleep(15)
        responses = node.enable(["show {} prefix-list".format(config_af)
                                 for config_af in ("ip", "ipv6")])
        # Parses one "seq N permit <prefix> [ge X] [le Y]" line of the
        # detailed prefix-list output.
        entry_pattern = r"^\s+seq \d+ permit (?P<p>[\w.:/]+)( ge (?P<ge>\d+))?( le (?P<le>\d+))?$"  # noqa: E501
        entry_regexp = re.compile(entry_pattern)
        for obj, data in objects.items():
            # Each object name must appear for both address families.
            for resp in responses:
                assert obj in resp["result"]["ipPrefixLists"]
            for config_af, afi in (("ip", "ipv4"), ("ipv6", "ipv6")):
                resp = node.enable("show {} prefix-list {} detail"
                                   .format(config_af, obj),
                                   encoding="text")
                output = resp[0]["result"]["output"]
                entries = [m.groupdict() for m in
                           [entry_regexp.match(l) for l in output.splitlines()]  # noqa E741
                           if m is not None]
                assert len(entries) == len(data[afi])
                for item in data[afi]:
                    assert item["prefix"] in [e["p"] for e in entries]
                    if not item["exact"]:
                        assert item["less-equal"] in [int(e["le"])
                                                      for e in entries]
                        assert item["greater-equal"] in [int(e["ge"])
                                                         for e in entries]
        # Finally, check the statistics the agent published.
        status_resp = node.enable("show daemon {}".format(NAME))
        status = status_resp[0]["result"]["daemons"]
        assert NAME in status
        assert status[NAME]["data"]["result"] == "ok"
        assert int(status[NAME]["data"]["failed"]) == 0
        assert int(status[NAME]["data"]["succeeded"]) == 4
|
wolcomm/eos-prefix-list-agent | tests/00_unit/test_agent.py | # Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Tests for prefix_list_agent.agent module."""
from __future__ import print_function
import datetime
import signal
import time
import pytest
from prefix_list_agent.agent import PrefixListAgent
from prefix_list_agent.exceptions import handle_sigterm, TermException
class TestPrefixListAgent(object):
    """Test cases for PrefixListAgent object.

    NOTE(review): this suite targets Python 2 ('StandardError' does
    not exist on Python 3). The 'agent', 'worker', 'sdk' and
    'connection' fixtures are provided by conftest.
    """

    def test_set_sysdb_mp(self, sdk):
        """Test case for 'set_sysdb_mp' classmethod."""
        # Should return True on the first call
        assert PrefixListAgent.set_sysdb_mp(sdk.name())
        # Should return False on subsequent calls
        for i in range(3):
            assert not PrefixListAgent.set_sysdb_mp(sdk.name())

    def test_init(self, agent):
        """Test case for PrefixListAgent initialisation."""
        assert isinstance(agent, PrefixListAgent)

    def test_property_rptk_endpoint(self, agent):
        """Test 'rptk_endpoint' getter and setter."""
        assert agent.rptk_endpoint == "https://example.com"
        test_value = "https://example.net"
        agent.rptk_endpoint = test_value
        assert agent.rptk_endpoint == test_value

    def test_property_source_dir(self, agent):
        """Test 'source_dir' getter and setter."""
        assert agent.source_dir == "/tmp/prefix-lists"
        test_value = "/foo/bar"
        agent.source_dir = test_value
        assert agent.source_dir == test_value

    def test_property_refresh_interval(self, agent):
        """Test 'refresh_interval' getter and setter."""
        assert agent.refresh_interval == 3600
        test_value = 60
        agent.refresh_interval = test_value
        assert agent.refresh_interval == test_value
        # Invalid values must raise and leave the stored value intact.
        for fail_value in ("x", 5, 100000):
            with pytest.raises(ValueError):
                agent.refresh_interval = fail_value
            assert agent.refresh_interval == test_value

    def test_property_update_delay(self, agent):
        """Test 'update_delay' getter and setter."""
        assert agent.update_delay is None
        test_value = 5
        agent.update_delay = test_value
        assert agent.update_delay == test_value
        # Invalid values must raise and leave the stored value intact.
        for fail_value in ("x", 0, 100000):
            with pytest.raises(ValueError):
                agent.update_delay = fail_value
            assert agent.update_delay == test_value

    def test_property_status(self, agent):
        """Test 'status' getter and setter."""
        assert agent.status is None
        test_value = "test"
        agent.status = test_value
        assert agent.status == test_value

    def test_property_result(self, agent):
        """Test 'result' getter and setter."""
        assert agent.result is None
        test_value = "test"
        agent.result = test_value
        assert agent.result == test_value

    def test_property_timestamps(self, agent):
        """Test 'last_start' and 'last_end' getters and setters."""
        for prop in ("last_start", "last_end"):
            assert getattr(agent, prop) is None
            test_value = datetime.datetime.now()
            setattr(agent, prop, test_value)
            assert getattr(agent, prop) == test_value
            # Non-datetime values must be rejected.
            with pytest.raises(TypeError):
                setattr(agent, prop, "foo")
            assert getattr(agent, prop) == test_value

    def test_configure(self, agent):
        """Test 'configure' method."""
        agent.configure()
        agent.agent_mgr.agent_option_iter.assert_called_once()
        # One lookup per option exposed by the mocked agent manager.
        assert agent.agent_mgr.agent_option.call_count == 5

    @pytest.mark.parametrize(("key", "value"), (
        ("rptk_endpoint", "https://x.com"),
        ("source_dir", "/foo/bar"),
        ("refresh_interval", 60),
        ("update_delay", 1),
        ("bad_option", None)
    ))
    def test_set(self, agent, mocker, key, value):
        """Test case for 'set' method."""
        mocker.patch.object(agent, "warning", autospec=True)
        agent.set(key, value)
        if key in agent.agent_options:
            assert getattr(agent, key) == value
        else:
            # Unknown keys are ignored with a warning.
            agent.warning.assert_called_once_with("Ignoring unknown option '{}'"  # noqa: E501
                                                  .format(key))

    @pytest.mark.parametrize(("agent_key", "worker_key", "value"), (
        ("rptk_endpoint", "endpoint", "https://foo.bar"),
        ("source_dir", "path", "/quux/baz"),
        ("update_delay", "update_delay", None),
        ("update_delay", "update_delay", 1)
    ))
    def test_init_worker(self, agent, agent_key, worker_key, value):
        """Test case for `init_worker` method."""
        # Agent options must map onto the worker's constructor args.
        agent.set(agent_key, value)
        agent.init_worker()
        assert getattr(agent.worker, worker_key) == value

    def test_start(self, agent, mocker):
        """Test case for 'start' method."""
        methods = ("configure", "init", "run")
        for method in methods:
            mocker.patch.object(agent, method, autospec=True)
        agent.start()
        for method in methods:
            getattr(agent, method).assert_called_once_with()

    @pytest.mark.parametrize("side_effect", ((None,), StandardError()))
    @pytest.mark.parametrize("rptk_endpoint", ("https://example.com", None))
    def test_run(self, agent, worker, mocker, side_effect, rptk_endpoint):
        """Test case for 'run' method."""
        mock_worker = mocker.patch("prefix_list_agent.agent.PrefixListWorker",
                                   autospec=True)
        mock_worker.return_value.start.side_effect = side_effect
        for method in ("watch", "failure", "sleep"):
            mocker.patch.object(agent, method, autospec=True)
        agent.rptk_endpoint = rptk_endpoint
        agent.run()
        if agent.rptk_endpoint is None:
            # Without an endpoint, run() only re-arms the timer.
            agent.sleep.assert_called_once_with()
        else:
            # Both worker pipes (data and error) must be watched.
            assert agent.watch.call_count == 2
            agent.worker.start.assert_called_once_with()
            if issubclass(type(side_effect), Exception):
                assert agent.failure.call_count == 1

    def test_watch(self, agent, mocker, connection):
        """Test case for 'watch' method."""
        mocker.patch.object(agent, "watch_readable")
        agent.watch(connection, "test")
        agent.watch_readable.assert_called_once_with(connection.fileno(), True)
        assert connection in agent.watching

    @pytest.mark.parametrize("close", (True, False))
    def test_unwatch(self, agent, mocker, connection, close):
        """Test case for 'unwatch' method."""
        mocker.patch.object(agent, "watch_readable")
        agent.watching.add(connection)
        agent.unwatch(connection, close=close)
        agent.watch_readable.assert_called_once_with(connection.fileno(),
                                                     False)
        if close:
            connection.close.assert_called_once_with()

    def test_success(self, agent, mocker):
        """Test case for 'success' method."""
        for method in ("report", "cleanup", "sleep"):
            mocker.patch.object(agent, method, autospec=True)
        stats = {"foo": "bar"}
        mock_worker = mocker.patch("prefix_list_agent.agent.PrefixListWorker",
                                   autospec=True)
        mock_worker.return_value.data = stats
        agent.worker = mock_worker(endpoint=agent.rptk_endpoint,
                                   path=agent.source_dir,
                                   eapi=agent.eapi_mgr,
                                   update_delay=agent.update_delay)
        agent.success()
        agent.report.assert_called_once_with(**stats)
        agent.cleanup.assert_called_once_with(process=agent.worker)
        agent.sleep.assert_called_once_with()

    @pytest.mark.parametrize("local_err", (None, StandardError("test_error")))
    @pytest.mark.parametrize("worker_process", (True, False))
    @pytest.mark.parametrize("restart", (True, False))
    def test_failure(self, agent, mocker, local_err, worker_process, restart):
        """Test case for 'failure' method."""
        for method in ("err", "restart", "cleanup", "sleep"):
            mocker.patch.object(agent, method, autospec=True)
        mock_worker = mocker.patch("prefix_list_agent.agent.PrefixListWorker",
                                   autospec=True)
        worker_err = StandardError("worker_err")
        mock_worker.return_value.error = worker_err
        agent.worker = mock_worker(endpoint=agent.rptk_endpoint,
                                   path=agent.source_dir,
                                   eapi=agent.eapi_mgr,
                                   update_delay=agent.update_delay)
        if worker_process:
            process = agent.worker
        else:
            process = None
        agent.failure(err=local_err, process=process, restart=restart)
        if local_err is None:
            if process is None:
                # Error retrieval from a None process itself fails,
                # producing a second err() call.
                assert agent.err.call_count == 2
            else:
                agent.err.assert_called_once_with(worker_err)
        else:
            agent.err.assert_called_once_with(local_err)
        if restart:
            agent.restart.assert_called_once_with()
        else:
            agent.cleanup.assert_called_once_with(process=process)
        # sleep() runs regardless of the restart branch.
        agent.sleep.assert_called_once_with()

    def test_report(self, agent):
        """Test case for 'report' method."""
        stats = {"foo": 1, "bar": "baz"}
        agent.report(**stats)
        assert agent.agent_mgr.status_set.call_count == len(stats)

    @pytest.mark.parametrize("unwatch_err", ((None,), StandardError()))
    @pytest.mark.parametrize("catch_sigterm", (True, False))
    def test_cleanup(self, agent, worker, mocker, unwatch_err, catch_sigterm):
        """Test case for 'cleanup' method."""
        # Worker body: either ignore SIGTERM (forcing the SIGKILL
        # path) or sleep forever (killed by plain SIGTERM).
        def run():
            if catch_sigterm:
                signal.signal(signal.SIGTERM, handle_sigterm)
                while True:
                    try:
                        time.sleep(1)
                    except TermException:
                        pass
            else:
                while True:
                    time.sleep(1)
        mocker.patch.object(worker, "run", autospec=True, side_effect=run)
        mocker.patch.object(agent, "unwatch", autospec=True,
                            side_effect=unwatch_err)
        for method in ("err", "notice", "info"):
            mocker.patch.object(agent, method, autospec=True)
        agent.watching.add(worker.p_data)
        agent.watching.add(worker.p_err)
        worker.start()
        agent.cleanup(worker)
        agent.unwatch.assert_any_call(worker.p_data, close=True)
        agent.unwatch.assert_any_call(worker.p_err, close=True)
        assert agent.unwatch.call_count == 2
        if isinstance(unwatch_err, Exception):
            # Both failed unwatch calls must be logged, not raised.
            agent.err.assert_called_with(unwatch_err)
            assert agent.err.call_count == 2
        if catch_sigterm:
            agent.notice.assert_called_once_with("Timeout waiting for {}. Sending SIGKILL"  # noqa: E501
                                                 .format(worker.__class__.__name__))  # noqa: E501
        # info() count varies with how many join retries were needed.
        assert 5 <= agent.info.call_count <= 9

    def test_sleep(self, agent, mocker):
        """Test case for 'sleep' method."""
        # With eossdk.now() pinned to 0 the timeout equals the interval.
        mocker.patch("eossdk.now", autospec=True, return_value=0)
        mocker.patch.object(agent, "timeout_time_is")
        agent.sleep()
        agent.timeout_time_is.assert_called_once_with(agent.refresh_interval)

    @pytest.mark.parametrize("side_effect", ((None,), StandardError))
    def test_shutdown(self, agent, mocker, side_effect):
        """Test case for 'shutdown' method."""
        mocker.patch.object(agent, "cleanup", autospec=True,
                            side_effect=side_effect)
        agent.shutdown()
        agent.cleanup.assert_called_once_with(process=agent.worker)
        # Completion must be signalled even when cleanup raised.
        agent.agent_mgr.agent_shutdown_complete_is.assert_called_once_with(True)  # noqa: E501

    @pytest.mark.parametrize("side_effect", ((None,), StandardError))
    def test_restart(self, agent, mocker, side_effect):
        """Test case for 'restart' method."""
        mocker.patch.object(agent, "cleanup", autospec=True,
                            side_effect=side_effect)
        mocker.patch.object(agent, "start", autospec=True)
        agent.restart()
        agent.cleanup.assert_called_once_with(process=agent.worker)
        # start() must run even when cleanup raised.
        agent.start.assert_called_once_with()

    def test_on_initialized(self, agent, mocker):
        """Test case for 'on_initialized' method."""
        mocker.patch.object(agent, "start", autospec=True)
        agent.on_initialized()
        agent.start.assert_called_once_with()

    def test_on_agent_option(self, agent, mocker):
        """Test case for 'on_agent_option' method."""
        mocker.patch.object(agent, "set", autospec=True)
        agent.on_agent_option("foo", "bar")
        agent.set.assert_called_once_with("foo", "bar")

    @pytest.mark.parametrize("enabled", (True, False))
    def test_on_agent_enabled(self, agent, mocker, enabled):
        """Test case for 'on_agent_enabled' method."""
        mocker.patch.object(agent, "notice", autospec=True)
        mocker.patch.object(agent, "shutdown", autospec=True)
        agent.on_agent_enabled(enabled)
        assert agent.notice.call_count == 1
        if not enabled:
            agent.shutdown.assert_called_once_with()

    def test_on_timeout(self, agent, mocker):
        """Test case for 'on_timeout' method."""
        mocker.patch.object(agent, "run", autospec=True)
        agent.on_timeout()
        agent.run.assert_called_once_with()

    @pytest.mark.parametrize("fd", (1, 2, 3))
    def test_on_readable(self, agent, mocker, fd):
        """Test case for 'on_readable' method."""
        # fd 1 -> data pipe, fd 2 -> error pipe, fd 3 -> unknown.
        mock_worker = mocker.patch("prefix_list_agent.agent.PrefixListWorker",
                                   autospec=True)
        mock_worker.return_value.p_data.fileno.return_value = 1
        mock_worker.return_value.p_err.fileno.return_value = 2
        for method in ("success", "failure", "warning"):
            mocker.patch.object(agent, method, autospec=True)
        agent.worker = mock_worker(endpoint=agent.rptk_endpoint,
                                   path=agent.source_dir,
                                   eapi=agent.eapi_mgr,
                                   update_delay=agent.update_delay)
        agent.on_readable(fd)
        if fd == 1:
            agent.success.assert_called_once_with()
        elif fd == 2:
            agent.failure.assert_called_once_with(process=agent.worker)
        else:
            agent.warning.assert_called_once_with("Unknown file descriptor: ignoring")  # noqa: E501
|
wolcomm/eos-prefix-list-agent | tests/00_unit/test_worker.py | # Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Tests for prefix_list_agent.worker module."""
from __future__ import print_function
import json
import signal
import StringIO
import time
import urllib2
import datetime
import pytest
from mock import MagicMock
from prefix_list_agent.exceptions import TermException
from prefix_list_agent.worker import PrefixListWorker
class TestPrefixListWorker(object):
    """Test cases for the PrefixListWorker object.

    NOTE(review): this suite targets Python 2 ('StandardError',
    'StringIO', 'urllib2', '__builtin__' do not exist on Python 3).
    """

    def test_init(self, worker):
        """Test case for PrefixListWorker initialisation."""
        assert isinstance(worker, PrefixListWorker)

    @pytest.mark.parametrize(("case", "side_effect"), (
        ("success", (({"foo": "bar"}, set()),)),
        ("sigterm", TermException),
        ("error", StandardError),
    ))
    def test_run(self, worker, mocker, case, side_effect):
        """Test case for 'run' method."""
        # Stub out every collaborator; drive the outcome through
        # write_results' side_effect.
        for method in ("get_policies", "get_configured", "get_data",
                       "refresh_all", "refresh_prefix_list", "notice"):
            mocker.patch.object(worker, method, autospec=True)
        mocker.patch.object(worker, "write_results", autospec=True,
                            side_effect=side_effect)
        worker.run()
        if case == "success":
            assert worker.data == {"foo": "bar"}
        elif case == "sigterm":
            worker.notice.assert_called_once_with("Got SIGTERM signal: exiting.")  # noqa: E501
        elif case == "error":
            assert type(worker.error) is StandardError
        else:
            raise ValueError(case)

    @pytest.mark.parametrize(("case", "side_effect"), (
        ("success", (({"foo": "bar"}, set()),)),
        ("sigterm", TermException),
        ("error", StandardError),
    ))
    def test_start(self, worker, mocker, case, side_effect):
        """Test case for 'start' method."""
        for method in ("get_policies", "get_configured", "get_data",
                       "refresh_all", "refresh_prefix_list", "notice"):
            mocker.patch.object(worker, method, autospec=True)
        mocker.patch.object(worker, "write_results", autospec=True,
                            side_effect=side_effect)
        # start() forks a real child process; give it a moment.
        worker.start()
        time.sleep(1)
        if case == "success":
            assert worker.data == {"foo": "bar"}
        elif case == "sigterm":
            assert worker.exitcode == 127 + signal.SIGTERM
        elif case == "error":
            assert type(worker.error) is StandardError
        else:
            raise ValueError(case)
        if worker.is_alive():
            worker.terminate()
            worker.join()

    def test_get_configured(self, worker):
        """Test case for 'test_get_configured' method."""
        configured = worker.get_configured(["strict"])
        expect = {"strict": {"AS-FOO": {"ipv4": "as-foo",
                                        "ipv6": "as-foo"}}}
        # One eAPI call per address family.
        assert worker.eapi.run_show_cmd.call_count == 2
        assert configured == expect

    @pytest.mark.parametrize(("prefix_list",), ((None,), ("AS-BAZ",)))
    def test_refresh_prefix_list(self, worker, prefix_list):
        """Test case for 'refresh_prefix_list' method."""
        test_afi = "ip"
        worker.refresh_prefix_list(test_afi, prefix_list)
        assert worker.eapi.run_show_cmd.call_count == 1
        cmd = worker.eapi.run_show_cmd.call_args.args[0]
        assert test_afi in cmd
        if prefix_list is not None:
            assert cmd.endswith(prefix_list)

    @pytest.mark.parametrize(("update_delay",), ((None,), (1,)))
    def test_refresh_all(self, worker, mocker, update_delay):
        """Test case for 'refresh_all' method."""
        test_objs = ["AS-FOO", "AS-BAR"]
        test_afi = "ipv6"
        mocker.patch.object(worker, "refresh_prefix_list")

        # Wrap time.sleep so that each call records how long it
        # actually took (to verify the configured delay is honoured).
        def func_wrapper(func, m):
            m.deltas = list()

            def wrapped(*args, **kwargs):
                t0 = datetime.datetime.utcnow()
                func(*args, **kwargs)
                t1 = datetime.datetime.utcnow()
                delta = t1 - t0
                m.deltas.append(delta)
            return wrapped

        m = MagicMock()
        m.side_effect = func_wrapper(time.sleep, m)
        mocker.patch.object(time, "sleep", m)
        worker.update_delay = update_delay
        worker.refresh_all(test_objs)
        assert len(m.deltas) == m.call_count
        for delta in m.deltas:
            assert 0.5 <= delta.seconds <= 1.5
        if update_delay is None:
            # Test refresh_prefix_all behavior without update_delay
            assert m.call_count == 0
            assert worker.refresh_prefix_list.call_count == 2
            worker.refresh_prefix_list.assert_called_with(test_afi)
        else:
            # Test refresh_prefix_all behavior with update_delay
            assert m.call_count == len(test_objs) * 2
            assert worker.refresh_prefix_list.call_count == len(test_objs) * 2
            worker.refresh_prefix_list.assert_called_with(test_afi,
                                                          test_objs[1])

    def test_get_policies(self, worker, mocker):
        """Test case for 'get_policies' method."""
        resp_data = {"strict": "strict descr", "loose": "loose descr"}
        resp_fp = StringIO.StringIO(json.dumps(resp_data))
        return_value = urllib2.addinfourl(url="/testing", code=200,
                                          headers=None, fp=resp_fp)
        mocker.patch.object(urllib2, "urlopen", autospec=True,
                            return_value=return_value)
        policies = worker.get_policies()
        assert policies == resp_data

    def test_get_data(self, worker, mocker):
        """Test case for 'get_data' method."""
        configured = {"strict": {"AS-FOO": {"ipv4": "as-foo-4",
                                            "ipv6": "as-foo-6"},
                                 "AS-BAR": {"ipv4": "as-bar-4",
                                            "ipv6": "as-bar-6"}},
                      "loose": {"AS-BAZ": {"ipv4": "as-baz-4",
                                           "ipv6": "as-baz-6"},
                                "AS-QUX": {"ipv4": "as-qux-4",
                                           "ipv6": "as-qux-6"}},
                      "empty": {}}
        data = {"strict": {"AS-FOO": {"ipv4": [], "ipv6": []},
                           "AS-BAR": {"ipv4": [], "ipv6": []}},
                "loose": {"AS-BAZ": {"ipv4": [], "ipv6": []}}}
        # Second call of each helper raises, exercising the fallback
        # and partial-result handling.
        mocker.patch.object(worker, "get_data_bulk", autospec=True,
                            side_effect=(data["strict"], StandardError))
        mocker.patch.object(worker, "get_data_obj", autospec=True,
                            side_effect=(data["loose"], StandardError))
        result = worker.get_data(configured)
        assert result == data
        assert worker.get_data_bulk.call_count == 2
        assert worker.get_data_obj.call_count == 2

    def test_get_data_bulk(self, worker, mocker):
        """Test case for 'get_data_obj' method."""
        policy = "strict"
        objs = ["AS-FOO", "AS-BAR"]
        resp_data = {obj: {"ipv4": [], "ipv6": []} for obj in objs}
        resp_fp = StringIO.StringIO(json.dumps(resp_data))
        return_value = urllib2.addinfourl(url="/testing", code=200,
                                          headers=None, fp=resp_fp)
        mocker.patch.object(urllib2, "urlopen", autospec=True,
                            return_value=return_value)
        result = worker.get_data_bulk(policy, objs)
        assert result == resp_data

    def test_get_data_obj(self, worker, mocker):
        """Test case for 'get_data_obj' method."""
        policy = "strict"
        obj = "AS-FOO"
        resp_data = {obj: {"ipv4": [], "ipv6": []}}
        resp_fp = StringIO.StringIO(json.dumps(resp_data))
        return_value = urllib2.addinfourl(url="/testing", code=200,
                                          headers=None, fp=resp_fp)
        mocker.patch.object(urllib2, "urlopen", autospec=True,
                            return_value=return_value)
        result = worker.get_data_obj(policy, obj)
        assert result == resp_data

    @pytest.mark.parametrize(("configured", "data"), (
        ({"strict": {"AS-FOO": {"ipv4": "as-foo-4", "ipv6": "as-foo-6"},
                     "AS-BAR": {"ipv4": "as-bar-4", "ipv6": "as-bar-6"}},
          "empty": {}},
         {"strict": {"AS-FOO": {"ipv4": [{"prefix": "192.0.2.0/24",
                                          "exact": True}],
                                "ipv6": [{"prefix": "2001:db8::/32",
                                          "exact": True}]}}}),
    ))
    def test_write_results(self, worker, mocker, configured, data):
        """Test case for 'write_results' method."""
        mocker.patch("__builtin__.open", mocker.mock_open())
        stats, written_objs = worker.write_results(configured, data)
        # AS-FOO has data for both afis (2 succeeded); AS-BAR has
        # none (2 failed); only AS-FOO's files are written.
        assert stats["succeeded"] == 2
        assert stats["failed"] == 2
        assert len(written_objs) == 1

    @pytest.mark.parametrize(("entries", "side_effect"), (
        ([], None),
        ([{"prefix": "2001:db8:b00::/48", "exact": True},
          {"prefix": "2001:db8:f00::/48", "exact": True}], None),
        pytest.param([], IOError, marks=pytest.mark.xfail(raises=IOError))
    ))
    def test_write_prefix_list(self, worker, mocker, entries, side_effect):
        """Test case for 'write_prefix_list' method."""
        m = mocker.patch("__builtin__.open", mocker.mock_open())
        m.side_effect = side_effect
        path = "/tmp/foo"
        worker.write_prefix_list(path, entries, "ipv6")
        m.assert_called_once_with(path, "w")
        # One write() per entry.
        assert m().write.call_count == len(entries)

    @pytest.mark.parametrize(("entry", "expect"), (
        ({"prefix": "10.0.0.0/8", "exact": True}, "seq 1 permit 10.0.0.0/8\n"),
        ({"prefix": "2001:db8::/32", "exact": False,
          "greater-equal": 48, "less-equal": 64},
         "seq 1 permit 2001:db8::/32 ge 48 le 64\n")
    ))
    def test_prefix_list_line(self, worker, entry, expect):
        """Test case for 'prefix_list_line' method."""
        # Sequence numbers are 1-based: index 0 -> "seq 1".
        line = worker.prefix_list_line(0, entry)
        assert line == expect

    @pytest.mark.parametrize(("cmd", "allow_empty"), (
        ("test", False),
        ("empty", True),
        pytest.param("empty", False,
                     marks=pytest.mark.xfail(raises=KeyError)),
        pytest.param("fail", False,
                     marks=pytest.mark.xfail(raises=RuntimeError)),
        pytest.param("error", False,
                     marks=pytest.mark.xfail(raises=StandardError))
    ))
    def test_eapi_request(self, worker, cmd, allow_empty):
        """Test case for 'eapi_request' method."""
        result = worker.eapi_request(cmd, "{}_resp".format(cmd), allow_empty)
        if allow_empty:
            assert result == {}
        else:
            assert result["foo"] == "bar"

    @pytest.mark.parametrize("side_effect", (
        (urllib2.addinfourl(url="/testing", code=200, headers=None,
                            fp=StringIO.StringIO('{"foo":"bar"}')),),
        pytest.param(urllib2.URLError(reason="Testing"),
                     marks=pytest.mark.xfail(raises=urllib2.URLError)),
        pytest.param(urllib2.HTTPError(url="/testing", code=500,
                                       msg="Testing", hdrs=None, fp=None),
                     marks=pytest.mark.xfail(raises=urllib2.HTTPError))
    ))
    def test_rptk_request(self, mocker, worker, side_effect):
        """Test case for 'rptk_request' method."""
        mocker.patch.object(urllib2, "urlopen", autospec=True,
                            side_effect=side_effect)
        result = worker.rptk_request("/testing")
        assert result["foo"] == "bar"

    @pytest.mark.parametrize("obj", (
        '{"foo":"bar"}',
        StringIO.StringIO('{"foo":"bar"}'),
        pytest.param("foo",
                     marks=pytest.mark.xfail(raises=ValueError, strict=True)),
        pytest.param(StringIO.StringIO("foo"),
                     marks=pytest.mark.xfail(raises=ValueError, strict=True))
    ))
    def test_json_load(self, worker, obj):
        """Test case for 'json_load' method."""
        # Accepts both raw strings and file-like objects.
        result = worker.json_load(obj)
        assert result["foo"] == "bar"

    def test_property_data(self, worker):
        """Test case for 'data' property."""
        assert worker.data is None
        send_data = {"foo": "bar"}
        worker.c_data.send(send_data)
        recv_data = worker.data
        assert recv_data == send_data

    def test_property_error(self, worker):
        """Test case for 'error' property."""
        assert worker.error is None
        send_error = Exception()
        worker.c_err.send(send_error)
        recv_error = worker.error
        # Pickling through the pipe yields an equivalent, not
        # identical, exception object.
        assert (type(recv_error) is type(send_error) and
                recv_error.args == send_error.args)
|
wolcomm/eos-prefix-list-agent | prefix_list_agent/__meta__.py | <reponame>wolcomm/eos-prefix-list-agent
#!/usr/bin/env python
# Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""prefix_list_agent package metadata."""
from __future__ import print_function
from __future__ import unicode_literals
# Package version (PEP 440); also printed when this module is run directly.
__version__ = "0.1.2"
__author__ = "<NAME>"
__author_email__ = "<EMAIL>"
__licence__ = "MIT"
__copyright__ = "Copyright (c) 2019 Workonline Communications (Pty) Ltd"
__url__ = "https://github.com/wolcomm/eos-prefix-list-agent"
# Trove classifiers consumed by the packaging metadata.
__classifiers__ = [
    "Development Status :: 4 - Beta",
    "Environment :: Other Environment",
    "Intended Audience :: Telecommunications Industry",
    "License :: OSI Approved :: MIT License",
    "Operating System :: POSIX :: Other",
    "Programming Language :: Python :: 2.7",
    "Topic :: Internet",
    "Topic :: System :: Networking"
]
__entry_points__ = None
# Scripts installed verbatim via setuptools' 'scripts' argument.
__scripts__ = [
    "bin/PrefixListAgent"
]
if __name__ == "__main__":
    # Allow build tooling to query the version: `python __meta__.py`.
    print(__version__)
|
wolcomm/eos-prefix-list-agent | prefix_list_agent/cli.py | <filename>prefix_list_agent/cli.py
# Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""prefix_list_agent cli entry point."""
from __future__ import print_function
import sys
from prefix_list_agent import PrefixListAgent
def start(sdk):
    """Start the agent.

    Returns 64 when a Sysdb mount profile was (re)written and a restart is
    required, 130 on keyboard interrupt, and None after a normal exit of the
    sdk event-loop.  Unexpected exceptions propagate to the caller (the
    original 'except Exception: raise' clause was a no-op re-raise and has
    been removed; behaviour is unchanged).
    """
    try:
        # create a Sysdb mount profile, restarting if necessary
        if PrefixListAgent.set_sysdb_mp(sdk.name()):
            # return a user defined status to indicate
            # that a restart is desired
            return 64
        # create an instance of the agent
        agent = PrefixListAgent(sdk)  # noqa: W0612
        # enter the sdk event-loop
        sdk.main_loop(sys.argv)
    except KeyboardInterrupt:
        return 130
    return None
|
wolcomm/eos-prefix-list-agent | tests/00_unit/test_cli.py | <gh_stars>1-10
# Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Tests for prefix_list_agent.cli module."""
from __future__ import print_function
import sys
import pytest
from prefix_list_agent.agent import PrefixListAgent
from prefix_list_agent.cli import start
class TestCli(object):
    """Test cases for cli module."""

    @staticmethod
    def module_patch(mocker, mod, cls):
        """Patch all superclasses in a given module."""
        # Walk the MRO and autospec-patch every ancestor defined in 'mod',
        # so instantiating 'cls' never touches real eossdk machinery.
        for super_cls in cls.mro():
            if super_cls.__module__ == mod:
                mocker.patch("{}.{}".format(mod, super_cls.__name__),
                             autospec=True)

    # NOTE(review): 'StandardError' exists only on Python 2.
    @pytest.mark.parametrize(("arg", "ret"),
                             ((None, None), (KeyboardInterrupt, 130),
                              (StandardError, StandardError)))
    @pytest.mark.parametrize("sysdb_mp_written", (True, False))
    def test_start(self, sdk, mocker, arg, ret, sysdb_mp_written):
        """Test case for cli entrypoint."""
        self.module_patch(mocker, "eossdk", PrefixListAgent)
        mocker.patch.object(PrefixListAgent, "set_sysdb_mp",
                            return_value=sysdb_mp_written)
        # DummySdk.main_loop raises its argument when it is an exception
        # class; None means a clean run (see conftest.DummySdk).
        sys.argv = arg
        if sysdb_mp_written:
            # A freshly written mount profile must request a restart (64).
            assert start(sdk) == 64
        else:
            if not isinstance(ret, type):
                assert start(sdk) == ret
            else:
                with pytest.raises(ret):
                    start(sdk)
|
wolcomm/eos-prefix-list-agent | tests/00_unit/test_base.py | <filename>tests/00_unit/test_base.py
# Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Tests for prefix_list_agent.agent module."""
from __future__ import print_function
import pytest
from prefix_list_agent.base import PrefixListBase
class TestPrefixListAgent(object):
"""Test cases for PrefixListBase object."""
def test_init(self, sdk, mocker):
"""Test case for PrefixListAgent initialisation."""
mocker.patch("eossdk.Tracer", autospec=True)
base = PrefixListBase()
assert isinstance(base, PrefixListBase)
@pytest.mark.parametrize("level", ("emerg", "alert", "crit", "err",
"warning", "notice", "info", "debug"))
def test_tracing(self, mocker, level):
"""Test calls to tracer."""
mocker.patch("eossdk.Tracer", autospec=True)
base = PrefixListBase()
method = getattr(base, level)
method("message")
assert base.tracer.trace.call_count == 1
|
wolcomm/eos-prefix-list-agent | tests/00_unit/conftest.py | # Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Fixtures for prefix_list_agent test cases."""
from __future__ import print_function
import json
import mock
import pytest
import eossdk
from _multiprocessing import Connection
from prefix_list_agent.agent import PrefixListAgent
class DummySdk(object):
    """Dummy class to stub-out eossdk interactions in test cases."""

    def name(self):
        """Stub for 'name' method."""
        return "PrefixListAgent"

    def main_loop(self, arg=None):
        """Stub for 'main_loop' method."""
        # Raising the passed-in exception class lets tests drive error paths.
        if arg is not None and issubclass(arg, BaseException):
            raise arg
        return

    def get_agent_mgr(self):
        """Stub for 'get_agent_mgr' method."""
        mgr = mock.create_autospec(eossdk.AgentMgr)
        # Canned agent options, including one ('bad_option') that the agent
        # under test is expected to reject.
        test_options = {"rptk_endpoint": "https://example.com",
                        "source_dir": "/foo/bar",
                        "refresh_interval": 60,
                        "update_delay": 1,
                        "bad_option": None}

        def agent_option_iter():
            return test_options.keys()
        mgr.agent_option_iter.side_effect = agent_option_iter

        def agent_option(key):
            return test_options[key]
        mgr.agent_option.side_effect = agent_option
        return mgr

    def get_timeout_mgr(self):
        """Stub for 'get_timeout_mgr' method."""
        mgr = mock.create_autospec(eossdk.TimeoutMgr)
        return mgr

    def get_eapi_mgr(self):
        """Stub for 'get_eapi_mgr' method."""
        mgr = mock.create_autospec(eossdk.EapiMgr)

        def run_show_cmd(cmd):
            # Magic command names select synthetic responses:
            #   'error'      -> raises (Python 2 'StandardError')
            #   'fail'       -> unsuccessful EapiResponse
            #   'empty'      -> empty JSON object
            #   'refresh...' -> message list
            #   'show...'    -> canned prefix-list table
            #   anything else-> '<cmd>_resp' wrapper object
            if cmd == "error":
                raise StandardError
            elif cmd == "fail":
                return eossdk.EapiResponse(False, 255, "synthetic_failure", [])
            elif cmd == "empty":
                result = json.dumps({})
            elif cmd.startswith("refresh"):
                result = json.dumps({"messages": ["Dummy message"]})
            elif cmd.startswith("show"):
                result = json.dumps({"ipPrefixLists": {
                    "AS-FOO": {"ipPrefixListSource": "file:/tmp/prefix-lists/strict/as-foo"},  # noqa: E501
                    "AS-BAR": {},
                    "AS-BAZ": {"ipPrefixListSource": "file:/baz/as-baz"},
                    "AS-QUX": {"ipPrefixListSource": "file:/tmp/prefix-lists/qux/as-qux"}  # noqa: E501
                }})
            else:
                result = json.dumps({"{}_resp".format(cmd): {"foo": "bar"}})
            return eossdk.EapiResponse(True, 0, "", [result])
        mgr.run_show_cmd.side_effect = run_show_cmd
        return mgr
@pytest.fixture(scope="session")
def sdk():
    """Provide an instance of DummySdk to test cases."""
    # Session scope: the stub is stateless, so one shared instance suffices.
    return DummySdk()
@pytest.fixture()
def agent(sdk, mocker):
    """Provide a PrefixListAgent instance with mocked Mgrs."""
    # Autospec-patch every eossdk base class before instantiation so the
    # agent's __init__ never touches real sdk state.
    for cls in PrefixListAgent.mro():
        if cls.__module__ == "eossdk":
            mocker.patch("eossdk.{}".format(cls.__name__), autospec=True)
    agent = PrefixListAgent(sdk)
    agent.rptk_endpoint = "https://example.com"
    return agent
@pytest.fixture()
def worker(agent):
    """Provide a PrefixListWorker instance from a mocked agent."""
    # The agent builds its own worker; expose it directly for worker tests.
    agent.init_worker()
    return agent.worker
@pytest.fixture()
def connection(mocker):
    """Provide a mocked Connection instance."""
    # Autospec of the raw _multiprocessing pipe endpoint.
    conn = mock.create_autospec(Connection)
    return conn
|
wolcomm/eos-prefix-list-agent | tests/01_integration/rptk_stub.py | <reponame>wolcomm/eos-prefix-list-agent
# Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Fixtures for PrefixListAgent integration tests."""
from __future__ import print_function
import json
import multiprocessing
import sys
import flask
import gunicorn.app.base
class RptkStub(gunicorn.app.base.BaseApplication):
    """Integrated web server."""

    # In-process flask app plus canned responses mimicking an rptk service.
    app = flask.Flask(__name__)
    formats = {"json": {"description": "JSON object"}}
    policies = {"test": "A dummy test policy"}
    # Canned IRR object -> prefix data served by the /query endpoints.
    objects = {
        "AS-FOO": {
            "ipv4": [
                {"prefix": "192.0.2.0/24", "exact": True}
            ],
            "ipv6": [
                {"prefix": "2001:db8::/32", "exact": False,
                 "greater-equal": 40, "less-equal": 48}
            ]
        },
        "AS-BAR": {
            "ipv4": [
                {"prefix": "198.51.100.0/24", "exact": True},
                {"prefix": "203.0.113.0/24", "exact": True}
            ],
            "ipv6": []
        }
    }

    def __init__(self, **kwargs):
        """Initialise the uwsgi app."""
        # kwargs become gunicorn config settings (see load_config).
        self.opts = kwargs
        super(RptkStub, self).__init__()

    def load(self):
        """Load the uwsgi app."""
        return self.app

    def load_config(self):
        """Set config options."""
        for key, value in self.opts.items():
            try:
                self.cfg.set(key.lower(), value)
            except Exception as e:
                # Unknown settings are reported but not fatal.
                print(e)

    def run(self, *args, **kwargs):
        """Run the server."""
        # Routes are registered lazily here, so the class attributes above
        # can be adjusted before the server starts.
        @self.app.route("/formats")
        def formats():
            return json.dumps(self.formats)

        @self.app.route("/policies")
        def policies():
            return json.dumps(self.policies)

        @self.app.route("/query")
        @self.app.route("/<string:format>/query")
        @self.app.route("/<string:format>/<string:obj>")
        @self.app.route("/<string:format>/<string:obj>/<string:policy>")
        def prefix_list(format=None, obj=None, policy=None):
            # Objects may arrive as a path segment or as repeated
            # 'objects' query parameters; duplicates collapse via set().
            objs = flask.request.args.getlist("objects")
            if obj:
                objs.append(obj)
            objs = set(objs)
            result = {o: self.objects[o] for o in objs}
            return json.dumps(result)
        super(RptkStub, self).run(*args, **kwargs)
class RptkStubProcess(multiprocessing.Process):
    """Multiprocessing runner."""

    def run(self):
        """Run the stub server."""
        # Reset argv so gunicorn's config parsing doesn't see the parent
        # process's (e.g. pytest's) command-line arguments.
        sys.argv = [sys.executable]
        server = RptkStub(loglevel="warning")
        server.run()
def main():
    """Launch a stub version of an rptk web application."""
    return RptkStub().run()
if __name__ == "__main__":
sys.exit(main())
|
wolcomm/eos-prefix-list-agent | prefix_list_agent/exceptions.py | # Copyright (c) 2019 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the MIT License
# (the "License"); you may not use this file except in compliance with the
# License.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""prefix_list_agent custom execptions."""
from __future__ import print_function
class TermException(BaseException):
    """Raised when SIGTERM is handled by handle_sigterm.

    Deliberately derives from BaseException so it escapes broad
    'except Exception' handlers.
    """

    pass


def handle_sigterm(signum, frame):
    """Signal handler that converts a SIGTERM into a TermException."""
    raise TermException()
|
LordPov/net-tests | summary.py | #!/usr/bin/env python
import fnmatch
import os
def find_files(pattern, location='.'):
    """Yield names of entries in *location* matching the glob *pattern*.

    Yields entry names (not full paths) in os.listdir order.
    """
    # 'entry' avoids shadowing the Python 2 builtin 'file'.
    for entry in os.listdir(location):
        if fnmatch.fnmatch(entry, pattern):
            yield entry
def extract_times(file):
    # Parse a 'ping' output file: collect every per-packet RTT, then, when
    # the summary line appears, return the samples filtered against the
    # reported avg/mdev statistics.
    all_times = []
    for line in open(file):
        if 'icmp_seq' in line:
            # Per-packet line: the RTT sits between the last '=' and the
            # trailing unit (e.g. 'time=12.3 ms').
            all_times.append(float(line[line.rfind('=')+1:line.rfind(' ')]))
        elif 'rtt min/avg/max/mdev' in line:
            # Summary line: 'rtt min/avg/max/mdev = a/b/c/d ms'.
            # Only avg and mdev are used.
            _, avg, _, mdev = [float(time) for time in line[line.find('=')+2:line.rfind(' ')].split('/')]
            # Returns from inside the loop on the first summary line.
            # NOTE(review): if no summary line exists the function returns
            # None; and the filter keeps ALL samples whenever mdev < avg,
            # which looks like an inverted outlier test -- confirm intent.
            return [time for time in all_times if (mdev < avg or time < mdev / 2)]
def list_average(input_list):
    """Return the arithmetic mean of *input_list* (raises on empty input)."""
    count = len(input_list)
    return sum(input_list) / count
def summary(pattern):
    """Print min/max/avg of per-file average ping times for files matching *pattern*."""
    averages = [list_average(extract_times(f)) for f in find_files(pattern)]
    print('min: %.2f, max: %.2f, avg: %.2f'
          % (min(averages), max(averages), list_average(averages)))
|
LordPov/net-tests | summary-g.py | <reponame>LordPov/net-tests
#!/usr/bin/env python
# Summarise ping-time logs from the 'g' test run (files matching 'pt-g-*').
import summary
summary.summary('pt-g-*')
|
LordPov/net-tests | summary-s.py | <filename>summary-s.py
#!/usr/bin/env python
import summary
def extract_speeds(file):
    # Parse a speedtest output file for the last 'Download:'/'Upload:' lines
    # and return the two values as a (download, upload) tuple.
    # NOTE(review): relies on single-space separation ('Download: X Mbit/s');
    # raises UnboundLocalError if either line is absent -- confirm inputs.
    for line in open(file):
        if 'Download:' in line:
            download = float(line.split(' ')[1])
        elif 'Upload:' in line:
            upload = float(line.split(' ')[1])
    return download, upload
def output(speeds):
    """Format min/max/avg of *speeds* as a 'min: x, max: y, avg: z' string."""
    stats = (min(speeds), max(speeds), summary.list_average(speeds))
    return 'min: %.2f, max: %.2f, avg: %.2f' % stats
def output_both(pattern):
    """Print download and upload summaries for files matching *pattern*."""
    # zip(*...) transposes the per-file (download, upload) pairs into one
    # sequence of downloads and one of uploads, each formatted by output().
    downloads, uploads = [output(speeds) for speeds in zip(*[extract_speeds(file) for file in summary.find_files(pattern)])]
    print('Downloads (Mbit/s) -', downloads)
    print('Uploads (Mbit/s) -', uploads)
if __name__ == '__main__':
output_both('st-*')
|
LordPov/net-tests | summary-t.py | <reponame>LordPov/net-tests<filename>summary-t.py
#!/usr/bin/env python
# Summarise ping-time logs from the 't' test run (files matching 'pt-t-*').
import summary
summary.summary('pt-t-*')
|
subhajeet2107/pylexer | pylex/config/lexer_dict_config.py | <reponame>subhajeet2107/pylexer
"""
* This file is part of the subhajeet2107/pylexer package.
*
* (c) <NAME> <<EMAIL>>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
"""
from pylex.config.lexer_config import LexerConfig
from pylex.config.token_defination import TokenDefination
class LexerDictConfig(LexerConfig):
    """
    Lexer Configuration using a dictionary

    The dictionary maps regex patterns to token names; a value may also
    already be a TokenDefination instance, which is used as-is.
    """

    def __init__(self, token_definations=None):
        # A mutable default argument ({}) is replaced by None; behaviour is
        # unchanged for all callers (None still means "no definitions").
        self.definations = []
        if token_definations is None:
            token_definations = {}
        for regex, name in token_definations.items():
            if isinstance(name, TokenDefination):
                self.add_token_defination(name)
            else:
                self.add_token_defination(TokenDefination(name, regex))

    def add_token_defination(self, token_defination):
        """Append a TokenDefination to this configuration."""
        self.definations.append(token_defination)

    def get_token_definations(self):
        """Return the list of TokenDefination objects (LexerConfig API)."""
        return self.definations
|
subhajeet2107/pylexer | tests/test_lexer.py | import unittest
from pylex.config.lexer_dict_config import LexerDictConfig
from pylex.token import Token
from pylex.lexer import PyLexer
class TestLexer(unittest.TestCase):
    # NOTE(review): several assertions compare a list against map(...),
    # which only passes on Python 2 where map() returns a list; a few
    # assertIsNone/assertIsInstance pairs on the same expression also look
    # contradictory -- confirm intent before relying on this suite.

    def get_algebra_config(self):
        # Token table for simple arithmetic; the empty name for \s means
        # whitespace is matched but no token is emitted.
        return LexerDictConfig({
            '\\s' : '',
            '\\d+': 'number',
            '\\+' : 'plus',
            '-' : 'minus',
            '\\*' : 'mul',
            '/' : 'div',
        })

    def test_static_scan_algebra(self):
        # Full eager scan of the input into (name, value) token streams.
        config = self.get_algebra_config()
        tokens = PyLexer.scan(config, '2 +3 /4 -1 ')
        self.assertEqual(['number', 'plus', 'number', 'div', 'number', 'minus', 'number'],map(lambda x: x.get_name(), tokens))
        self.assertEqual(['2', '+', '3', '/', '4', '-', '1'], map(lambda x: x.get_value(), tokens))

    def test_move_next(self):
        # Walks the stream: lookahead is the upcoming token, token is the
        # most recently consumed one.
        lexer = PyLexer(self.get_algebra_config())
        lexer.set_input('2 +3 /4 -1 ')
        self.assertIsNone(lexer.get_look_ahead())
        self.assertIsNone(lexer.get_token())
        self.assertTrue(lexer.move_next())
        self.assertIsInstance(lexer.get_look_ahead(), Token)
        self.assertEqual('number', lexer.get_look_ahead().get_name())
        self.assertEqual('2', lexer.get_look_ahead().get_value())
        self.assertIsNone(lexer.get_token())
        self.assertTrue(lexer.move_next())
        self.assertIsInstance(lexer.get_look_ahead(), Token)
        self.assertEqual('plus', lexer.get_look_ahead().get_name())
        self.assertEqual('+', lexer.get_look_ahead().get_value())
        self.assertIsNone(lexer.get_token())
        self.assertEqual('number', lexer.get_look_ahead().get_name())
        self.assertEqual('2', lexer.get_look_ahead().get_value())
        self.assertTrue(lexer.move_next())
        self.assertTrue(lexer.move_next())
        self.assertTrue(lexer.move_next())
        self.assertTrue(lexer.move_next())
        self.assertTrue(lexer.move_next())
        self.assertIsInstance(lexer.get_look_ahead(), Token)
        self.assertEqual('number', lexer.get_look_ahead().get_name())
        self.assertEqual('1', lexer.get_look_ahead().get_value())
        self.assertIsNone(lexer.get_token())
        self.assertEqual('minus', lexer.get_look_ahead().get_name())
        self.assertEqual('-', lexer.get_look_ahead().get_value())
        self.assertFalse(lexer.move_next())
        self.assertIsNone(lexer.get_look_ahead())
        self.assertIsInstance(lexer.get_look_ahead(), Token)
        self.assertEqual('number', lexer.get_look_ahead().get_name())
        self.assertEqual('1', lexer.get_look_ahead().get_value())
        self.assertFalse(lexer.move_next())
        self.assertIsNone(lexer.get_token())
        self.assertIsNone(lexer.get_look_ahead())

    def test_peek(self):
        # peeks() looks further ahead without consuming; reset_peek rewinds
        # the peek cursor only.
        lexer = PyLexer(self.get_algebra_config())
        lexer.set_input('2 +3 /4 -1 ')
        lexer.move_next()
        lexer.move_next()
        token = lexer.peeks()
        self.assertEqual('3', token.get_value())
        token = lexer.peeks()
        self.assertEqual('/', token.get_value())
        lexer.move_next()
        token = lexer.peeks()
        self.assertEqual('/', token.get_value())
        lexer.reset_peek()
        token = lexer.peeks()
        self.assertEqual('/', token.get_value())

    def test_skip_until(self):
        # Advances until the named token becomes the lookahead.
        lexer = PyLexer(self.get_algebra_config())
        lexer.set_input('2 +3 /4 -1 ')
        lexer.move_next()
        lexer.skip_until('minus')
        self.assertEqual('minus', lexer.get_look_ahead().get_name())
        self.assertEqual('4', lexer.get_token().get_value())

    def test_is_next_token(self):
        lexer = PyLexer(self.get_algebra_config())
        lexer.set_input('2 +3 /4 -1 ')
        self.assertFalse(lexer.is_next_token('number'))
        lexer.move_next()
        self.assertTrue(lexer.is_next_token('number'))
        lexer.move_next()
        self.assertTrue(lexer.is_next_token_any(['minus','plus']))

    def test_reset_position(self):
        # reset_position rewinds the cursor to the start of the stream.
        lexer = PyLexer(self.get_algebra_config())
        lexer.set_input('2 +3 /4 -1 ')
        lexer.move_next()
        lexer.move_next()
        lexer.move_next()
        lexer.move_next()
        lexer.move_next()
        lexer.reset_position()
        lexer.move_next()
        self.assertEqual('2', lexer.get_look_ahead().get_value())

    def test_glimpse(self):
        # glimpse() peeks one token and resets the peek cursor, so repeated
        # calls return the same token.
        lexer = PyLexer(self.get_algebra_config())
        lexer.set_input('2 +3 /4 -1 ')
        lexer.move_next()
        self.assertEqual('+', lexer.glimpse().get_value())
        self.assertEqual('+', lexer.glimpse().get_value())
        self.assertEqual('+', lexer.glimpse().get_value())
|
subhajeet2107/pylexer | pylex/error/custom_exception.py | """
* This file is part of the subhajeet2107/pylexer package.
*
* (c) <NAME> <<EMAIL>>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
"""
class UnknownTokenException(Exception):
    """Raised by the lexer when no token defination matches the input."""
    pass
class IllegalArgumentError(ValueError):
    """Raised when an argument has an unexpected type or invalid value."""
    pass
|
subhajeet2107/pylexer | setup.py | from setuptools import setup, find_packages
version = '1.0.0'
# NOTE(review): 'license' shadows the Python 2 builtin of the same name;
# harmless at module level but worth renaming.
license = 'MIT License'
setup(
    name = 'pylexer',
    version = version,
    license = license,
    author = '<NAME>',
    author_email = '<EMAIL>',
    url = 'https://github.com/subhajeet2107/pylexer/',
    long_description_content_type='text/markdown',
    description = 'A python implementation of a lexical analyzer which supports full scan, state based lexing and lookahead',
    long_description = open('README.md').read().strip(),
    packages = find_packages(),
    install_requires=[
        'pytest',
        'six',
    ],
    test_suite = 'tests',
    entry_points = {
        'console_scripts': [
            # NOTE(review): the package directory is 'pylex', but this entry
            # point targets 'pylexer.__main__' -- confirm the module exists.
            'pylexer = pylexer.__main__:main',
        ]
    }
)
|
subhajeet2107/pylexer | pylex/config/lexer_config.py | <gh_stars>0
"""
* This file is part of the subhajeet2107/pylexer package.
*
* (c) <NAME> <<EMAIL>>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
"""
import abc, six
@six.add_metaclass(abc.ABCMeta)
class LexerConfig:
"""
Lexer Config base class for all clases to implement
"""
@abc.abstractmethod
def get_token_definations():
pass |
subhajeet2107/pylexer | pylex/token.py | """
* This file is part of the subhajeet2107/pylexer package.
*
* (c) <NAME> <<EMAIL>>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
"""
from pylex.error.custom_exception import IllegalArgumentError
class Token:
    """
    Token Implementation class, which converts string to tokens, using lexer
    """

    def __init__(self, name, value, offset, count):
        self.name = name        # token type name (e.g. 'number')
        self.value = value      # matched source text
        self.offset = offset    # character offset of the match in the input
        self.position = count   # ordinal position in the token stream

    def get_position(self):
        return self.position

    def get_offset(self):
        return self.offset

    def get_name(self):
        return self.name

    def get_value(self):
        return self.value

    def is_token(self, token):
        """Compare this token's name against *token* (a Token or a string).

        Raises IllegalArgumentError for any other argument type.
        """
        # Bug fixes: isinstance() requires a class as its second argument
        # (the original passed the instance 'self', raising TypeError), and
        # the undefined name 'InvalidArgumentException' is replaced with the
        # imported IllegalArgumentError.
        if isinstance(token, Token):
            return self.name == token.get_name()
        elif type(token) == str:
            return self.name == token
        else:
            raise IllegalArgumentError('Expected string or Token')
|
subhajeet2107/pylexer | pylex/lexer.py | <reponame>subhajeet2107/pylexer
"""
* This file is part of the subhajeet2107/pylexer package.
*
* (c) <NAME> <<EMAIL>>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
"""
import re
from pylex.config.lexer_config import LexerConfig
from pylex.error.custom_exception import IllegalArgumentError, UnknownTokenException
from pylex.token import Token
class PyLexer:
    # Doctrine-style lexer: scan() eagerly turns a string into tokens; the
    # instance methods then walk the stream with a one-token lookahead.

    def __init__(self, config):
        # config: a LexerConfig providing the token definations.
        self.config = config

    @staticmethod
    def scan(config, input_string):
        # Repeatedly try each token defination against the remaining input.
        tokens = []
        offset = 0
        position = 0
        matches = None
        while len(input_string):
            any_match = False
            for token_defination in config.get_token_definations():
                matches = re.search(token_defination.get_regex(), input_string, flags=re.IGNORECASE)
                if matches is not None:
                    str_matched = matches.group(0)
                    str_len = len(str_matched)
                    # An empty token name (e.g. whitespace) emits no token.
                    if len(token_defination.get_name()) > 0:
                        tokens.append(Token(token_defination.get_name(), str_matched, offset, position))
                        position += 1
                    # NOTE(review): this KEEPS the matched prefix instead of
                    # consuming it ('[str_len:]' looks intended); as written
                    # the input never shrinks past the first match -- confirm.
                    input_string = input_string[:str_len]
                    any_match = True
                    offset += str_len
                    break
            if not any_match:
                raise UnknownTokenException('At offset %s: %s' %( offset, input_string[0:16] + '...'))
        return tokens

    def get_input(self):
        return self.input

    def get_position(self):
        return self.position

    def get_look_ahead(self):
        # The next (not yet consumed) token, or None at end of stream.
        return self.lookahead

    def get_token(self):
        # The most recently consumed token, or None before the first move.
        return self.token

    def set_input(self, input_string):
        # Tokenise eagerly and reset all cursor state.
        self.input = input_string
        self.reset()
        self.tokens = PyLexer.scan(self.config, input_string)

    def reset(self):
        self.position = 0
        self.peek = 0
        self.token = None
        self.lookahead = None

    def reset_position(self, position=0):
        # Rewind (or jump) the cursor; lookahead/token are left untouched.
        self.position = position

    def is_next_token(self, token_name):
        return self.lookahead is not None and self.lookahead.get_name() == token_name

    def is_next_token_any(self, token_names):
        return self.lookahead is not None and self.lookahead.get_name() in token_names

    def move_next(self):
        # Consume the current lookahead and advance; returns False at the
        # end of the stream.  Also resets the peek cursor.
        self.peek = 0
        self.token = self.lookahead
        try:
            self.lookahead = self.tokens[self.position]
            self.position += 1
        except IndexError:
            self.lookahead = None
        return self.lookahead != None

    def skip_until(self, token_name):
        # Advance until the lookahead is the named token (or stream end).
        while self.lookahead != None and self.lookahead.get_name() != token_name:
            self.move_next()

    def skip_tokens(self, token_names):
        # Advance while the lookahead is one of the named tokens.
        while self.lookahead != None and self.lookahead.get_name() in token_names:
            self.move_next()

    def peeks(self):
        # Non-consuming look further ahead; returns None past the end.
        # NOTE(review): the counter is incremented BEFORE indexing, so the
        # first call returns tokens[position + 1] rather than
        # tokens[position]; test_peek expects the latter -- confirm.
        try:
            if self.tokens[self.position + self.peek]:
                self.peek += 1
                return self.tokens[self.position + self.peek]
        except IndexError:
            return None

    def peek_while_tokens(self, token_names):
        # NOTE(review): the loop body always returns or breaks on its first
        # iteration, so at most one token is ever inspected -- confirm that
        # this was not meant to keep peeking while names match.
        token = self.peeks()
        while token:
            if token.get_name() not in token_names:
                break
            return token

    def glimpse(self):
        # Peek one token, then reset the peek cursor so repeated calls
        # return the same token.
        peek = self.peeks()
        self.peek = 0
        return peek
|
subhajeet2107/pylexer | pylex/config/token_defination.py | <filename>pylex/config/token_defination.py
"""
* This file is part of the subhajeet2107/pylexer package.
*
* (c) <NAME> <<EMAIL>>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
"""
import re
from pylex.error.custom_exception import IllegalArgumentError
class TokenDefination:
    """Pairs a token name with the regex pattern used to match it."""

    def __init__(self, name, regex, modifiers='i'):
        self.name = name
        delimiter = self.find_delimeter(regex)
        # NOTE(review): this builds a PHP/PCRE-style delimited pattern (e.g.
        # '/^\d+/i'), but PyLexer.scan feeds it to Python's re module, which
        # treats the delimiters and trailing modifiers as literal
        # characters -- confirm the intended pattern format.
        self.regex = '%s^%s%s%s' % (delimiter, regex, delimiter, modifiers)
        # NOTE(review): the formatted string above is always non-empty, so
        # this validation can never fire.
        if not self.regex:
            raise IllegalArgumentError('Invalid regex for token %s : %s' % (name, regex))

    def get_regex(self):
        return self.regex

    def get_name(self):
        return self.name

    def find_delimeter(self, regex):
        """Pick a delimiter character that does not occur in *regex*."""
        choices = ['/', '|', '#', '~', '@']
        for choice in choices:
            if choice not in regex:
                return choice
        raise IllegalArgumentError('Unable to determine delimiter for regex %s' % (regex))
|
utya/sms-gammu-gateway | run.py | <reponame>utya/sms-gammu-gateway
import os
from flask import Flask
from flask_httpauth import HTTPBasicAuth
from flask_restful import reqparse, Api, Resource, abort
from support import load_user_data, init_state_machine
pin = os.getenv('PIN', None)
# NOTE(review): os.getenv returns a string, so any non-empty SSL value
# (including "false" or "0") makes the flag truthy below -- confirm intent.
ssl = os.getenv('SSL', False)
user_data = load_user_data()
machine = init_state_machine(pin)
app = Flask(__name__)
api = Api(app)
auth = HTTPBasicAuth()
@auth.verify_password
def verify(username, password):
    """HTTP basic-auth callback: accept only known username/password pairs."""
    if not username or not password:
        return False
    return user_data.get(username) == password
class Sms(Resource):
    """REST resource that sends an SMS through the Gammu state machine."""

    def __init__(self, sm):
        self.parser = reqparse.RequestParser()
        self.parser.add_argument('text')
        self.parser.add_argument('number')
        self.machine = sm

    @auth.login_required
    def post(self):
        """Send an SMS; requires 'text' and 'number' request parameters."""
        args = self.parser.parse_args()
        if args['text'] is None or args['number'] is None:
            abort(404, message="Parameters 'text' and 'number' are required.")
        message = {
            'Text': args.get("text"),
            'SMSC': {'Location': 1},
            'Number': args.get("number"),
        }
        # Use the injected state machine instead of the module-level global,
        # so the resource honours the instance passed via resource_class_args
        # (identical behaviour today, since both refer to the same object).
        return self.machine.SendSMS(message), 200
class Signal(Resource):
    """REST resource exposing the modem signal quality."""

    def __init__(self, sm):
        self.machine = sm

    def get(self):
        # Use the injected state machine instead of the module-level global,
        # matching the resource_class_args wiring (same object today).
        return self.machine.GetSignalQuality()
api.add_resource(Sms, '/sms', resource_class_args=[machine])
api.add_resource(Signal, '/signal', resource_class_args=[machine])
if __name__ == '__main__':
    if ssl:
        # Expects the certificate/key mounted at /ssl (container volume).
        app.run(port='5000', host="0.0.0.0", ssl_context=('/ssl/cert.pem', '/ssl/key.pem'))
    else:
        app.run(port='5000', host="0.0.0.0")
|
numb3r33/Xtreme-ML-HACK | src/models/cross_validation.py | <filename>src/models/cross_validation.py
import numpy as np
from sklearn.metrics import mean_squared_error
import matplotlib.pyplot as plt
def cross_validate_contacts(X, y, timestamps, model, features, plot=False, contact_types=None):
    """Walk-forward cross-validation for the contacts model.

    For each cutoff in *timestamps*, fit on rows dated before the cutoff
    and score RMSE on the remainder.  Returns the per-fold RMSE list.
    Assumes X has a 'date' column and one-hot 'contact_type_*' columns --
    TODO confirm against the feature builder.
    """
    errors = []
    for timestamp in timestamps:
        mask = X.date < timestamp
        Xtr = X.loc[mask, features]
        ytr = y.loc[mask]
        Xte = X.loc[~mask, features]
        yte = y.loc[~mask]
        model.fit(Xtr, ytr)
        mask_installation_report = Xte['contact_type_Installation Report - Input'] == 1
        mask_tweet_input = Xte['contact_type_Tweet - Input'] == 1
        ypred = model.predict(Xte)
        # installation report input and tweet input are such rare events that we can replace them with zero.
        ypred[mask_installation_report.values] = 0.
        ypred[mask_tweet_input.values] = 0.
        # anything that is less than zero turn it into zero.
        ypred[ypred < 0] = 0.
        if plot:
            # Scatter predicted vs actual per contact type for one fixed fold.
            if timestamp == '2016/01/01':
                for contact_type in contact_types:
                    # Reuses 'mask' for the per-type selector (train mask no
                    # longer needed at this point).
                    mask = Xte['contact_type_%s'%(contact_type)] == 1
                    plt.scatter(yte[mask.values], ypred[mask.values], label='%s'%(contact_type))
                plt.legend(loc='best');
        fold_rmse = np.sqrt(mean_squared_error(yte, ypred))
        print('FOLD RMSE: ', fold_rmse)
        errors.append(fold_rmse)
    return errors
def cross_validate_resolutions(X, y, timestamps, model, features):
    """Walk-forward cross-validation for the resolutions model.

    For each cutoff timestamp, fit on rows dated before the cutoff, predict
    the remainder, and collect the per-fold RMSE values.
    """
    errors = []
    for cutoff in timestamps:
        train_mask = X.date < cutoff
        model.fit(X.loc[train_mask, features], y.loc[train_mask])
        predictions = model.predict(X.loc[~train_mask, features])
        # Clip negative predictions to zero (counts cannot be negative).
        predictions[predictions < 0] = 0.
        fold_rmse = np.sqrt(mean_squared_error(y.loc[~train_mask], predictions))
        print('FOLD RMSE: ', fold_rmse)
        errors.append(fold_rmse)
    return errors
numb3r33/Xtreme-ML-HACK | src/features/build_features.py | <reponame>numb3r33/Xtreme-ML-HACK
import pandas as pd
import numpy as np
from sklearn.preprocessing import LabelEncoder
class Dataset(object):
    # Fluent feature-engineering wrapper around a pandas DataFrame: every
    # add_* method derives a column and returns self so calls can chain.
    # Assumes the frame has datetime 'date', 'contact_type' and
    # 'num_contacts' columns -- TODO confirm against make_dataset.

    def __init__(self, dataset):
        self.dataset = dataset

    def add_week(self):
        # ISO week number of the date.
        self.dataset = self.dataset.assign(week=self.dataset.date.dt.week)
        return self

    def add_month(self):
        self.dataset = self.dataset.assign(month=self.dataset.date.dt.month)
        return self

    def add_year(self):
        self.dataset = self.dataset.assign(year=self.dataset.date.dt.year)
        return self

    def add_weekday(self):
        # 0 = Monday ... 6 = Sunday (pandas convention).
        self.dataset = self.dataset.assign(weekday=self.dataset.date.dt.weekday)
        return self

    def add_dayofyear(self):
        self.dataset = self.dataset.assign(day_of_year=self.dataset.date.dt.dayofyear)
        return self

    def ohe_features(self, features):
        # One-hot encode each feature (dropping the first level) and prepend
        # the dummy columns to the frame.
        feature_ohes = []
        for feature in features:
            feature_ohes.append(pd.get_dummies(self.dataset[feature], drop_first=True, prefix=feature))
        feature_ohes.append(self.dataset)
        self.dataset = pd.concat(feature_ohes, axis=1)
        return self

    def add_isweekend(self):
        # Requires add_weekday() to have run first.
        self.dataset = self.dataset.assign(is_weekend=((self.dataset.weekday == 5) | (self.dataset.weekday == 6)).astype(np.int))
        return self

    def add_islastweek(self):
        # Flags ISO week 53 (year end); requires add_week().
        self.dataset = self.dataset.assign(is_lastweek=(self.dataset.week == 53).astype(np.int))
        return self

    def add_isspecialday(self):
        # Flags specific days of year (1, 2, 6, 59) -- presumably holidays;
        # TODO confirm which calendar these correspond to.
        mask = (self.dataset.day_of_year == 1) | (self.dataset.day_of_year == 6) |\
               (self.dataset.day_of_year == 2) | (self.dataset.day_of_year == 59)
        self.dataset = self.dataset.assign(isspecialday=mask.astype(np.int))
        return self

    def add_mean_num_contacts_by_week(self):
        # Mean contacts per weekday, broadcast back onto each row.
        mean_contacts = self.dataset.groupby(['weekday'])['num_contacts'].mean()
        self.dataset['mean_contact_week'] = self.dataset.weekday.map(mean_contacts)
        return self

    def add_median_num_contacts_by_type(self):
        # NOTE(review): despite the name, this aggregates with .sum(), not
        # .median() -- confirm which was intended.
        median_contacts = self.dataset.groupby(['contact_type'])['num_contacts'].sum()
        self.dataset['median_contact_type'] = self.dataset.contact_type.map(median_contacts)
        return self

    def add_mean_num_contacts_by_type_month(self):
        # NOTE(review): named 'mean' but aggregates with .median() -- confirm.
        mean_contacts = self.dataset.groupby(['contact_type', 'month'])['num_contacts'].median()
        self.dataset['mean_contact_type'] = self.dataset[['contact_type', 'month']].apply(lambda x: mean_contacts[x[0], x[1]], axis=1)
        return self

    def add_max_num_contacts_by_type_month(self):
        # NOTE(review): named '..._month' but groups by day_of_year -- confirm.
        max_contacts = self.dataset.groupby(['contact_type', 'day_of_year'])['num_contacts'].max()
        self.dataset['max_contact_type'] = self.dataset[['contact_type', 'day_of_year']].apply(lambda x: max_contacts[x[0], x[1]], axis=1)
        return self

    def add_min_num_contacts_by_type_month(self):
        min_contacts = self.dataset.groupby(['contact_type', 'month'])['num_contacts'].min()
        self.dataset['min_contact_type'] = self.dataset[['contact_type', 'month']].apply(lambda x: min_contacts[x[0], x[1]], axis=1)
        return self

    def add_range_contacts_by_type_month(self):
        # Per (type, month) spread: max - min of num_contacts.
        max_contacts = self.dataset.groupby(['contact_type', 'month'])['num_contacts'].max()
        min_contacts = self.dataset.groupby(['contact_type', 'month'])['num_contacts'].min()
        range_contacts = max_contacts - min_contacts
        self.dataset['range_contact_type'] = self.dataset[['contact_type', 'month']].apply(lambda x: range_contacts[x[0], x[1]], axis=1)
        return self

    def add_active_contracts(self, active_contracts):
        # active_contracts: mapping from day_of_year to a contract count --
        # TODO confirm the expected mapping shape.
        active_contract_by_day = self.dataset.day_of_year.map(active_contracts)
        self.dataset = self.dataset.assign(active_contract=active_contract_by_day)
        return self

    def add_change_in_trend_indicator(self):
        # Flags post-2014 rows for the two channels whose volume trend shifted.
        mask = (self.dataset.date.dt.year > 2014) & ((self.dataset.contact_type == 'Web - Input') | (self.dataset.contact_type == 'Tweet - Input'))
        self.dataset = self.dataset.assign(change_in_trend=mask.astype(np.int))
        return self

    def label_encode_features(self, features):
        # Adds an integer 'encoded_<feature>' column per categorical feature.
        for feature in features:
            lbl = LabelEncoder()
            lbl.fit(self.dataset[feature])
            self.dataset['encoded_%s'%(feature)] = lbl.transform(self.dataset[feature])
        return self
numb3r33/Xtreme-ML-HACK | src/data/make_dataset.py | <filename>src/data/make_dataset.py<gh_stars>1-10
import numpy as np
import pandas as pd
from itertools import product
def get_contacts_df(contacts):
    """Pivot raw contact records into a long DataFrame.

    Returns one row per (contact type, date) combination with the summed
    contact count; combinations absent from the input get 0.
    """
    pivot = contacts.groupby(['CONTACT.TYPE', 'START.DATE'])['Contacts'].sum().unstack().fillna(0)
    n_types = contacts['CONTACT.TYPE'].nunique()
    n_dates = pivot.shape[1]
    # Dates cycle fastest, contact types slowest — matching the row-major reshape below.
    dates = np.tile(pivot.columns.values, n_types)
    contact_types = np.repeat(pivot.index.values, n_dates)
    contacts_df = pd.DataFrame(pivot.values.reshape(n_dates * n_types, 1))
    contacts_df = contacts_df.assign(dates=dates, contact_types=contact_types)
    return contacts_df.rename(columns={0: 'num_contacts'})
def get_resolutions_df(resolution):
    """Pivot raw resolution records into a long DataFrame.

    Returns one row per (category, subject, date) combination with the summed
    resolution count; combinations absent from the input get 0.
    NOTE(review): the output row order depends on the exact interplay between
    the double unstack() and the final stack(); it is presumably aligned with
    the (category, subject) grid built in `cs_pairs` below — verify before
    refactoring this ordering logic.
    """
    # Sum per (Category, Subject, Date), then spread Date and Subject into columns.
    resolution_grouped = resolution.groupby(['Category', 'Subject', 'Date'])['Resolution'].sum()\
                                   .unstack()\
                                   .unstack()\
                                   .fillna(0)
    # One date per (category, subject) pair; dates cycle fastest.
    dates_res = np.tile(resolution_grouped.columns.levels[0].values, \
                        resolution.Category.nunique() * resolution.Subject.nunique())
    category_subject_pairs = list(product(resolution.Category.unique(), resolution.Subject.unique()))
    # Repeat every (category, subject) pair once per date.
    cs_pairs = []
    for i in range(len(category_subject_pairs)):
        cs_pair_repeated = []
        for j in range(len(resolution_grouped.columns.levels[0].values)):
            cs_pair_repeated.append((category_subject_pairs[i]))
        cs_pairs.append(cs_pair_repeated)
    cs_pairs = np.array(cs_pairs)
    # Flatten the pair grid into parallel category / subject columns.
    categories = []
    subjects = []
    for i in range(cs_pairs.shape[0]):
        for j in range(cs_pairs.shape[1]):
            categories.append(cs_pairs[i][j][0])
            subjects.append(cs_pairs[i][j][1])
    # Collapse the innermost column level back into rows before reshaping.
    resolution_grouped = resolution_grouped.stack()
    df_res = pd.DataFrame(resolution_grouped.values.reshape(cs_pairs.shape[0] * cs_pairs.shape[1], 1))
    df_res = df_res.assign(dates=dates_res)
    df_res = df_res.assign(categories=categories)
    df_res = df_res.assign(subjects=subjects)
    df_res = df_res.rename(columns={0: 'num_resolutions'})
    return df_res
# Month-abbreviation -> month-number lookup used when parsing contract dates.
# NOTE(review): mostly English abbreviations, but 'ago.' (agosto/August) looks
# Spanish — presumably it matches the raw data; confirm against the source files.
month_dict = {
    'jan.': 1, 'feb.': 2, 'mar.': 3, 'apr.': 4,
    'may.': 5, 'jun.': 6, 'jul.': 7, 'ago.': 8,
    'sep.': 9, 'oct.': 10, 'nov.': 11, 'dec.': 12
}


def _build_contract_date(year, month_abbr, day):
    # Shared helper: assemble a 'Y/M/D' string and parse it into a Timestamp.
    return pd.to_datetime('%s/%s/%s'%(year, month_dict[month_abbr], day))


def create_contract_start_date(row):
    """Return the contract start date of *row* as a pandas Timestamp."""
    return _build_contract_date(row['YEAR_CONTRACT'], row['MONTH_CONTRACT'], row['DAY_ALTA_CONTR'])


def create_contract_end_date(row):
    """Return the contract end date of *row* as a pandas Timestamp."""
    return _build_contract_date(row['YEAR_END_CONTRACT'], row['MONTH_END_CONTRACT'], row['DAY_END_CONTRACT'])
def modify_contracts(contracts_new, contracts_end):
    """Attach parsed 'date' and derived 'day_of_year' columns to both frames.

    Returns the augmented (contracts_new, contracts_end) pair; the inputs are
    not mutated (assign returns copies).
    """
    contracts_new = contracts_new.assign(date=contracts_new.apply(create_contract_start_date, axis=1))
    contracts_new = contracts_new.assign(day_of_year=contracts_new.date.dt.dayofyear)
    contracts_end = contracts_end.assign(date=contracts_end.apply(create_contract_end_date, axis=1))
    contracts_end = contracts_end.assign(day_of_year=contracts_end.date.dt.dayofyear)
    return contracts_new, contracts_end
numb3r33/Xtreme-ML-HACK | src/features/average_features.py | <gh_stars>1-10
import pandas as pd
import numpy as np
def prepare_average_features(contacts_df, contacts_df_sub, contacts_test):
    """Attach a 'quarter_mean' historical-average feature to train and test frames.

    For every (date, contact_type) group, averages `num_contacts` over the
    same-weekday history taken from *contacts_df*. The commented-out 7/30/60-day
    variants are alternative window features that were evidently tried and
    disabled; they are kept for reference.
    """
    def get_quarter_mean(row):
        # NOTE(review): despite the name this averages over ALL history up to
        # (date - 90 days), restricted to the same contact type and weekday —
        # not a bounded 90-day window. The windowed version is the commented
        # mask below; confirm which behaviour was intended.
        prev_start_date = row['date'] + pd.DateOffset(-90)
        # mask = (contacts_df.dates >= prev_quarter_start_date.values[0]) & (contacts_df.dates < row['date'].values[0]) \
        #        & (contacts_df.contact_types == row['contact_type'].values[0]) &\
        #        (contacts_df.dates.dt.weekday == row['date'].dt.weekday.values[0])
        mask = (contacts_df.dates <= prev_start_date.values[0]) &\
               (contacts_df.contact_types == row['contact_type'].values[0]) &\
               (contacts_df.dates.dt.weekday == row['date'].dt.weekday.values[0])
        return contacts_df.loc[mask, 'num_contacts'].mean()

    def get_last_7_days_mean(row):
        # NOTE(review): named "7 days" but offsets by 15 days (currently unused).
        prev_start_date = row['date'] + pd.DateOffset(-15)
        mask = (contacts_df.dates >= prev_start_date.values[0]) & (contacts_df.dates < row['date'].values[0]) \
               & (contacts_df.contact_types == row['contact_type'].values[0]) &\
               (contacts_df.dates.dt.weekday == row['date'].dt.weekday.values[0])
        return contacts_df.loc[mask, 'num_contacts'].mean()

    def get_last_30_days_mean(row):
        # 30-day same-weekday window (currently unused).
        prev_start_date = row['date'] + pd.DateOffset(-30)
        mask = (contacts_df.dates >= prev_start_date.values[0]) & (contacts_df.dates < row['date'].values[0]) \
               & (contacts_df.contact_types == row['contact_type'].values[0]) &\
               (contacts_df.dates.dt.weekday == row['date'].dt.weekday.values[0])
        return contacts_df.loc[mask, 'num_contacts'].mean()

    def get_last_60_days_mean(row):
        # 60-day same-weekday window (currently unused).
        prev_start_date = row['date'] + pd.DateOffset(-60)
        mask = (contacts_df.dates >= prev_start_date.values[0]) & (contacts_df.dates < row['date'].values[0]) \
               & (contacts_df.contact_types == row['contact_type'].values[0]) &\
               (contacts_df.dates.dt.weekday == row['date'].dt.weekday.values[0])
        return contacts_df.loc[mask, 'num_contacts'].mean()

    # Train-frame feature.
    prev_quarter_mean = contacts_df_sub.groupby(['date', 'contact_type']).apply(get_quarter_mean)
    # last_7_days_mean = contacts_df_sub.groupby(['date', 'contact_type']).apply(get_last_7_days_mean)
    # last_30_days_mean = contacts_df_sub.groupby(['date', 'contact_type']).apply(get_last_30_days_mean)
    # last_60_days_mean = contacts_df_sub.groupby(['date', 'contact_type']).apply(get_last_60_days_mean)
    quarter_mean = contacts_df_sub[['date', 'contact_type']].apply(lambda x: prev_quarter_mean[x[0], x[1]], axis=1)
    # _7_days_mean = contacts_df_sub[['date', 'contact_type']].apply(lambda x: last_7_days_mean[x[0], x[1]], axis=1)
    # _30_days_mean = contacts_df_sub[['date', 'contact_type']].apply(lambda x: last_30_days_mean[x[0], x[1]], axis=1)
    # _60_days_mean = contacts_df_sub[['date', 'contact_type']].apply(lambda x: last_60_days_mean[x[0], x[1]], axis=1)
    contacts_df_sub = contacts_df_sub.assign(quarter_mean=quarter_mean.values)
    # contacts_df_sub = contacts_df_sub.assign(_7_days_mean=_7_days_mean.values)
    # contacts_df_sub = contacts_df_sub.assign(_30_days_mean=_30_days_mean.values)
    # contacts_df_sub = contacts_df_sub.assign(_60_days_mean=_60_days_mean.values)
    # Same feature for the test frame (history still comes from contacts_df).
    prev_quarter_mean_test = contacts_test.groupby(['date', 'contact_type']).apply(get_quarter_mean)
    # last_7_days_mean_test = contacts_test.groupby(['date', 'contact_type']).apply(get_last_7_days_mean)
    # last_30_days_mean_test = contacts_test.groupby(['date', 'contact_type']).apply(get_last_30_days_mean)
    # last_60_days_mean_test = contacts_test.groupby(['date', 'contact_type']).apply(get_last_60_days_mean)
    quarter_mean_test = contacts_test[['date', 'contact_type']].apply(lambda x: prev_quarter_mean_test[x[0], x[1]], axis=1)
    # _7_days_mean_test = contacts_test[['date', 'contact_type']].apply(lambda x: last_7_days_mean_test[x[0], x[1]], axis=1)
    # _30_days_mean_test = contacts_test[['date', 'contact_type']].apply(lambda x: last_30_days_mean_test[x[0], x[1]], axis=1)
    # _60_days_mean_test = contacts_test[['date', 'contact_type']].apply(lambda x: last_60_days_mean_test[x[0], x[1]], axis=1)
    contacts_test = contacts_test.assign(quarter_mean=quarter_mean_test.values)
    # contacts_test = contacts_test.assign(_7_days_mean=_7_days_mean_test.values)
    # contacts_test = contacts_test.assign(_30_days_mean=_30_days_mean_test.values)
    # contacts_test = contacts_test.assign(_60_days_mean=_60_days_mean_test.values)
    return contacts_df_sub, contacts_test
def get_active_contracts_by_day(contracts_new_df, contracts_end_df):
    """Return the net number of contracts per day_of_year (started minus ended).

    Robustness fix: the plain `-` operator aligns the two series on their
    indices, so a day present in only one frame produced NaN; `sub` with
    `fill_value=0` treats a missing day as zero starts/ends instead.
    """
    total_started = contracts_new_df.groupby('day_of_year')['NUMBER_OF_CONTRACTS'].sum()
    total_ended = contracts_end_df.groupby('day_of_year')['NUMBER_OF_CONTRACTS_ENDED'].sum()
    return total_started.sub(total_ended, fill_value=0)
TLIsolator/AppDevelopmentProject | Forms.py | <filename>Forms.py<gh_stars>0
from wtforms import Form, StringField, RadioField, SelectField, TextAreaField, validators, PasswordField
from wtforms.validators import EqualTo, Email, ValidationError
import main
# Password of the account most recently resolved by username_login_check;
# read by password_login_check during the same form validation pass.
password_holder = None
class CreateUserForm(Form):
    """WTForms form for creating a member profile (name, membership tier, gender, remarks)."""
    firstName = StringField('First Name', [validators.Length(min=1, max=150), validators.DataRequired()])
    lastName = StringField('Last Name', [validators.Length(min=1, max=150), validators.DataRequired()])
    # Membership tier; defaults to Fellow.
    membership = RadioField('Membership', choices=[('F', 'Fellow'), ('S', 'Senior'), ('P', 'Professional')],
                            default='F')
    # Empty default forces the user to make an explicit choice.
    gender = SelectField('Gender', [validators.DataRequired()],
                         choices=[('', 'Select'), ('F', 'Female'), ('M', 'Male')], default='')
    remarks = TextAreaField('Remarks', [validators.Optional()])
# Validation
# HF
def username_duplication_check(form, field):
    """WTForms validator: reject usernames already present in 'Users' storage.

    Fix: compare against None with `is not None` (PEP 8) rather than `!=`.
    """
    existing = main.db.return_keys("Users")
    username = field.data.lower()
    if existing is not None and username in existing:
        raise ValidationError('Username have been used')
    elif username == "admin":
        # 'admin' is reserved for the built-in administrator account.
        raise ValidationError('Username have been used')
def username_login_check(form, field):
    """WTForms validator: ensure the username belongs to a user or the admin.

    Side effect: stashes the matching account's password in the module-level
    `password_holder` for the subsequent password validator.

    Fix: the original called `temp.keys()` before checking `temp` for None,
    raising AttributeError whenever the 'Users' storage is missing; the keys
    are now only read after the None check.
    """
    global password_holder
    password_holder = None
    temp = main.db.get_storage("Users")
    admin_acc = main.db.get_storage("ADMIN")
    username = field.data.lower()
    if temp is not None and username in temp.keys():
        password_holder = temp[username].get_password()
    elif admin_acc.get_username() == username:
        password_holder = admin_acc.get_password()
    else:
        raise ValidationError('Username not found')
def password_login_check(form, field):
    """WTForms validator: check the entered password against `password_holder`.

    Security fix: the original printed the correct password to stdout on a
    failed attempt, leaking credentials into console logs; the print is removed.
    NOTE(review): passwords appear to be stored and compared in plain text —
    consider hashing (e.g. werkzeug.security) as a separate change.
    """
    if field.data != password_holder:
        raise ValidationError('Password incorrect')
# HF
class LoginForm(Form):
    """Login form; field-level validators resolve the account and verify the password."""
    username = StringField('Username', [validators.DataRequired(), username_login_check])
    password = PasswordField('Password', [validators.DataRequired(), password_login_check])
# HF
class SignUpForm(Form):
    """Account registration form: identity, credentials and address fields."""
    first_name = StringField('First Name', [validators.Length(min=1, max=150), validators.DataRequired()])
    last_name = StringField('Last Name', [validators.Length(min=1, max=150), validators.DataRequired()])
    # username_duplication_check rejects names already in storage (and 'admin').
    username = StringField('Username', [validators.Length(min=6, max=15), username_duplication_check, validators.DataRequired()])
    # Must match the confirmation field below.
    password = PasswordField('Password', [validators.Length(min=6, max=15), validators.DataRequired(), EqualTo('confirm_pass', message='Passwords must match')])
    confirm_pass = PasswordField('Confirm Password', [validators.DataRequired()])
    postal_code = StringField('Postal Code', [validators.DataRequired()])
    address = StringField('Street Address', [validators.DataRequired()])
    country = StringField('Country', [validators.DataRequired()])
    city = StringField('City', [validators.DataRequired()])
    unit_number = StringField('Unit Number', [validators.DataRequired()])
|
TLIsolator/AppDevelopmentProject | StorageManager.py | <gh_stars>0
# Please read the instruction before using
# This will control all persistent storage (eg. Add, Delete, Modify)
# Note: Do not modify this before asking me (HF)
# HF
import shelve
class StorageManager():
    """Persistent key/value storage backed by the shelve file 'storage.db'.

    Every method opens the shelve, performs its work and closes it again, so
    no handle stays open between calls. Methods print their outcome instead of
    raising, mirroring the original best-effort behaviour.
    """

    def __init__(self):
        # Probe for an existing storage file; shelve.open(..., 'r') raises
        # when the file does not exist yet.
        try:
            self.__db = shelve.open('storage.db', 'r')
            self.__db.close()
        except Exception:
            print("Storage not found")
        # Drop any stale session data. delete_storage() opens the shelve with
        # 'c', which also creates the file when it was missing.
        self.delete_storage('TEMP')

    def is_key_found(self, name):
        """Return whether *name* is a top-level key (assumes the shelve is open)."""
        return name in self.__db.keys()

    def reset(self):
        """Delete every top-level storage entry."""
        self.__db = shelve.open('storage.db', 'c')
        for key in list(self.__db.keys()):
            del self.__db[key]
        self.__db.close()

    def create_new_storage(self, name, items=None, dict=True):
        """Create storage *name* seeded with *items*, or an empty container.

        `dict` (name kept for API compatibility although it shadows the
        builtin) selects dict vs list when *items* is None.
        """
        self.__db = shelve.open('storage.db', 'c')
        if not self.is_key_found(name):
            if items is None:
                if dict:
                    self.__db[name] = {}
                    print("Created dictionary")
                else:
                    self.__db[name] = []
                    print("Created list")
            else:
                self.__db[name] = items
                print("Created storage")
        else:
            print("existing name of storage found")
        self.__db.close()

    def delete_storage(self, name):
        """Remove storage *name* if present."""
        self.__db = shelve.open('storage.db', 'c')
        if self.is_key_found(name):
            del self.__db[name]
            print("Deleted storage")
        else:
            print("no keys found with the given name")
        self.__db.close()

    def set_storage(self, name, item):
        """Overwrite the value of an existing storage *name*."""
        self.__db = shelve.open('storage.db', 'c')
        if self.is_key_found(name):
            self.__db[name] = item
            print("modified storage")
        else:
            print("Unable to set item due to storage name not found")
        self.__db.close()

    def add_item(self, storage_name, key_to_use, item):
        """Insert *item* under *key_to_use* inside dict storage *storage_name*.

        Prints a notice and does nothing when the key is already taken.
        """
        self.__db = shelve.open('storage.db', 'c')
        if self.is_key_found(storage_name):
            print("storage name found")
            print(self.__db[storage_name])
            if key_to_use in self.__db[storage_name].keys():
                print("Key is in used")
                print("ALL USERS: ")
                print(self.__db[storage_name].keys())
            else:
                # Mutate a copy and write it back: shelve does not persist
                # in-place mutation of stored values (writeback=False).
                temp = self.__db[storage_name]
                print("Key is not in used")
                temp[key_to_use] = item
                self.__db[storage_name] = temp
                print("ALL USERS: ")
                print(self.__db[storage_name].keys())
        else:
            print("Unable to set item due to storage name not found")
        self.__db.close()

    def get_storage(self, name, create=False, dict=False):
        """Return storage *name*, or None; optionally create it when missing."""
        self.__db = shelve.open('storage.db', 'c')
        if self.is_key_found(name):
            temp = self.__db[name]
            self.__db.close()
            print("Storage found")
            return temp
        print("storage name not found")
        if create:
            print("proceeds to create a new one")
            if dict:
                self.__db[name] = {}
                print("Created dictionary")
            else:
                self.__db[name] = []
                print("Created List")
        self.__db.close()

    def check_exist(self, name):
        """Return True when storage *name* exists."""
        self.__db = shelve.open('storage.db', 'c')
        found = self.is_key_found(name)
        self.__db.close()
        return found

    # TEST USE ONLY
    def return_keys(self, name=None):
        """Return the top-level keys, or the keys of dict storage *name* (None if absent)."""
        self.__db = shelve.open('storage.db', 'c')
        if name is None:
            temp = list(self.__db.keys())
            self.__db.close()
            return temp
        elif name in list(self.__db.keys()):
            temp = list(self.__db[name].keys())
            self.__db.close()
            return temp
        else:
            # Fix: the original returned here without closing the shelve,
            # leaking the open handle (and, on some dbm backends, a lock).
            self.__db.close()
            return None
TLIsolator/AppDevelopmentProject | main.py | <filename>main.py
"""
This file will deal with overall of the project
Most classes will be linked to this file
This is to manage all different class to make it organised
Note: Do not modify this before asking me (HF)
"""
from StorageManager import StorageManager
from Admin import Admin
# Module-wide StorageManager instance; populated by init() and shared by the
# Flask app and the form validators.
db = None
# HF
def init():
    """Create the shared StorageManager and expose it as the module-level `db`."""
    global db
    db = StorageManager()
def reset():
    """Wipe all persistent storage.

    Fix: the original called `StorageManager.reset()` — an instance method on
    the class, which raises TypeError (missing `self`). Delegate to the shared
    `db` instance instead; assumes init() has already run.
    """
    db.reset()
def test_mode():
    """Manual smoke test for the storage layer (developer scratch code).

    The triple-quoted block below is disabled example code kept for reference.
    """
    """
    storage = SM.StorageManager()
    storage.create_new_storage("testing", [1, 2, 3])
    #storage.delete_storage("testing")
    #print(storage.return_keys())
    print("hi")
    temp = storage.get_storage("testing")
    temp = [1, 2]
    storage.set_storage("testing",[1,2])
    print("TEMP:")
    print(temp)
    print("DB:")
    print(storage.get_storage("testing"))
    """
    init()
    # print("KEYTSSSSSSSS")
    #db.delete_storage("Users")
    #db.return_keys()
    #print(db.return_keys("Users"))
    temp = db.return_keys()
    #print("TEST")
    print(list(temp))


# Fix: this module is imported by app.py and Forms.py, and previously ran
# test_mode() unconditionally at import time (re-initialising storage and
# printing debug output on every import). Guard it behind direct execution.
if __name__ == '__main__':
    test_mode()
TLIsolator/AppDevelopmentProject | app.py | <gh_stars>0
from flask import Flask, render_template, request, redirect, url_for, flash
from Forms import CreateUserForm, LoginForm, SignUpForm
import User
import main
app = Flask(__name__)
# Initialise the shared StorageManager before any request is handled.
main.init()
# Main page
# Current is Login Page
@app.route('/')
def home():
    """Render the landing page."""
    return render_template('home.html')
# Called For testing
# HF
@app.route('/testing/<choice>')
def testing(choice):
    """Debug route: renders users.html; the *choice* URL segment is ignored."""
    #return 'Test successful, code is {}'.format(code)
    return render_template('users.html')
# Called when user successful logged in
# HF
@app.route('/users/<username>/<int:choice>')
def users(choice, username):
    """Render the user dashboard for *username* with menu section *choice*.

    Fix: when the username was unknown, the original fell through to
    `render_template` with `user_details` unbound (UnboundLocalError and a
    500 response); unknown users are now redirected to the home page. The
    stray debug print was also removed.
    """
    known_users = main.db.return_keys("Users")
    if known_users is not None and username in known_users:
        user_details = main.db.get_storage("Users")[username]
    else:
        print("ERRORRRRRR")
        return redirect(url_for('home'))
    return render_template('users.html', menu=choice, user=user_details)
#Called when admin login
@app.route('/admin')
def admin():
    """Render the administrator dashboard."""
    return render_template('admin.html')
# Called when sign up button is clicked from the login page
# HF
@app.route('/signup', methods=['POST', 'GET'])
def sign_up():
    """Render the sign-up form; on valid POST, create the user and log them in."""
    signup_form = SignUpForm(request.form)
    if request.method == 'POST' and signup_form.validate():
        # Usernames are stored lower-cased; the validators compare lower-cased too.
        user = User.User(signup_form.first_name.data, signup_form.last_name.data, signup_form.username.data.lower(),
                         signup_form.password.data, signup_form.postal_code.data, signup_form.address.data,
                         signup_form.country.data, signup_form.city.data, signup_form.unit_number.data)
        # to create and check if the storage exist
        main.db.get_storage('Users', True, True)
        main.db.add_item('Users', user.get_username(), user)
        # create temporary storage
        # ('TEMP' acts as the session store: it holds the logged-in username.)
        main.db.get_storage("TEMP", True, True)
        main.db.add_item('TEMP', "username", user.get_username())
        return redirect(url_for('users', choice=1, username=user.get_username()))
    return render_template('signUp.html', form=signup_form)
# Called when user press submit at main page
# Two methods, GET is called when website request the page
# There is POST request only when user click the submit button
# HF
@app.route('/loginMenu', methods=['POST', 'GET'])
def loginMenu():
    """Login page: auto-login from the 'TEMP' session, or handle Login / Sign Up.

    GET renders the form; POST dispatches on which submit button was clicked.
    """
    login_form = LoginForm(request.form)
    # login if user already logged in before
    # ('TEMP' persists the last logged-in username between requests.)
    temp_exist = main.db.check_exist('TEMP')
    if temp_exist == True:
        session = main.db.get_storage('TEMP')
        s_keys = session.keys()
        if "username" in s_keys:
            username = session['username']
            return redirect(url_for('users', choice=1, username=username))
    # When a button is clicked
    if request.method == 'POST':
        # NOTE(review): assumes both submit buttons are named 'submit' in the
        # template; raises KeyError otherwise — confirm against userLogin.html.
        btn_pressed = request.form['submit']
        # Login clicked
        # Validate only on a POST request
        if login_form.validate() and btn_pressed == "Login":
            login_name = login_form.username.data.lower()
            admin_acc = main.db.get_storage("ADMIN")
            temp = main.db.return_keys("Users")
            if admin_acc.get_username() == login_name:
                print("Admin Login")
                return redirect(url_for('admin'))
            elif temp != None and login_name in temp:
                temp2 = main.db.get_storage("Users")
                user = temp2[login_name]
                # create temporary storage
                # (records the session so the next GET auto-logs-in)
                main.db.get_storage("TEMP", True, True)
                main.db.add_item('TEMP', "username", user.get_username())
                return redirect(url_for('users', choice=1, username=user.get_username()))
            else:
                print("ERRORRRRRR")
        # Sign up clicked
        elif btn_pressed == "Sign Up":
            return redirect(url_for('sign_up'))
    # Get request will be skipped to this
    return render_template('userLogin.html', form=login_form)
# Start the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()
ScarryBear77/skelvis | skelvis/objects.py | from typing import Union, NewType, List, Dict, Final, Tuple
import k3d
import numpy as np
from k3d.objects import Group, Line, Points, Text, Drawable
from .jointset import JointSet
# A color is either a palette name (str) or a raw 0xRRGGBB int.
Color = NewType('Color', Union[str, int])
# Timestamp -> joint-position array, used for animated (video) skeletons.
PositionTimestamps = NewType('PositionTimestamps', Dict[str, np.ndarray])
# Either a static position array or a per-timestamp mapping.
Positions = NewType('Positions', Union[np.ndarray, PositionTimestamps])
# Named 0xRRGGBB palette used for skeleton coloring.
DEFAULT_COLORS: Final[Dict[str, int]] = {
    'red': 0xFF0000, 'green': 0x00FF00, 'blue': 0x0000FF,
    'yellow': 0xFFFF00, 'teal': 0x00FFFF, 'purple': 0xFF00FF,
    'white': 0xFFFFFF, 'black': 0x000000
}
# Label size for joint-name / coordinate text objects.
DEFAULT_TEXT_SIZE: Final[float] = 0.45
# Render format for a 3D coordinate label.
COORDINATE_FORMAT: Final[str] = '({:.2f}, {:.2f}, {:.2f})'
class DrawableSkeleton(Group):
    """A k3d Group holding one skeleton's joint points, limb lines and optional labels."""

    def __init__(self, joint_set: JointSet, joint_points: List[Points], joint_lines: List[Line],
                 is_ground_truth: bool, joint_names: List[Text] = None, joint_coordinates: List[Text] = None):
        # Collect every drawable the group should render; label lists are optional.
        members: List[Drawable] = joint_points + joint_lines
        for extra in (joint_names, joint_coordinates):
            if extra is not None:
                members = members + extra
        super().__init__(members)
        self.joint_set: JointSet = joint_set
        self.joint_points: List[Points] = joint_points
        self.joint_lines: List[Line] = joint_lines
        self.is_ground_truth: bool = is_ground_truth
        self.joint_names: List[Text] = joint_names
        self.joint_coordinates: List[Text] = joint_coordinates

    def get_left_objects(self) -> List[Drawable]:
        """Return the points and lines belonging to the left body side."""
        points = [self.joint_points[i] for i in self.joint_set.left_joint_indices]
        lines = [self.joint_lines[i] for i in self.joint_set.left_line_indices]
        return points + lines

    def get_right_objects(self) -> List[Drawable]:
        """Return the points and lines belonging to the right body side."""
        points = [self.joint_points[i] for i in self.joint_set.right_joint_indices]
        lines = [self.joint_lines[i] for i in self.joint_set.right_line_indices]
        return points + lines

    def get_center_objects(self) -> List[Drawable]:
        """Return the points and lines belonging to the body center line."""
        points = [self.joint_points[i] for i in self.joint_set.center_joint_indices]
        lines = [self.joint_lines[i] for i in self.joint_set.center_line_indices]
        return points + lines
class Skeleton:
    """Joint positions plus styling, convertible into k3d drawable skeletons.

    `joint_positions` is either a (num_joints, 3) array (static pose) or a
    {timestamp: (num_joints, 3) array} dict (video) — the `*_for_video`
    methods expect the dict form.
    """

    def __init__(self, joint_positions: Positions, joint_set: JointSet,
                 part_size: float, color: Color, is_ground_truth: bool):
        self.joint_positions: Positions = joint_positions
        self.joint_set: JointSet = joint_set
        self.part_size: float = part_size           # point size; line width is derived from it
        self.color: Color = color                   # 'default', a palette name, or 0xRRGGBB int
        self.is_ground_truth: bool = is_ground_truth  # selects the 'default' palette variant

    def to_drawable_skeleton(self) -> DrawableSkeleton:
        """Static skeleton with joint points and limb lines only."""
        joint_points: List[Points] = self.__get_joint_points()
        joint_lines: List[Line] = self.__get_joint_lines()
        return DrawableSkeleton(self.joint_set, joint_points, joint_lines, self.is_ground_truth)

    def to_drawable_skeleton_with_names(self) -> DrawableSkeleton:
        """Static skeleton with joint-name labels attached."""
        joint_points: List[Points] = self.__get_joint_points()
        joint_lines: List[Line] = self.__get_joint_lines()
        joint_names: List[Text] = self.__get_joint_names()
        return DrawableSkeleton(self.joint_set, joint_points, joint_lines,
                                self.is_ground_truth, joint_names=joint_names)

    def to_drawable_skeleton_with_coordinates(self) -> DrawableSkeleton:
        """Static skeleton with per-joint coordinate labels attached."""
        joint_points: List[Points] = self.__get_joint_points()
        joint_lines: List[Line] = self.__get_joint_lines()
        joint_coordinates: List[Text] = self.__get_joint_coordinates()
        return DrawableSkeleton(self.joint_set, joint_points, joint_lines,
                                self.is_ground_truth, joint_coordinates=joint_coordinates)

    def to_drawable_skeleton_for_video(self) -> DrawableSkeleton:
        """Animated skeleton with joint points and limb lines only."""
        joint_points: List[Points] = self.__get_joint_points_for_video()
        joint_lines: List[Line] = self.__get_joint_lines_for_video()
        return DrawableSkeleton(self.joint_set, joint_points, joint_lines, self.is_ground_truth)

    def to_drawable_skeleton_for_video_with_names(self) -> DrawableSkeleton:
        """Animated skeleton with joint-name labels attached."""
        joint_points: List[Points] = self.__get_joint_points_for_video()
        joint_lines: List[Line] = self.__get_joint_lines_for_video()
        joint_names: List[Text] = self.__get_joint_names_for_video()
        return DrawableSkeleton(self.joint_set, joint_points, joint_lines,
                                self.is_ground_truth, joint_names=joint_names)

    def to_drawable_skeleton_for_video_with_coordinates(self) -> DrawableSkeleton:
        """Animated skeleton with per-joint coordinate labels attached."""
        joint_points: List[Points] = self.__get_joint_points_for_video()
        joint_lines: List[Line] = self.__get_joint_lines_for_video()
        joint_coordinates: List[Text] = self.__get_joint_coordinates_for_video()
        return DrawableSkeleton(self.joint_set, joint_points, joint_lines,
                                self.is_ground_truth, joint_coordinates=joint_coordinates)

    def __get_joint_points(self) -> List[Points]:
        """One k3d points object per joint (static)."""
        joint_colors: np.ndarray = self.__get_joint_colors()
        return [k3d.points(
            positions=self.joint_positions[joint_index], point_size=self.part_size,
            shader='mesh', color=int(joint_colors[joint_index])
        ) for joint_index in range(self.joint_set.number_of_joints)]

    def __get_joint_points_for_video(self) -> List[Points]:
        """One k3d points object per joint, keyed by timestamp (animated)."""
        joint_colors: np.ndarray = self.__get_joint_colors()
        return [k3d.points(
            positions={
                timestamp: frame[joint_index]
                for timestamp, frame in self.joint_positions.items()
            }, color=int(joint_colors[joint_index]),
            point_size=self.part_size, shader='mesh'
        ) for joint_index in range(self.joint_set.number_of_joints)]

    def __get_joint_lines(self) -> List[Line]:
        """One k3d line per limb-graph edge (static)."""
        line_colors: np.ndarray = self.__get_line_colors()
        return [k3d.line(
            vertices=[
                self.joint_positions[self.joint_set.limb_graph[line_index][0]],
                self.joint_positions[self.joint_set.limb_graph[line_index][1]]
            ], width=self.part_size / 2.2,
            color=int(line_colors[line_index]),
            shader='mesh'
        ) for line_index in range(len(self.joint_set.limb_graph))]

    def __get_joint_lines_for_video(self) -> List[Line]:
        """One k3d line per limb-graph edge, keyed by timestamp (animated)."""
        line_colors: np.ndarray = self.__get_line_colors()
        return [k3d.line(
            vertices={
                timestamp: np.array([
                    frame[self.joint_set.limb_graph[line_index][0]],
                    frame[self.joint_set.limb_graph[line_index][1]]
                ]) for timestamp, frame in self.joint_positions.items()
            }, width=self.part_size / 2.2,
            color=int(line_colors[line_index]),
            shader='mesh'
        ) for line_index in range(len(self.joint_set.limb_graph))]

    def __get_joint_names(self) -> List[Text]:
        """One text label per joint, showing its name (static)."""
        return [k3d.text(
            text=self.joint_set.names[joint_index],
            position=self.joint_positions[joint_index],
            size=DEFAULT_TEXT_SIZE, label_box=False, color=DEFAULT_COLORS.get('black')
        ) for joint_index in range(self.joint_set.number_of_joints)]

    def __get_joint_names_for_video(self) -> List[Text]:
        """One text label per joint, position keyed by timestamp (animated)."""
        return [k3d.text(
            text=self.joint_set.names[joint_index],
            position={
                timestamp: frame[joint_index]
                for timestamp, frame in self.joint_positions.items()
            }, size=DEFAULT_TEXT_SIZE, label_box=False, color=DEFAULT_COLORS.get('black')
        ) for joint_index in range(self.joint_set.number_of_joints)]

    def __get_joint_coordinates(self) -> List[Text]:
        """One text label per joint, showing its (x, y, z) coordinates (static)."""
        return [k3d.text(
            text=COORDINATE_FORMAT.format(
                self.joint_positions[joint_index][0],
                self.joint_positions[joint_index][1],
                self.joint_positions[joint_index][2]
            ), position=self.joint_positions[joint_index],
            size=DEFAULT_TEXT_SIZE, label_box=False, color=DEFAULT_COLORS.get('black')
        ) for joint_index in range(self.joint_set.number_of_joints)]

    def __get_joint_coordinates_for_video(self) -> List[Text]:
        """One coordinate label per joint, text and position keyed by timestamp."""
        return [k3d.text(
            text={
                timestamp: COORDINATE_FORMAT.format(
                    frame[joint_index][0],
                    frame[joint_index][1],
                    frame[joint_index][2])
                for timestamp, frame in self.joint_positions.items()
            }, position={
                timestamp: frame[joint_index]
                for timestamp, frame in self.joint_positions.items()
            }, size=DEFAULT_TEXT_SIZE, label_box=False, color=DEFAULT_COLORS.get('black')
        ) for joint_index in range(self.joint_set.number_of_joints)]

    def __get_joint_colors(self) -> np.ndarray:
        """Per-joint color array: side-colored 'default' palette or a uniform color."""
        joint_colors_shape: Tuple[int] = (self.joint_set.number_of_joints,)
        if self.color == 'default':
            joint_colors = np.zeros(shape=joint_colors_shape, dtype='uint32')
            if self.is_ground_truth:
                joint_colors[self.joint_set.left_joint_indices] = DEFAULT_COLORS.get('yellow')
                joint_colors[self.joint_set.right_joint_indices] = DEFAULT_COLORS.get('green')
                joint_colors[self.joint_set.center_joint_indices] = DEFAULT_COLORS.get('teal')
            else:
                joint_colors[self.joint_set.left_joint_indices] = DEFAULT_COLORS.get('red')
                joint_colors[self.joint_set.right_joint_indices] = DEFAULT_COLORS.get('blue')
                joint_colors[self.joint_set.center_joint_indices] = DEFAULT_COLORS.get('white')
            return joint_colors
        else:
            return np.full(
                shape=joint_colors_shape,
                fill_value=self.__get_color(self.color),
                dtype='uint32')

    def __get_line_colors(self) -> np.ndarray:
        """Per-limb color array: side-colored 'default' palette or a uniform color."""
        # Fix: one color per limb-graph edge. The previous shape,
        # (number_of_joints - 1,), equals the edge count only for tree-shaped
        # graphs; joint sets with extra edges would get a too-short array.
        line_colors_shape: Tuple[int] = (len(self.joint_set.limb_graph),)
        if self.color == 'default':
            line_colors = np.zeros(shape=line_colors_shape, dtype='uint32')
            if self.is_ground_truth:
                line_colors[self.joint_set.left_line_indices] = DEFAULT_COLORS.get('yellow')
                line_colors[self.joint_set.right_line_indices] = DEFAULT_COLORS.get('green')
                line_colors[self.joint_set.center_line_indices] = DEFAULT_COLORS.get('teal')
            else:
                line_colors[self.joint_set.left_line_indices] = DEFAULT_COLORS.get('red')
                line_colors[self.joint_set.right_line_indices] = DEFAULT_COLORS.get('blue')
                line_colors[self.joint_set.center_line_indices] = DEFAULT_COLORS.get('white')
            return line_colors
        else:
            return np.full(
                shape=line_colors_shape,
                fill_value=self.__get_color(self.color),
                dtype='uint32')

    @staticmethod
    def __get_color(color: Color) -> int:
        """Resolve a palette name or raw int into a 0xRRGGBB int."""
        if isinstance(color, str):
            if color in DEFAULT_COLORS.keys():
                return DEFAULT_COLORS.get(color)
            else:
                raise KeyError('Default colors do not contain ' + color + '.')
        elif isinstance(color, int):
            return color
        else:
            raise TypeError('Color must be either of type \'int\' or \'str\'.')
|
ScarryBear77/skelvis | skelvis/jointset.py | <filename>skelvis/jointset.py
from typing import Tuple, List
import numpy as np
from abc import ABCMeta, abstractmethod
class JointSet(metaclass=ABCMeta):
    """Abstract skeleton topology: joint names, limb graph and left/right/center split."""
    def __init__(self):
        # Display name per joint, index-aligned with position arrays.
        self.names: np.ndarray = np.array([])
        # Subclasses overwrite all of these defaults in their own __init__.
        self.number_of_joints: int = 0
        # Limbs as (joint_index, joint_index) pairs.
        self.limb_graph: List[Tuple[int, int]] = []
        # Joint indices grouped by body side.
        self.left_joint_indices: List[int] = []
        self.right_joint_indices: List[int] = []
        self.center_joint_indices: List[int] = []
        # Limb-graph edge indices grouped by body side.
        self.left_line_indices: List[int] = []
        self.right_line_indices: List[int] = []
        self.center_line_indices: List[int] = []
        # Edge endpoint pairs expected to be vertically aligned in a standing pose.
        self.vertically_aligned_line_indices: List[Tuple[int, int]] = []
    @abstractmethod
    def convert_to_common_14(self):
        """Map this joint set onto the shared 14-joint layout."""
        return self
class MuPoTSJoints(JointSet):
    """17-joint MuPoTS-3D joint set."""
    def __init__(self):
        super().__init__()
        self.names = np.array([
            'HEAD TOP', 'NECK',  # Head
            'RIGHT SHOULDER', 'RIGHT ELBOW', 'RIGHT WRIST',  # Right arm
            'LEFT SHOULDER', 'LEFT ELBOW', 'LEFT WRIST',  # Left arm
            'RIGHT HIP', 'RIGHT KNEE', 'RIGHT ANKLE',  # Right leg
            'LEFT HIP', 'LEFT KNEE', 'LEFT ANKLE',  # Left leg
            'HIP', 'SPINE', 'NOSE'  # Spine
        ])
        # Freeze the names so shared instances cannot be mutated.
        self.names.flags.writeable = False
        self.limb_graph = [
            (10, 9), (9, 8), (8, 14),  # Right leg
            (13, 12), (12, 11), (11, 14),  # Left leg
            (0, 16), (16, 1),  # Head to Thorax
            (1, 15), (15, 14),  # Thorax to Hip
            (4, 3), (3, 2), (2, 1),  # Right arm
            (7, 6), (6, 5), (5, 1),  # Left arm
        ]
        self.number_of_joints = 17
        self.left_joint_indices = [5, 6, 7, 11, 12, 13]
        self.right_joint_indices = [2, 3, 4, 8, 9, 10]
        self.center_joint_indices = [0, 1, 14, 15, 16]
        self.left_line_indices = [3, 4, 5, 13, 14, 15]
        self.right_line_indices = [0, 1, 2, 10, 11, 12]
        self.center_line_indices = [6, 7, 8, 9]
        self.vertically_aligned_line_indices = [
            (16, 0), (1, 16), (15, 1), (14, 15),
            (13, 12), (12, 11), (10, 9), (9, 8)
        ]
    def convert_to_common_14(self):
        """Select and reorder the joints into the Common14 layout."""
        common14_index_order = [14, 8, 9, 10, 11, 12, 13, 1, 5, 6, 7, 2, 3, 4]
        return Common14Joints(names=self.names[common14_index_order])
class OpenPoseJoints(JointSet):
    """25-joint OpenPose (BODY_25-style) joint set."""
    def __init__(self):
        super().__init__()
        self.names = np.array([
            'NOSE', 'NECK',  # Head
            'RIGHT SHOULDER', 'RIGHT ELBOW', 'RIGHT WRIST',  # Right arm
            'LEFT SHOULDER', 'LEFT ELBOW', 'LEFT WRIST',  # Left arm
            'HIP',  # Hip
            'RIGHT HIP', 'RIGHT KNEE', 'RIGHT ANKLE',  # Right leg
            'LEFT HIP', 'LEFT KNEE', 'LEFT ANKLE',  # Left leg
            'RIGHT EYE', 'LEFT EYE', 'RIGHT EAR', 'LEFT EAR',  # Face
            'LEFT BIG TOE', 'LEFT SMALL TOE', 'LEFT HEEL',  # Left foot
            'RIGHT BIG TOE', 'RIGHT SMALL TOE', 'RIGHT HEEL'  # Right foot
        ])
        # Freeze the names so shared instances cannot be mutated.
        self.names.flags.writeable = False
        self.limb_graph = [
            (1, 0), (17, 15), (15, 0), (16, 0), (18, 16),  # Head
            (4, 3), (3, 2), (2, 1),  # Right arm
            (7, 6), (6, 5), (5, 1),  # Left arm
            (1, 8),  # Spine
            (23, 22), (22, 11), (24, 11), (11, 10), (10, 9), (9, 8),  # Right leg
            (20, 19), (19, 14), (21, 14), (14, 13), (13, 12), (12, 8)  # Left leg
        ]
        self.number_of_joints = 25
        self.left_joint_indices = [5, 6, 7, 12, 13, 14, 16, 18, 19, 20, 21]
        # Fix: the final right-side index was 34, which is out of range for
        # 25 joints; RIGHT HEEL is index 24 (mirroring LEFT HEEL at 21).
        self.right_joint_indices = [2, 3, 4, 9, 10, 11, 15, 17, 22, 23, 24]
        self.center_joint_indices = [0, 1, 8]
        self.left_line_indices = [3, 4, 8, 9, 10, 18, 19, 20, 21, 22, 23]
        self.right_line_indices = [1, 2, 5, 6, 7, 12, 13, 14, 15, 16, 17]
        self.center_line_indices = [0, 11]
        self.vertically_aligned_line_indices = [
            (1, 0), (8, 1), (11, 10), (10, 9),
            (14, 13), (13, 12), (24, 11), (21, 14)
        ]
    def convert_to_common_14(self):
        """Select and reorder the joints into the Common14 layout."""
        common14_index_order = [8, 9, 10, 11, 12, 13, 14, 1, 5, 6, 7, 2, 3, 4]
        return Common14Joints(names=self.names[common14_index_order])
    def convert_to_only_stable_joints(self):
        """Return the names of the first 17 joints (the non-foot, COCO-style subset)."""
        stable_joint_indices = np.arange(17)
        return self.names[stable_joint_indices]
class CocoExJoints(JointSet):
    """19-joint extended COCO joint set (COCO plus HIP and NECK)."""
    def __init__(self):
        super().__init__()
        self.names = np.array([
            'NOSE',  # Head
            'LEFT EYE', 'RIGHT EYE',  # Eyes
            'LEFT EAR', 'RIGHT EAR',  # Ears
            'LEFT SHOULDER', 'RIGHT SHOULDER',  # Shoulders
            'LEFT ELBOW', 'RIGHT ELBOW',  # Elbows
            'LEFT WRIST', 'RIGHT WRIST',  # Wrists
            'LEFT HIP', 'RIGHT HIP',  # Hip
            'LEFT KNEE', 'RIGHT KNEE',  # Knees
            'LEFT ANKLE', 'RIGHT ANKLE',  # Ankles
            'HIP', 'NECK'  # Spine
        ])
        # Freeze the names so shared instances cannot be mutated.
        self.names.flags.writeable = False
        self.limb_graph = [
            (0, 1), (1, 3),  # Left face
            (0, 2), (2, 4),  # Right face
            (0, 18), (18, 17),  # Spine
            (18, 5), (5, 7), (7, 9),  # Left arm
            (18, 6), (6, 8), (8, 10),  # Right arm
            (17, 11), (11, 13), (13, 15),  # Left leg
            (17, 12), (12, 14), (14, 16)  # Right leg
        ]
        self.number_of_joints = 19
        self.left_joint_indices = [1, 3, 5, 7, 9, 11, 13, 15]
        self.right_joint_indices = [2, 4, 6, 8, 10, 12, 14, 16]
        self.center_joint_indices = [0, 17, 18]
        self.left_line_indices = [0, 1, 6, 7, 8, 12, 13, 14]
        self.right_line_indices = [2, 3, 9, 10, 11, 15, 16, 17]
        self.center_line_indices = [4, 5]
        self.vertically_aligned_line_indices = [
            (18, 0), (17, 18), (16, 14),
            (14, 12), (15, 13), (13, 11)
        ]
    def convert_to_common_14(self):
        """Select and reorder the joints into the Common14 layout."""
        common14_index_order = [17, 12, 14, 16, 11, 13, 15, 18, 5, 7, 9, 6, 8, 10]
        return Common14Joints(names=self.names[common14_index_order])
class PanopticJoints(JointSet):
    """Joint set used by the CMU Panoptic dataset (19 joints)."""

    def __init__(self):
        super().__init__()
        joint_names = [
            'NECK', 'NOSE', 'HIP',                           # Spine
            'LEFT SHOULDER', 'LEFT ELBOW', 'LEFT WRIST',     # Left arm
            'LEFT HIP', 'LEFT KNEE', 'LEFT ANKLE',           # Left leg
            'RIGHT SHOULDER', 'RIGHT ELBOW', 'RIGHT WRIST',  # Right arm
            'RIGHT HIP', 'RIGHT KNEE', 'RIGHT ANKLE',        # Right leg
            'LEFT EYE', 'LEFT EAR',                          # Left face
            'RIGHT EYE', 'RIGHT EAR'                         # Right face
        ]
        self.names = np.array(joint_names)
        self.names.flags.writeable = False
        self.number_of_joints = 19
        # (parent, child) joint index pairs forming the skeleton's limbs.
        self.limb_graph = [
            (0, 1), (0, 2),                 # Spine
            (0, 3), (3, 4), (4, 5),         # Left arm
            (2, 6), (6, 7), (7, 8),         # Left leg
            (0, 9), (9, 10), (10, 11),      # Right arm
            (2, 12), (12, 13), (13, 14),    # Right leg
            (1, 15), (15, 16),              # Left face
            (1, 17), (17, 18)               # Right face
        ]
        # Left side: arm + leg joints 3..8 plus left eye/ear; right side mirrors it.
        self.left_joint_indices = list(range(3, 9)) + [15, 16]
        self.right_joint_indices = list(range(9, 15)) + [17, 18]
        self.center_joint_indices = [0, 1, 2]
        # Indices into limb_graph, split by body side.
        self.left_line_indices = list(range(2, 8)) + [14, 15]
        self.right_line_indices = list(range(8, 14)) + [16, 17]
        self.center_line_indices = [0, 1]
        # (lower, upper) joint index pairs that roughly point 'up' on a standing person.
        self.vertically_aligned_line_indices = [
            (0, 1), (2, 0), (8, 7),
            (7, 6), (14, 13), (13, 12)
        ]

    def convert_to_common_14(self):
        """Return a Common14Joints set built from this set's names in common-14 order."""
        order = [2, 12, 13, 14, 6, 7, 8, 0, 3, 4, 5, 9, 10, 11]
        return Common14Joints(names=self.names[order])
class Common14Joints(JointSet):
    """The 14 joints shared by every supported joint set.

    The other joint sets produce instances of this class from their
    ``convert_to_common_14`` methods by passing a reordered copy of
    their own name arrays.
    """

    def __init__(self, names=None):
        """Create the common-14 joint set.

        Args:
            names: optional array of the 14 joint names in common-14 order
                (e.g. a reordered copy of another joint set's names).
                Defaults to the canonical names. The array is frozen
                (marked non-writeable) by this constructor.
        """
        super().__init__()
        # Build the default array per instance instead of using a mutable
        # default argument: the original shared a single np.array across every
        # default-constructed instance and mutated it below via
        # `flags.writeable = False`.
        if names is None:
            names = np.array([
                'HIP',                                           # Hip
                'RIGHT HIP', 'RIGHT KNEE', 'RIGHT ANKLE',        # Right leg
                'LEFT HIP', 'LEFT KNEE', 'LEFT ANKLE',           # Left leg
                'NECK',                                          # Head
                'LEFT SHOULDER', 'LEFT ELBOW', 'LEFT WRIST',     # Left arm
                'RIGHT SHOULDER', 'RIGHT ELBOW', 'RIGHT WRIST'   # Right arm
            ])
        self.names = names
        self.names.flags.writeable = False
        # (parent, child) joint index pairs forming the skeleton's limbs.
        self.limb_graph = [
            (0, 1), (1, 2), (2, 3),       # Right leg
            (0, 4), (4, 5), (5, 6),       # Left leg
            (0, 7),                       # Spine
            (7, 8), (8, 9), (9, 10),      # Left arm
            (7, 11), (11, 12), (12, 13)   # Right arm
        ]
        self.number_of_joints = 14
        self.left_joint_indices = [4, 5, 6, 8, 9, 10]
        self.right_joint_indices = [1, 2, 3, 11, 12, 13]
        self.center_joint_indices = [0, 7]
        # Indices into limb_graph, split by body side.
        self.left_line_indices = [3, 4, 5, 7, 8, 9]
        self.right_line_indices = [0, 1, 2, 10, 11, 12]
        self.center_line_indices = [6]
        # (lower, upper) joint index pairs that roughly point 'up' on a standing person.
        self.vertically_aligned_line_indices = [(7, 0), (3, 2), (2, 1), (6, 5), (5, 4)]

    def convert_to_common_14(self):
        """Already in common-14 format; return a new instance over the same names."""
        return Common14Joints(names=self.names)
|
ScarryBear77/skelvis | skelvis/loss.py | <gh_stars>1-10
import numpy as np
def L2(pred: np.ndarray, gt: np.ndarray) -> np.ndarray:
    """Euclidean (L2) distance between prediction and ground truth along the last axis."""
    difference = pred - gt
    return np.linalg.norm(difference, ord=2, axis=-1)
def L1(pred: np.ndarray, gt: np.ndarray) -> np.ndarray:
    """Manhattan (L1) distance between prediction and ground truth along the last axis."""
    difference = pred - gt
    return np.linalg.norm(difference, ord=1, axis=-1)
|
ScarryBear77/skelvis | skelvis/visualizer.py | <filename>skelvis/visualizer.py
import pickle
from typing import Optional, List, Dict, Callable, IO, Tuple
import ipywidgets as widgets
import k3d
import numpy as np
from IPython.display import display
from ipywidgets import Play, Checkbox, IntSlider, Button, ColorPicker, VBox, Tab, HBox, Accordion, Widget, Label
from k3d.plot import Plot
from .jointset import JointSet, MuPoTSJoints, OpenPoseJoints, CocoExJoints, PanopticJoints, Common14Joints
from .loss import L2
from .objects import Skeleton, DrawableSkeleton, Color, Positions
def create_video_player(fps: int, number_of_frames: int) -> Play:
    """Build a Play widget stepping through frames 0..number_of_frames at the given fps."""
    frame_interval_ms = 1000 / fps
    return Play(value=0, min=0, max=number_of_frames, step=1,
                interval=frame_interval_ms, description='Press play', disabled=False)
class VideoPlayer:
    """Wires a Play widget, a frame slider and step buttons to a k3d plot's time."""

    def __init__(self, plot: Plot, fps: int, frames: np.ndarray, video_player: Play = None):
        self.plot: Plot = plot
        if video_player is None:
            video_player = create_video_player(fps, number_of_frames=frames.shape[0] - 1)
        self.video_player: Optional[Play] = video_player
        self.fps: int = fps
        self.frames: np.ndarray = frames

    def get_video_player_widget(self) -> HBox:
        """Build the playback control row: play widget, step buttons and a frame slider."""
        player = self.video_player
        frame_slider = IntSlider(value=player.value, min=player.min,
                                 max=player.max, step=player.step, description='Frame')
        step_forward = Button(description='Next frame')
        step_forward.on_click(self.__update_to_next_frame)
        step_back = Button(description='Previous frame')
        step_back.on_click(self.__update_to_previous_frame)
        # Keep the player, the slider and the plot's animation time in sync client-side.
        widgets.jslink((player, 'value'), (frame_slider, 'value'))
        widgets.jslink((player, 'value'), (self.plot, 'time'))
        return HBox([player, step_back, frame_slider, step_forward])

    def __update_to_next_frame(self, button) -> None:
        player = self.video_player
        if player.value < player.max:
            player.value += 1

    def __update_to_previous_frame(self, button) -> None:
        player = self.video_player
        if player.value > player.min:
            player.value -= 1
class SkeletonColorUpdater:
    """Observer callbacks that recolor a skeleton's left/right/center/all parts."""

    def __init__(self, skeleton: DrawableSkeleton, left_color_picker: ColorPicker,
                 right_color_picker: ColorPicker, center_color_picker: ColorPicker):
        self.skeleton: DrawableSkeleton = skeleton
        self.left_color_picker = left_color_picker
        self.right_color_picker = right_color_picker
        self.center_color_picker = center_color_picker

    def update_left_objects(self, current_color):
        """Recolor every left-side object of the skeleton."""
        value = self.__get_as_hex_int(current_color.new)
        for element in self.skeleton.get_left_objects():
            element.color = value

    def update_right_objects(self, current_color):
        """Recolor every right-side object of the skeleton."""
        value = self.__get_as_hex_int(current_color.new)
        for element in self.skeleton.get_right_objects():
            element.color = value

    def update_center_objects(self, current_color):
        """Recolor every center object of the skeleton."""
        value = self.__get_as_hex_int(current_color.new)
        for element in self.skeleton.get_center_objects():
            element.color = value

    def update_all_objects(self, current_color):
        """Recolor the whole skeleton, then mirror the value to the part pickers."""
        new_color = current_color.new
        value = self.__get_as_hex_int(new_color)
        for element in self.skeleton:
            element.color = value
        self.left_color_picker.value = new_color
        self.right_color_picker.value = new_color
        self.center_color_picker.value = new_color

    @staticmethod
    def __get_as_hex_int(color: str):
        # '#rrggbb' HTML color string -> integer (parsed as 0xrrggbb).
        return int(color.replace('#', '0x'), 16)
class ColorPickerSynchronizer:
    """Propagates a synchronizer tab's picker values to every skeleton's own pickers."""

    def __init__(self, left_skeleton_color_pickers: List[ColorPicker], right_skeleton_color_pickers: List[ColorPicker],
                 center_skeleton_color_pickers: List[ColorPicker], all_skeleton_color_pickers: List[ColorPicker],
                 left_synchronizer_color_picker: ColorPicker, right_synchronizer_color_picker: ColorPicker,
                 center_synchronizer_color_picker: ColorPicker):
        self.left_skeleton_color_pickers: List[ColorPicker] = left_skeleton_color_pickers
        self.right_skeleton_color_pickers: List[ColorPicker] = right_skeleton_color_pickers
        self.center_skeleton_color_pickers: List[ColorPicker] = center_skeleton_color_pickers
        self.all_skeleton_color_pickers: List[ColorPicker] = all_skeleton_color_pickers
        self.left_synchronizer_color_picker: ColorPicker = left_synchronizer_color_picker
        self.right_synchronizer_color_picker: ColorPicker = right_synchronizer_color_picker
        self.center_synchronizer_color_picker: ColorPicker = center_synchronizer_color_picker

    def sync_left_color_pickers(self, current_color):
        """Forward the new left color to every skeleton's left picker."""
        new_value = current_color.new
        for picker in self.left_skeleton_color_pickers:
            picker.value = new_value

    def sync_right_color_pickers(self, current_color):
        """Forward the new right color to every skeleton's right picker."""
        new_value = current_color.new
        for picker in self.right_skeleton_color_pickers:
            picker.value = new_value

    def sync_center_color_pickers(self, current_color):
        """Forward the new center color to every skeleton's center picker."""
        new_value = current_color.new
        for picker in self.center_skeleton_color_pickers:
            picker.value = new_value

    def sync_all_color_pickers(self, current_color):
        """Forward the new color to every skeleton's 'all' picker and to the
        synchronizer's own left/right/center pickers."""
        new_value = current_color.new
        for picker in self.all_skeleton_color_pickers:
            picker.value = new_value
        self.left_synchronizer_color_picker.value = new_value
        self.right_synchronizer_color_picker.value = new_value
        self.center_synchronizer_color_picker.value = new_value
class ColorChanger:
    """Builds the 'Change colors' UI: one tab of color pickers per skeleton,
    plus synchronizer tabs for the predicted / ground-truth groups when both exist."""

    def __init__(self):
        # No state of its own; kept for explicitness.
        super(ColorChanger, self).__init__()

    def get_color_changer_widget(self, skeletons: List[DrawableSkeleton]) -> Tab:
        """Return a Tab widget with a color-picker tab for each skeleton.

        When the list contains both predicted and ground-truth skeletons, two
        extra tabs are appended that recolor every predicted / every
        ground-truth skeleton at once.
        """
        skeleton_color_changer_tabs: List[VBox] = []
        # Each tuple is that skeleton's (left, right, center, all) pickers.
        pred_color_pickers: List[Tuple[ColorPicker, ColorPicker, ColorPicker, ColorPicker]] = []
        gt_color_pickers: List[Tuple[ColorPicker, ColorPicker, ColorPicker, ColorPicker]] = []
        for skeleton in skeletons:
            skeleton_color_changer_tab, color_pickers = self.__create_color_changer_tab(skeleton)
            skeleton_color_changer_tabs.append(skeleton_color_changer_tab)
            if skeleton.is_ground_truth:
                gt_color_pickers.append(color_pickers)
            else:
                pred_color_pickers.append(color_pickers)
        # Synchronizer tabs are only useful when both groups are non-empty.
        if len(pred_color_pickers) != 0 and len(gt_color_pickers) != 0:
            pred_skeleton_color_changer_tab = self.__create_color_synchronizer_tab(pred_color_pickers)
            gt_skeleton_color_changer_tab = self.__create_color_synchronizer_tab(gt_color_pickers)
            skeleton_color_changer_tabs.append(pred_skeleton_color_changer_tab)
            skeleton_color_changer_tabs.append(gt_skeleton_color_changer_tab)
        color_changer_widget: Tab = Tab(children=skeleton_color_changer_tabs)
        for i in range(len(skeletons)):
            color_changer_widget.set_title(i, 'Skeleton {:d} colors'.format(i + 1))
        # The two synchronizer tabs (when present) sit after the per-skeleton tabs.
        if len(skeleton_color_changer_tabs) > len(skeletons):
            color_changer_widget.set_title(len(skeletons), 'Pred skeleton colors')
            color_changer_widget.set_title(len(skeletons) + 1, 'GT skeleton colors')
        return color_changer_widget

    def __create_color_changer_tab(self, skeleton: DrawableSkeleton) -> \
            Tuple[VBox, Tuple[ColorPicker, ColorPicker, ColorPicker, ColorPicker]]:
        """Create one skeleton's picker tab (left/right/center/all) and wire each
        picker's 'value' trait to the matching part of the skeleton."""
        # Initial picker values mirror the current color of the first object of each part.
        left_color_picker: ColorPicker = ColorPicker(
            description='Left color:',
            value=self.__get_as_html_color(skeleton.get_left_objects()[0].color))
        right_color_picker: ColorPicker = ColorPicker(
            description='Right color:',
            value=self.__get_as_html_color(skeleton.get_right_objects()[0].color))
        center_color_picker: ColorPicker = ColorPicker(
            description='Center color:',
            value=self.__get_as_html_color(skeleton.get_center_objects()[0].color))
        all_color_picker: ColorPicker = ColorPicker(description='All color:', value='#ffffff')
        skeleton_color_updater: SkeletonColorUpdater = SkeletonColorUpdater(
            skeleton, left_color_picker, right_color_picker, center_color_picker)
        left_color_picker.observe(skeleton_color_updater.update_left_objects, names='value')
        right_color_picker.observe(skeleton_color_updater.update_right_objects, names='value')
        center_color_picker.observe(skeleton_color_updater.update_center_objects, names='value')
        all_color_picker.observe(skeleton_color_updater.update_all_objects, names='value')
        return (VBox([left_color_picker, right_color_picker, center_color_picker, all_color_picker]),
                (left_color_picker, right_color_picker, center_color_picker, all_color_picker))

    @staticmethod
    def __create_color_synchronizer_tab(
            color_pickers: List[Tuple[ColorPicker, ColorPicker, ColorPicker, ColorPicker]]) -> VBox:
        """Create a tab whose pickers forward their value to the same-position
        picker of every tuple in color_pickers."""
        left_color_picker: ColorPicker = ColorPicker(description='Left color:', value='#ffffff')
        right_color_picker: ColorPicker = ColorPicker(description='Right color:', value='#ffffff')
        center_color_picker: ColorPicker = ColorPicker(description='Center color:', value='#ffffff')
        all_color_picker: ColorPicker = ColorPicker(description='All color:', value='#ffffff')
        color_picker_synchronizer: ColorPickerSynchronizer = ColorPickerSynchronizer(
            left_skeleton_color_pickers=[color_picker[0] for color_picker in color_pickers],
            right_skeleton_color_pickers=[color_picker[1] for color_picker in color_pickers],
            center_skeleton_color_pickers=[color_picker[2] for color_picker in color_pickers],
            all_skeleton_color_pickers=[color_picker[3] for color_picker in color_pickers],
            left_synchronizer_color_picker=left_color_picker,
            right_synchronizer_color_picker=right_color_picker,
            center_synchronizer_color_picker=center_color_picker)
        left_color_picker.observe(color_picker_synchronizer.sync_left_color_pickers, names='value')
        right_color_picker.observe(color_picker_synchronizer.sync_right_color_pickers, names='value')
        center_color_picker.observe(color_picker_synchronizer.sync_center_color_pickers, names='value')
        all_color_picker.observe(color_picker_synchronizer.sync_all_color_pickers, names='value')
        return VBox([left_color_picker, right_color_picker, center_color_picker, all_color_picker])

    @staticmethod
    def __get_as_html_color(color: int) -> str:
        # 0xRRGGBB integer -> '#rrggbb' HTML color string (zero-padded to 6 hex digits).
        return '{0:#0{1}x}'.format(color, 8).replace('0x', '#')
class LossContainer:
    """Computes per-joint and per-skeleton losses between predicted and
    ground-truth skeletons and renders them as a live ipywidgets tab.

    Works both for single-frame visualizations and for videos; in video mode
    the displayed values follow the attached Play widget's current frame.
    """

    def __init__(self, pred_skeletons: np.ndarray, gt_skeletons: np.ndarray,
                 joint_set: JointSet, loss: Callable[[np.ndarray, np.ndarray], np.ndarray],
                 loss_precision: int = 3, video_player: Optional[Play] = None):
        if video_player is not None:
            self.is_video: bool = True
            # Reorder video input so skeletons are indexed first:
            # (skeleton, frame, joint, coordinate) -- assumes input is
            # (frame, skeleton, joint, coordinate); TODO confirm against callers.
            self.pred_skeletons: np.ndarray = np.swapaxes(pred_skeletons, 0, 1)
            self.gt_skeletons: np.ndarray = np.swapaxes(gt_skeletons, 0, 1)
            # Fields related to playing videos
            self.video_player = video_player
            self.min_loss_frame_index: int = 0
            self.max_loss_frame_index: int = 0
        else:
            self.is_video: bool = False
            # Single frame: prepend a frame axis so both modes share one layout.
            self.pred_skeletons: np.ndarray = np.swapaxes(pred_skeletons[np.newaxis], 0, 1)
            self.gt_skeletons: np.ndarray = np.swapaxes(gt_skeletons[np.newaxis], 0, 1)
        self.loss_precision: int = loss_precision
        # e.g. '{:.3f}' -- rebuilt when the precision slider changes.
        self.loss_value_format: str = '{:.' + str(self.loss_precision) + 'f}'
        self.joint_set: JointSet = joint_set
        self.number_of_skeletons: int = self.pred_skeletons.shape[0]
        # Joint loss related fields
        self.joint_losses: np.ndarray = loss(self.pred_skeletons, self.gt_skeletons)
        self.min_joint_loss_indices = np.argmin(self.joint_losses, axis=-1)
        self.max_joint_loss_indices = np.argmax(self.joint_losses, axis=-1)
        # Joint loss related labels ('test' placeholders are overwritten by
        # __set_loss_labels before anything is shown)
        self.joint_loss_labels: List[List[Label]] = []
        self.all_joint_loss_labels: List[Label] = [Label(value='test') for _ in range(self.number_of_skeletons)]
        self.min_joint_loss_name_labels: List[Label] = [Label(value='test') for _ in range(self.number_of_skeletons)]
        self.max_joint_loss_name_labels: List[Label] = [Label(value='test') for _ in range(self.number_of_skeletons)]
        # Skeleton loss related fields
        self.skeleton_losses = np.sum(self.joint_losses, axis=-1)
        self.all_losses = np.sum(self.skeleton_losses, axis=0)
        # 1-based skeleton indices, matching the tab titles shown to the user.
        self.min_skeleton_loss_indices = np.argmin(self.skeleton_losses, axis=0) + 1
        self.max_skeleton_loss_indices = np.argmax(self.skeleton_losses, axis=0) + 1
        # Skeleton loss related labels
        self.skeleton_loss_labels: List[Label] = [Label(value='test') for _ in range(self.number_of_skeletons)]
        self.all_loss_label: Label = Label(value='test')
        self.min_loss_index_label: Label = Label(value='test')
        self.max_loss_index_label: Label = Label(value='test')

    def get_loss_tab(self) -> VBox:
        """Return the full loss UI: a precision slider stacked above the loss tabs."""
        loss_tab: Tab = self.__create_empty_loss_tabs()
        self.__set_loss_labels(0)
        if self.is_video:
            # Refresh the labels whenever the video player changes frame.
            self.video_player.observe(self.__update_losses, names='value')
        precision_adjuster: IntSlider = IntSlider(value=self.loss_precision, min=1, max=8,
                                                  step=1, description='Precision')
        precision_adjuster.observe(self.__update_loss_precision, names='value')
        return VBox(children=[precision_adjuster, loss_tab])

    def __create_empty_loss_tabs(self) -> Tab:
        """Create the 'All losses' tab plus one tab per skeleton (labels still empty)."""
        skeleton_loss_tabs: List[HBox] = [self.__create_loss_tab_for_skeleton(i)
                                          for i in range(self.number_of_skeletons)]
        all_losses_tab: HBox = self.__create_all_losses_tab()
        loss_tabs: List[HBox] = [all_losses_tab] + skeleton_loss_tabs
        loss_tab: Tab = Tab(children=loss_tabs)
        loss_tab.set_title(0, "All losses")
        for i in range(len(loss_tabs) - 1):
            loss_tab.set_title(i + 1, 'Skeleton {:d} losses'.format(i + 1))
        return loss_tab

    def __create_loss_tab_for_skeleton(self, index: int) -> HBox:
        """Create one skeleton's tab: per-joint losses plus min/max statistics.

        Side effect: registers the created joint labels in self.joint_loss_labels.
        """
        joint_loss_labels: List[Label] = [Label(value='test') for _ in range(self.joint_set.number_of_joints)]
        joint_names: VBox = VBox(children=[Label(value=name) for name in self.joint_set.names])
        joint_losses: VBox = VBox(children=joint_loss_labels)
        self.joint_loss_labels.append(joint_loss_labels)
        statistics_labels: VBox = VBox(
            children=[Label(value='All losses:'),
                      Label(value='Joint with min loss:'),
                      Label(value='Joint with max loss:')])
        skeleton_values: VBox = VBox(
            children=[self.all_joint_loss_labels[index],
                      self.min_joint_loss_name_labels[index],
                      self.max_joint_loss_name_labels[index]])
        return HBox(children=[joint_names, joint_losses, statistics_labels, skeleton_values])

    def __create_all_losses_tab(self) -> HBox:
        """Create the summary tab with per-skeleton totals and, for videos,
        buttons that jump to the min/max loss frames."""
        skeleton_labels: VBox = VBox(
            children=[Label(value='Skeleton {:d} loss:'.format(i + 1)) for i in range(self.number_of_skeletons)])
        skeleton_losses: VBox = VBox(children=self.skeleton_loss_labels)
        statistics_labels_column: List[Label] = [Label(value='All losses:'),
                                                 Label(value='Min loss skeleton index:'),
                                                 Label(value='Max loss skeleton index:')]
        statistics_values_column: List[Label] = [self.all_loss_label,
                                                 self.min_loss_index_label,
                                                 self.max_loss_index_label]
        # May append extra rows to both columns (video mode only).
        jump_buttons: List[Button] = self.__create_frame_jump_buttons(statistics_labels_column,
                                                                     statistics_values_column)
        statistics_labels: VBox = VBox(children=statistics_labels_column)
        statistics_values: VBox = VBox(children=statistics_values_column)
        all_losses_tab: HBox = HBox(
            children=[skeleton_labels, skeleton_losses,
                      statistics_labels, statistics_values
                      ] + ([VBox(children=jump_buttons)] if len(jump_buttons) > 0 else [])
        )
        return all_losses_tab

    def __create_frame_jump_buttons(self, statistics_labels_column: List[Label],
                                    statistics_values_column: List[Label]) -> List[Button]:
        """Video mode only: extend the statistics columns with min/max loss frame
        rows (in place) and return the matching jump buttons."""
        jump_buttons: List[Button] = []
        if self.is_video:
            self.min_loss_frame_index: int = np.argmin(self.all_losses)
            min_loss_jump_button: Button = Button(description='Min loss frame')
            min_loss_jump_button.on_click(self.__jump_to_min_loss_frame)
            self.max_loss_frame_index: int = np.argmax(self.all_losses)
            max_loss_jump_button: Button = Button(description='Max loss frame')
            max_loss_jump_button.on_click(self.__jump_to_max_loss_frame)
            statistics_labels_column.append(Label(value='Min loss frame index:'))
            statistics_labels_column.append(Label(value='Max loss frame index:'))
            statistics_values_column.append(Label(value=str(self.min_loss_frame_index)))
            statistics_values_column.append(Label(value=str(self.max_loss_frame_index)))
            jump_buttons.append(min_loss_jump_button)
            jump_buttons.append(max_loss_jump_button)
        return jump_buttons

    def __jump_to_min_loss_frame(self, button):
        # Button callback: seek the video to the lowest-loss frame.
        self.video_player.value = self.min_loss_frame_index

    def __jump_to_max_loss_frame(self, button):
        # Button callback: seek the video to the highest-loss frame.
        self.video_player.value = self.max_loss_frame_index

    def __set_loss_labels(self, index: int) -> None:
        """Write every label's text for frame `index` (0 in single-frame mode)."""
        for i in range(self.number_of_skeletons):
            self.skeleton_loss_labels[i].value = self.loss_value_format.format(self.skeleton_losses[i][index])
            for j in range(self.joint_set.number_of_joints):
                self.joint_loss_labels[i][j].value = self.loss_value_format.format(self.joint_losses[i][index][j])
            self.all_joint_loss_labels[i].value = self.loss_value_format.format(self.skeleton_losses[i][index])
            self.min_joint_loss_name_labels[i].value = self.joint_set.names[self.min_joint_loss_indices[i][index]]
            self.max_joint_loss_name_labels[i].value = self.joint_set.names[self.max_joint_loss_indices[i][index]]
        self.all_loss_label.value = self.loss_value_format.format(self.all_losses[index])
        self.min_loss_index_label.value = str(self.min_skeleton_loss_indices[index])
        self.max_loss_index_label.value = str(self.max_skeleton_loss_indices[index])

    def __update_losses(self, current_frame):
        # Observer callback for the video player's 'value' trait.
        self.__set_loss_labels(current_frame.new)

    def __update_loss_precision(self, precision):
        # Observer callback for the precision slider; re-renders all labels.
        self.loss_precision = precision.new
        self.loss_value_format = '{:.' + str(self.loss_precision) + 'f}'
        if self.is_video:
            self.__set_loss_labels(int(self.video_player.value))
        else:
            self.__set_loss_labels(0)
class SkeletonVisualizer:
    """ A 3D skeleton visualizer which can visualize skeletons in 3D plots."""

    def __init__(self, joint_set: JointSet, size_scalar: float = 1.0):
        # joint_set defines joint names, limb connectivity and left/right/center splits.
        self.joint_set: JointSet = joint_set
        self.plot: Optional[Plot] = None
        # Multiplier applied to the automatically computed joint/limb size.
        self.size_scalar: float = size_scalar
        self.skeletons: List[DrawableSkeleton] = []
        # Shared checkboxes toggling joint-name / coordinate text visibility.
        self.joint_names_visible: Checkbox = Checkbox(description="Show Joint Names")
        self.joint_coordinates_visible: Checkbox = Checkbox(description="Show Coordinates")

    def visualize(
            self, skeletons: np.ndarray, colors: List[Color] = None, include_names: bool = False,
            include_coordinates: bool = False, automatic_camera_orientation: bool = False,
            is_gt_list: List[bool] = None, additional_tabs: List[Tuple[str, Widget]] = None) -> None:
        """Display a static (single-frame) plot of `skeletons` with a control UI.

        skeletons has shape (num_skeletons, num_joints, 3); at most one of
        include_names / include_coordinates may be True.
        """
        self.__assert_include_arguments(include_names, include_coordinates)
        self.__assert_skeleton_shapes(skeletons)
        colors = self.__init_colors(skeletons.shape[0], colors)
        if include_names:
            skeleton_converter = Skeleton.to_drawable_skeleton_with_names
        elif include_coordinates:
            skeleton_converter = Skeleton.to_drawable_skeleton_with_coordinates
        else:
            skeleton_converter = Skeleton.to_drawable_skeleton
        self.__create_skeleton_plot(
            skeletons=skeletons, skeleton_converter=skeleton_converter, colors=colors,
            automatic_camera_orientation=automatic_camera_orientation, is_gt_list=is_gt_list)
        self.plot.display()
        self.__link_text_widgets(include_names, include_coordinates)
        visibility_widget: HBox = HBox([self.joint_names_visible, self.joint_coordinates_visible])
        color_changer: ColorChanger = ColorChanger()
        color_changer_tab: Tab = color_changer.get_color_changer_widget(self.skeletons)
        self.__display_interface([('Change visibilities', visibility_widget),
                                  ('Change colors', color_changer_tab)] +
                                 ([] if additional_tabs is None else additional_tabs))

    def visualize_from_file(
            self, file_name: str, colors: List[Color] = None, include_names: bool = False,
            include_coordinates: bool = False, automatic_camera_orientation: bool = False,
            is_gt_list: List[bool] = None, additional_tabs: List[Tuple[str, Widget]] = None) -> None:
        """Load a pickled skeleton array from `file_name` and visualize it."""
        skeletons = self.__load_from_file(file_name)
        self.visualize(skeletons, colors, include_names, include_coordinates,
                       automatic_camera_orientation, is_gt_list, additional_tabs)

    def visualize_with_ground_truths(
            self, pred_skeletons: np.ndarray, gt_skeletons: np.ndarray,
            pred_colors: List[Color] = None, gt_colors: List[Color] = None,
            include_names: bool = False, include_coordinates: bool = False,
            automatic_camera_orientation: bool = False, include_losses: bool = True,
            loss: Callable[[np.ndarray, np.ndarray], np.ndarray] = L2, loss_precision: int = 3) -> None:
        """Visualize predictions alongside ground truths, optionally with a loss tab."""
        assert pred_skeletons.shape == gt_skeletons.shape, \
            'The predicate and ground truth skeleton arrays must have the same shape.'
        pred_colors = self.__init_colors(pred_skeletons.shape[0], pred_colors)
        gt_colors = self.__init_colors(gt_skeletons.shape[0], gt_colors)
        skeletons: np.ndarray = np.concatenate((pred_skeletons, gt_skeletons), axis=0)
        colors: List[Color] = pred_colors + gt_colors
        is_gt_list: List[bool] = [False] * len(pred_skeletons) + [True] * len(gt_skeletons)
        if include_losses:
            loss_container: LossContainer = LossContainer(pred_skeletons, gt_skeletons,
                                                          self.joint_set, loss, loss_precision)
        self.visualize(
            skeletons, colors, include_names, include_coordinates,
            automatic_camera_orientation, is_gt_list,
            additional_tabs=[('Losses', loss_container.get_loss_tab())] if include_losses else None)

    def visualize_from_file_with_ground_truths(
            self, pred_file_name: str, gt_file_name: str,
            pred_colors: List[Color] = None, gt_colors: List[Color] = None,
            include_names: bool = False, include_coordinates: bool = False,
            automatic_camera_orientation: bool = False, include_losses: bool = True,
            loss: Callable[[np.ndarray, np.ndarray], np.ndarray] = L2, loss_precision: int = 3) -> None:
        """File-based variant of visualize_with_ground_truths (pickled arrays)."""
        pred_skeletons = self.__load_from_file(pred_file_name)
        gt_skeletons = self.__load_from_file(gt_file_name)
        self.visualize_with_ground_truths(
            pred_skeletons, gt_skeletons, pred_colors, gt_colors, include_names, include_coordinates,
            automatic_camera_orientation, include_losses, loss, loss_precision)

    def visualize_video(
            self, frames: np.ndarray, colors: List[Color] = None, fps: int = 15,
            include_names: bool = False, include_coordinates: bool = False,
            automatic_camera_orientation: bool = False, is_gt_list: List[bool] = None,
            player: Optional[Play] = None, additional_tabs: List[Tuple[str, Widget]] = None) -> None:
        """Display an animated plot of `frames`, shaped
        (num_frames, num_skeletons, num_joints, 3), with playback controls."""
        self.__assert_include_arguments(include_names, include_coordinates)
        assert len(frames.shape) == 4
        first_frame: np.ndarray = frames[0]
        self.__assert_skeleton_shapes(first_frame)
        colors = self.__init_colors(first_frame.shape[0], colors)
        skeleton_timestamps: List[Dict[str, np.ndarray]] = self.__get_skeleton_positions_timestamps(frames)
        if include_names:
            skeleton_converter = Skeleton.to_drawable_skeleton_for_video_with_names
        elif include_coordinates:
            skeleton_converter = Skeleton.to_drawable_skeleton_for_video_with_coordinates
        else:
            skeleton_converter = Skeleton.to_drawable_skeleton_for_video
        self.__create_skeleton_plot(
            skeletons=first_frame, skeleton_converter=skeleton_converter, colors=colors, positions=skeleton_timestamps,
            automatic_camera_orientation=automatic_camera_orientation, is_gt_list=is_gt_list)
        self.__link_text_widgets(include_names, include_coordinates)
        self.plot.display()
        visibility_widget: HBox = HBox([self.joint_names_visible, self.joint_coordinates_visible])
        video_player: VideoPlayer = VideoPlayer(self.plot, fps, frames, player)
        video_player_widget: HBox = video_player.get_video_player_widget()
        color_changer: ColorChanger = ColorChanger()
        color_changer_tab: Tab = color_changer.get_color_changer_widget(self.skeletons)
        self.__display_interface([('Play video', video_player_widget),
                                  ('Change visibilities', visibility_widget),
                                  ('Change colors', color_changer_tab)] +
                                 ([] if additional_tabs is None else additional_tabs))

    def visualize_video_from_file(
            self, file_name: str, colors: List[Color] = None, fps: int = 15,
            include_names: bool = False, include_coordinates: bool = False,
            automatic_camera_orientation: bool = False, is_gt_list: List[bool] = None,
            player: Optional[Play] = None, additional_tabs: List[Tuple[str, Widget]] = None) -> None:
        """Load pickled video frames from `file_name` and visualize them."""
        frames = self.__load_from_file(file_name)
        self.visualize_video(
            frames, colors, fps, include_names, include_coordinates,
            automatic_camera_orientation, is_gt_list, player, additional_tabs)

    def visualize_video_with_ground_truths(
            self, pred_frames: np.ndarray, gt_frames: np.ndarray,
            pred_colors: List[Color] = None, gt_colors: List[Color] = None,
            fps: int = 15, include_names: bool = False, include_coordinates: bool = False,
            automatic_camera_orientation: bool = False, include_losses: bool = True,
            loss: Callable[[np.ndarray, np.ndarray], np.ndarray] = L2, loss_precision: int = 3) -> None:
        """Video variant of visualize_with_ground_truths; shares one Play widget
        between playback and the (optional) loss tab."""
        pred_colors = self.__init_colors(pred_frames.shape[1], pred_colors)
        gt_colors = self.__init_colors(gt_frames.shape[1], gt_colors)
        frames: np.ndarray = np.concatenate((pred_frames, gt_frames), axis=1)
        colors: List[Color] = pred_colors + gt_colors
        is_gt_list: List[bool] = [False] * len(pred_colors) + [True] * len(gt_colors)
        player: Play = create_video_player(fps, frames.shape[0] - 1)
        if include_losses:
            loss_container: LossContainer = LossContainer(pred_frames, gt_frames, self.joint_set,
                                                          loss, loss_precision, player)
        self.visualize_video(
            frames, colors, fps, include_names, include_coordinates,
            automatic_camera_orientation, is_gt_list, player,
            additional_tabs=[('Losses', loss_container.get_loss_tab())] if include_losses else None)

    def visualize_video_from_file_with_ground_truths(
            self, pred_file_name: str, gt_file_name: str,
            pred_colors: List[Color] = None, gt_colors: List[Color] = None,
            fps: int = 15, include_names: bool = False, include_coordinates: bool = False,
            automatic_camera_orientation: bool = False, include_losses: bool = True,
            loss: Callable[[np.ndarray, np.ndarray], np.ndarray] = L2, loss_precision: int = 3) -> None:
        """File-based variant of visualize_video_with_ground_truths (pickled arrays)."""
        pred_frames = self.__load_from_file(pred_file_name)
        gt_frames = self.__load_from_file(gt_file_name)
        self.visualize_video_with_ground_truths(
            pred_frames, gt_frames, pred_colors, gt_colors, fps, include_names, include_coordinates,
            automatic_camera_orientation, include_losses, loss, loss_precision)

    @staticmethod
    def __load_from_file(file_name: str) -> np.ndarray:
        # NOTE(review): plain open/close (no `with`); the handle leaks if
        # pickle.load raises. Also, pickle is unsafe on untrusted files.
        file: IO = open(file_name, 'rb')
        skeletons = pickle.load(file)
        file.close()
        return skeletons

    @staticmethod
    def __init_colors(number_of_skeletons: int, colors: List[Color]) -> List[Color]:
        """Default every skeleton to the 'default' color, or validate the given list."""
        if colors is None:
            colors = ['default'] * number_of_skeletons
        else:
            assert number_of_skeletons == len(colors), \
                'The \'skeletons\' and \'colors\' parameters must be the same length.'
        return colors

    def __assert_skeleton_shapes(self, skeletons: np.ndarray) -> None:
        # Expected shape: (num_skeletons, joint_set.number_of_joints, 3).
        assert len(skeletons.shape) == 3, 'The \'skeletons\' parameter should be a 3 dimensional numpy array.'
        assert skeletons.shape[1] == self.joint_set.number_of_joints, \
            'The number of joints of skeletons and the number of joints in the specified joint set must be the same.'
        assert skeletons.shape[2] == 3, 'The skeleton joint coordinates must be 3 dimensional'

    @staticmethod
    def __assert_include_arguments(include_names: bool, include_coordinates: bool) -> None:
        # The two text overlays are mutually exclusive.
        if include_names is True and include_coordinates is True:
            raise AttributeError('Either names or coordinates can be showed, but not both.')

    @staticmethod
    def __get_skeleton_positions_timestamps(frames: np.ndarray) -> List[Dict[str, np.ndarray]]:
        """Convert (frame, skeleton, ...) arrays into k3d time-series dicts:
        one {str(frame_index): positions} dict per skeleton."""
        frames_swapped: np.ndarray = np.swapaxes(frames, 0, 1)
        return [{
            str(timestamp_index): frames_swapped[current_frame_index][timestamp_index]
            for timestamp_index in range(len(frames_swapped[current_frame_index]))
        } for current_frame_index in range(len(frames_swapped))]

    def __create_skeleton_plot(self, skeletons: np.ndarray, skeleton_converter: Callable[[Skeleton], DrawableSkeleton],
                               colors: List[Color], positions: Optional[Positions] = None,
                               automatic_camera_orientation: bool = False, is_gt_list: List[bool] = None) -> None:
        """Create the k3d plot and add all skeletons to it; `positions` defaults to
        the static `skeletons` array (video mode passes per-frame dicts)."""
        self.__init_skeleton_plot(skeletons, automatic_camera_orientation)
        skeleton_part_size: float = self.__calculate_skeleton_part_size(skeletons)
        if positions is None:
            positions = skeletons
        skeletons: List[Skeleton] = self.__get_skeletons(positions, colors, skeleton_part_size, is_gt_list)
        drawable_skeletons: List[DrawableSkeleton] = list(map(skeleton_converter, skeletons))
        self.__add_skeletons_to_plot(drawable_skeletons)

    def __init_skeleton_plot(self, skeletons: np.ndarray, automatic_camera_orientation: bool = False) -> None:
        """Create the k3d plot and point the camera at the skeletons' centroid."""
        self.plot = k3d.plot(antialias=1, camera_auto_fit=False)
        centroid: np.ndarray = np.average(np.average(skeletons, axis=0), axis=0)
        if automatic_camera_orientation:
            # Derive the 'up' direction from the joint set's vertically aligned limbs.
            camera_up_vector: np.ndarray = np.zeros(shape=(3,))
            for line_indices in self.joint_set.vertically_aligned_line_indices:
                camera_up_vector += np.sum(
                    skeletons[:, line_indices[1]] - skeletons[:, line_indices[0]], axis=0)
            camera_up_vector /= np.linalg.norm(camera_up_vector, ord=2)
            self.plot.camera = [0.0, 0.0, 0.0,  # Camera position
                                centroid[0], centroid[1], centroid[2],  # Camera looking at
                                camera_up_vector[0], camera_up_vector[1], camera_up_vector[2]]  # Camera up vector
        else:
            self.plot.camera = [0.0, 0.0, 0.0,  # Camera position
                                0.0, 0.0, centroid[2],  # Camera looking at
                                0.0, -1.0, 0.0]  # Camera up vector

    def __calculate_skeleton_part_size(self, skeletons: np.ndarray) -> float:
        # Scale joint/limb size to the smallest skeleton's extent.
        max_values = [abs(skeleton).max() for skeleton in skeletons]
        return (min(max_values) / 100.0) * self.size_scalar

    def __get_skeletons(self, positions: List[Positions], colors: List[Color],
                        skeleton_part_size: float, is_gt_list: List[bool] = None) -> List[Skeleton]:
        """Zip positions/colors/ground-truth flags into Skeleton objects."""
        if is_gt_list is None:
            is_gt_list = [False] * len(colors)
        position_index, color_index, ground_truth_index = 0, 1, 2
        return list(map(lambda parameter_tuple: Skeleton(
            joint_positions=parameter_tuple[position_index], joint_set=self.joint_set,
            part_size=skeleton_part_size, color=parameter_tuple[color_index],
            is_ground_truth=parameter_tuple[ground_truth_index]
        ), zip(positions, colors, is_gt_list)))

    def __add_skeletons_to_plot(self, skeletons: List[DrawableSkeleton]) -> None:
        # Also records the drawable skeletons for later UI wiring.
        self.skeletons = []
        for drawable_skeleton in skeletons:
            self.plot += drawable_skeleton
            self.skeletons.append(drawable_skeleton)

    def __link_text_widgets(self, include_names: bool, include_coordinates: bool) -> None:
        """Hook the visibility checkboxes up to whichever text overlay is active."""
        if include_names:
            self.__link_joint_name_visibility_with_checkbox()
        if include_coordinates:
            self.__link_joint_coordinate_visibility_with_checkbox()

    def __link_joint_name_visibility_with_checkbox(self) -> None:
        for skeleton in self.skeletons:
            for joint_name in skeleton.joint_names:
                widgets.jslink((joint_name, 'visible'), (self.joint_names_visible, 'value'))

    def __link_joint_coordinate_visibility_with_checkbox(self) -> None:
        for skeleton in self.skeletons:
            for joint_coordinate in skeleton.joint_coordinates:
                widgets.jslink((joint_coordinate, 'visible'), (self.joint_coordinates_visible, 'value'))

    @staticmethod
    def __display_interface(widget_tuples: List[Tuple[str, Widget]]) -> None:
        """Render the (title, widget) pairs as a titled Accordion below the plot."""
        interface: Accordion = Accordion(children=list(map(lambda widget_tuple: widget_tuple[1], widget_tuples)))
        for i in range(len(widget_tuples)):
            interface.set_title(i, widget_tuples[i][0])
        display(interface)
class MuPoTSVisualizer(SkeletonVisualizer):
    """SkeletonVisualizer preconfigured with the MuPoTS joint set."""
    def __init__(self, size_scalar: float = 1.0):
        super().__init__(joint_set=MuPoTSJoints(), size_scalar=size_scalar)
class OpenPoseVisualizer(SkeletonVisualizer):
    """SkeletonVisualizer preconfigured with the OpenPose joint set."""
    def __init__(self, size_scalar: float = 1.0):
        super().__init__(joint_set=OpenPoseJoints(), size_scalar=size_scalar)
class CocoExVisualizer(SkeletonVisualizer):
    """SkeletonVisualizer preconfigured with the extended COCO joint set."""
    def __init__(self, size_scalar: float = 1.0):
        super().__init__(joint_set=CocoExJoints(), size_scalar=size_scalar)
class PanopticVisualizer(SkeletonVisualizer):
    """SkeletonVisualizer preconfigured with the CMU Panoptic joint set."""
    def __init__(self, size_scalar: float = 1.0):
        super().__init__(joint_set=PanopticJoints(), size_scalar=size_scalar)
class Common14Visualizer(SkeletonVisualizer):
    """SkeletonVisualizer preconfigured with the Common14 joint set."""
    def __init__(self, size_scalar: float = 1.0):
        super().__init__(joint_set=Common14Joints(), size_scalar=size_scalar)
|
shivaco/DBL-python-webhook | listener.py | <reponame>shivaco/DBL-python-webhook
import sys, datetime, json, re
from discord import Webhook, RequestsWebhookAdapter, Embed, Color
from flask import Flask, request, abort, jsonify
from pymongo import MongoClient
import os
# Load deployment settings once at import time.
with open("settings.json") as settings:
    data = json.load(settings)
webhook_password = data['webhook_password']
discord_webhook = data['discord_webhook_url']
if discord_webhook != "":
    # First run of digits in the URL is taken as the numeric webhook id.
    webhook_id = int(re.search(r"\d+", discord_webhook).group())
    # NOTE(review): this lookahead-based pattern captures the text after the
    # last '/' — presumably the webhook token; verify against the actual
    # Discord webhook URL format.
    webhook_token = re.search(r"(?!.*\/)+(.*)", discord_webhook).group()
mongo = MongoClient()
@app.route('/test', methods=['GET', 'POST'])
def test():
    # Debug endpoint: echo the request headers back as JSON.
    return jsonify(dict(request.headers))
@app.route('/dblwebhook', methods=['POST'])
def webhook():
    """DBL vote endpoint: persist the vote in Mongo and optionally announce it
    through the configured Discord webhook. Returns 200 on success, 400 on a
    missing/incorrect Authorization header."""
    sys.stdout.flush()
    # Reject anything that does not carry the shared secret.
    if request.headers.get('Authorization') != webhook_password:
        abort(400)
    body = request.json
    user_id = body.get('user')
    bot_id = body.get('bot')  # ID of the bot that was upvoted
    request_type = body.get('type')
    weekend_status = body.get('isWeekend')
    now = datetime.datetime.utcnow()
    mongo.voters.vote.insert_one({
        'type': request_type,
        'user': user_id,
        'bot': bot_id,
        'weekend': weekend_status,
        'time': now,
    })
    if discord_webhook != "":
        embed_title = "Test" if request_type == 'test' else 'New upvote!'
        embed = Embed(title=embed_title, description=f"**Upvoter: <@{user_id}>** ({user_id})\n**Upvoted bot:** <@{bot_id}> ({bot_id})", timestamp=datetime.datetime.utcnow(), color=Color.green())
        Webhook.partial(webhook_id, webhook_token, adapter=RequestsWebhookAdapter()).send(embed=embed)
    return '', 200
if __name__ == '__main__':
    # app.run blocks until the dev server stops; only then is Mongo closed.
    app.run(host='0.0.0.0', port=5000)
    mongo.close()
|
Carl-Rabbit/quick-web | BUVBack/app.py | <filename>BUVBack/app.py
from flask import Flask, request, jsonify
from flask_cors import CORS
import json
import os
import calculate_iou
# Directory containing this file; used to resolve the bundled data/ files.
FILE_ABS_PATH = os.path.dirname(__file__)
app = Flask(__name__)
CORS(app)  # allow the Vue front end (served from another origin) to call this API
@app.route('/api/test/counter/', methods=['POST'])
def update_counter():
    """Increment the counter sent by the client and echo it back as JSON."""
    incoming = request.json
    next_value = int(incoming['counter']) + 1
    return jsonify({'counter': next_value}), 200, {"Content-Type": "application/json"}
@app.route('/api/test/hello/', methods=['POST'])
def hello_resp():
    """Log the numeric 'msg' field of the request and answer with a fixed greeting."""
    # NOTE(review): int() raises if 'msg' is not numeric — confirm the client
    # always sends a number here.
    print(int(request.json['msg']))
    return "hello VUE"
@app.route('/api/test/fetchData/', methods=['POST'])
def fetch_data():
    """Serve the bundled miserables graph data as JSON."""
    graph_path = os.path.join(FILE_ABS_PATH, 'data', 'miserables.json')
    with open(graph_path, 'r') as input_file:
        return json.load(input_file)
@app.route('/api/test/fetchMap/', methods=['POST'])
def fetch_map():
    """Serve the line/station map data as JSON."""
    # Opened in binary mode ('rb') so json.load detects the file's encoding itself.
    map_path = os.path.join(FILE_ABS_PATH, 'data', 'line_station.json')
    with open(map_path, 'rb') as input_file:
        return json.load(input_file)
@app.route('/api/test/fetchRoute/', methods=['POST'])
def fetch_route():
    """Serve the example route data as JSON."""
    route_path = os.path.join(FILE_ABS_PATH, 'data', 'example_1.json')
    with open(route_path, 'rb') as input_file:
        return json.load(input_file)
@app.route('/api/test/fetchAll/', methods=['POST'])
def fetch_all():
    """Serve the complete route-info data set as JSON."""
    all_routes_path = os.path.join(FILE_ABS_PATH, 'data', 'allRouteInfo.json')
    with open(all_routes_path, 'rb') as input_file:
        return json.load(input_file)
@app.route('/api/test/fetchAllData/', methods=['POST'])
def fetch_alldata():
    """Serve the aggregate data file (stored GBK-encoded) as JSON."""
    all_data_path = os.path.join(FILE_ABS_PATH, 'data', 'data.json')
    with open(all_data_path, 'r', encoding="gbk") as input_file:
        return json.load(input_file)
@app.route('/api/test/calIOU/', methods=['POST'])
def calIOU():
    """Compute the IOU of the two polygons posted by the client; returns it as a string."""
    payload = request.get_json(silent=True)
    print(payload)
    return calculate_iou.calculate_iou(payload)
if __name__ == '__main__':
    # Run the Flask development server on the default host/port.
    app.run()
|
Carl-Rabbit/quick-web | BUVBack/calculate_iou.py | <reponame>Carl-Rabbit/quick-web
# -*- coding: utf-8 -*-
import numpy as np
import shapely
from shapely.geometry import Polygon, MultiPoint # 多边形
def calculate_iou(linedata):
    """Return the IOU (intersection over union) of two quadrilaterals as a string.

    linedata: dict with keys 'line1' and 'line2', each a sequence of (x, y)
    vertex pairs. Each polygon is replaced by its convex hull, and the union
    area is taken as the area of the convex hull of all vertices of both
    polygons (not the true polygon union).
    """
    a = linedata["line1"]
    poly1 = Polygon(a).convex_hull
    b = linedata["line2"]
    poly2 = Polygon(b).convex_hull
    # All vertices of both polygons; their convex hull serves as the "union".
    union_poly = np.concatenate((a, b))
    if not poly1.intersects(poly2):
        iou = 0
    else:
        try:
            inter_area = poly1.intersection(poly2).area
            union_area = MultiPoint(union_poly).convex_hull.area
            if union_area == 0:
                # Degenerate (zero-area) hull. Bug fix: the original set iou = 0
                # here but then fell through to the division below, raising
                # ZeroDivisionError; the else keeps the guard effective.
                iou = 0
            else:
                iou = float(inter_area) / union_area
        except shapely.geos.TopologicalError:
            print('shapely.geos.TopologicalError occured, iou set to 0')
            iou = 0
    print(iou)
    return str(iou)
# if __name__ == '__main__':
# res = calculate_iou()
# print("res is",res, type(res))
# print(iou)
|
mbs38/mqttRuleBackend | ruleBackend.py | import time
import paho.mqtt.client as mqtt
import signal
import sys
import threading
topics=[]        # Topic instances registered by user scripts
publisher=None   # set by init(): shared Publisher instance
subscriber=None  # set by init(): shared Subscriber instance
connected=None   # True after connect, False on disconnect/SIGINT; drives user main loops
def disconnecthandler(mqc,userdata,rc):
    """paho on_disconnect callback: flag the connection as lost so main loops can exit."""
    global connected
    connected = False
    print("Disconnected from broker")
def init(host,port,user=None,password=None):
    """Create the module-wide Publisher and Subscriber.

    Bug fix: the source contained the anonymization placeholder
    ``password=<PASSWORD>``, which is a syntax error; ``None`` restores the
    intended "no authentication" default (matching ``user=None``).
    """
    global publisher
    publisher = Publisher(host,port,user,password)
    global subscriber
    subscriber = Subscriber(host,port,user,password)
def start():
    # Connect the shared subscriber; all topics must have been registered first.
    subscriber.connect()
class Subscriber:
    """Wraps a paho-mqtt client that dispatches incoming messages to registered handlers."""
    def __init__(self,host,port,user,pw):
        self.clientid=None
        self.mqc=None
        self.host=host
        self.port=port
        self.user=user
        self.pw=pw
        self.topics=[]    # [(topic, qos)] pairs passed to subscribe()
        self.handlers=[]  # [(topic, callable)] dispatch table
        self.clientid="mqttRuleBackend-subscriber-"+ str(time.time())
    def addTopic(self,topic,handler):
        """Register handler for topic (QoS 1). Call before connect()."""
        self.topics.append((topic,1))
        self.handlers.append((topic,handler))
    def connect(self):
        """Create the MQTT client, wire up the callbacks and start the network loop."""
        self.mqc=mqtt.Client(client_id=self.clientid)
        if self.user is not None and self.pw is not None:
            self.mqc.username_pw_set(self.user,self.pw)
        self.mqc.on_connect=self.connecthandler
        self.mqc.on_disconnect=disconnecthandler
        self.mqc.on_message=self.messagehandler
        self.mqc.on_log=self.on_log
        self.mqc.disconnected = True
        self.mqc.connect(self.host,self.port,60)
        self.mqc.loop_start()
        global connected
        connected = True
        print("New client: "+self.clientid)
    def messagehandler(self,mqc,userdata,msg):
        """paho on_message callback: forward the decoded payload to every handler
        whose registered topic matches the message topic exactly."""
        payload=str(msg.payload.decode("utf-8"))
        topic=str(msg.topic)
        for t in self.handlers:
            if t[0] == topic:
                t[1](topic,payload)
    def connecthandler(self,mqc,userdata,flags,rc):
        """paho on_connect callback: (re)subscribe to all registered topics."""
        self.mqc.subscribe(self.topics)
        print("Subscribing to: "+str(self.topics))
    def on_log(self, client, userdata, level, buff):
        # Bug fix: 'self' was missing from the signature. Assigned as a bound
        # method to mqc.on_log, paho's four-argument log call then raised a
        # TypeError on every log line.
        print("log: ",buff)
class Publisher:
    """Wraps a paho-mqtt client used only for publishing messages."""
    def __init__(self,host,port,user,pw):
        self.host=host
        self.port=port
        self.user=user
        self.pw=pw
        self.clientid="mqttRuleBackend-publisher-"+ str(time.time())
        print("New client: "+self.clientid)
        self.mqc=mqtt.Client(client_id=self.clientid)
        if self.user is not None and self.pw is not None:
            self.mqc.username_pw_set(self.user,self.pw)
        self.mqc.on_log=self.on_log
        self.mqc.disconnected = True
        self.mqc.on_disconnect=disconnecthandler
        self.mqc.connect(self.host,self.port,60)
        self.mqc.loop_start()
    def on_log(self, client, userdata, level, buff):
        # Bug fix: 'self' was missing from the signature. Assigned as a bound
        # method to mqc.on_log, paho's four-argument log call then raised a
        # TypeError on every log line.
        print("log: ",buff)
    def send(self,topic,payload):
        """Publish payload on topic with QoS 1, not retained."""
        self.mqc.publish(topic,payload,qos=1,retain=False)
class Topic:
    """Binds a rule callable to an MQTT topic with a trigger condition.

    react_on is one of:
      * "on_message"        - fire on every message
      * "on_payload:<text>" - fire only when the payload equals <text>
      * "on_change"         - fire when the payload differs from the previous one
    """
    _PAYLOAD_PREFIX = "on_payload:"
    def __init__(self,rule,topic,react_on):
        self.topic=topic
        self.react_on=react_on
        self.oldPayload=None   # last payload seen, used by "on_change"
        self.rule=rule
        subscriber.addTopic(self.topic,self.messagehandler)
    def messagehandler(self,topic,payload):
        """Dispatch an incoming message according to the configured trigger."""
        if self.react_on == "on_message":
            self.executeRule(payload,topic)
        elif self.react_on.startswith(self._PAYLOAD_PREFIX):
            # Bug fix: the original used react_on.lstrip("on_payload:"), which
            # strips *characters from that set* (o, n, _, p, a, y, l, d, :)
            # rather than the prefix, corrupting expected payloads such as "on"
            # or "201". Slicing removes exactly the prefix.
            expected = self.react_on[len(self._PAYLOAD_PREFIX):]
            if payload == expected:
                self.executeRule(payload,topic)
        elif self.react_on == "on_change":
            if self.oldPayload is not None and self.oldPayload != payload:
                self.executeRule(payload,topic)
            self.oldPayload=payload
    def executeRule(self,payload,topic):
        """Run the rule in a daemon thread so slow rules don't block the MQTT loop."""
        try:
            sbl=threading.Thread(target=self.rule,args=(payload,topic))
            sbl.daemon = True
            sbl.start()
        except Exception as e:
            print("Error when executing rule: "+str(e))
class State:
    """Tracks the most recent payload published on a topic (always a string)."""
    def __init__(self, topic):
        self.topic = topic
        self.state = ""  # empty until the first message arrives
        subscriber.addTopic(self.topic, self.messagehandler)
    def messagehandler(self, topic, payload):
        """Subscriber callback: remember the latest payload."""
        self.state = payload
def on_log(client, userdata, level, buff):
    # Module-level paho log callback (on_log(client, userdata, level, buf) signature).
    print("log: ",buff)
def signal_handler(signal, frame):
    """SIGINT handler: announce shutdown and let user main loops terminate."""
    global connected
    connected = False
    print('Exiting ' + sys.argv[0])
# Install the Ctrl+C handler as soon as this module is imported.
signal.signal(signal.SIGINT, signal_handler)
|
mbs38/mqttRuleBackend | example_rule.py | import ruleBackend
import time
host="localhost"
port=1883
user = ""
password = ""
# NOTE(review): empty-string credentials are not None, so Subscriber/Publisher
# will pass them to username_pw_set(); use None to skip MQTT auth entirely —
# confirm which is intended.
ruleBackend.init(host,port,user,password)
# if you need authentication change user and password accordingly
#######################################################
# Which states do you want to be available?
someBool1 = ruleBackend.State("modbus/sth/state")
someInt1 = ruleBackend.State("modbus/int/state")
# Attention: type of the states is always string
# One word on timing: If a rule and a state have the same topic,
# make sure you initialize the state before you initialize the rule
# (in this example line 11 and 12 before lines 46 to 50). Otherwise
# the state will be updated after the rule has been executed.
#######################################################
# Define the rules here:
# a global variable
# Counts how many times rule1 has fired ("zaehler" is German for "counter").
zaehler = 1
def rule1(payload,topic):
    """Log the triggering message and count how often this rule has fired."""
    global zaehler
    print(topic+": "+payload)
    print("Calling rule 1")
    print("..for the "+str(zaehler)+" time")
    zaehler += 1
def rule2(payload,topic):
    """Log the triggering message; report once rule1 has fired more than four times."""
    print("Calling rule 2")
    print(topic+": "+payload)
    if zaehler > 4:
        # Bug fix: concatenating the int 'zaehler' directly to str raised
        # TypeError every time this branch ran; str() it first.
        print("1. rule has been called "+str(zaehler)+"times!")
def rule3(payload,topic):
    """Log the message that triggered this rule."""
    print("Calling rule 3")
    print(topic+": "+payload)
#######################################################
# set topics the rules will trigger on
#is triggered when the value changes
ruleBackend.topics.append(ruleBackend.Topic(rule1,"device/someCrap1/state","on_change"))
#is triggered whenever a new message arrives on the topic
ruleBackend.topics.append(ruleBackend.Topic(rule2,"device/someCrap2/state","on_message"))
#is triggered when the message payload is '1234'
ruleBackend.topics.append(ruleBackend.Topic(rule3,"device/someCrap3/state","on_payload:1234"))
#######################################################
ruleBackend.start()
#main loop
# Poll once a second while the connection is alive; SIGINT or a broker
# disconnect clears ruleBackend.connected and ends the loop.
while ruleBackend.connected:
    time.sleep(1)
    #how to use states
    print("State sth: "+someBool1.state)
    print("State int: "+someInt1.state)
    #how to send a message
    ruleBackend.publisher.send("device/dog/bark","True")
|
mbs38/mqttRuleBackend | ruleRunner.py | <filename>ruleRunner.py<gh_stars>0
#!/bin/python3
# Copyright 2022, <NAME>, www.maxbrueggemann.de
# This program searches the current directory for python files
# that have a filename beginning with "rRun". These files are then
# executed as subprocesses. The subprocesses will be terminated when
# the corresponding file has vanished. If a file has been modified,
# the subprocess will be restarted.
import os
import subprocess
import sys
import multiprocessing
import time
import signal
import sys
class Rule:
    """Tracks one rRun*.py script: runs it as a subprocess, restarts it on change."""
    def __init__(self,filename):
        self.filename=filename
        self.fileExists=True   # cleared/set on every directory scan by the watcher
        try:
            self.time=os.path.getmtime(self.filename)
        except OSError:
            # Bug fix: was a bare 'except:', which also swallows
            # KeyboardInterrupt/SystemExit; getmtime raises OSError when the
            # file vanished between listing and stat.
            self.time=0
        self.subprocess=None
        print("Found script "+filename)
        self.start()
    def kill(self):
        """Terminate the script's subprocess."""
        print("Killing script "+self.filename)
        self.subprocess.kill()
    def start(self):
        """Launch the script with the same interpreter running this watcher."""
        print("Starting script "+self.filename)
        self.subprocess = subprocess.Popen([sys.executable, self.filename])
    def modified(self):
        """Restart the subprocess if the file's mtime changed since the last check."""
        time=self.time
        try:
            time=os.path.getmtime(self.filename)
        except OSError:
            # File (temporarily) missing: keep the old mtime, no restart.
            pass
        if self.time != time:
            self.time=time
            print(self.filename+" changed. Reloading.")
            self.kill()
            self.start()
scriptList=[]  # Rule objects, one per discovered rRun*.py script
def signal_handler(signal, frame):
    # On Ctrl+C: kill every managed subprocess, then exit the watcher.
    for x in scriptList:
        x.kill()
    sys.exit(0)
signal.signal(signal.SIGINT, signal_handler)
# Watcher main loop: every 3 seconds, discover new rRun*.py scripts, restart
# modified ones, and reap scripts whose file has been deleted.
while True:
    fileList=os.listdir(".")
    # Assume every known script is gone until this scan proves otherwise.
    for x in scriptList:
        x.fileExists=False
    for x in fileList:
        found = False
        if x.startswith("rRun") and x.endswith(".py"):
            for y in scriptList:
                if y.filename == x:
                    y.fileExists=True
                    found=True
            if not found:
                scriptList.append(Rule(x))
    time.sleep(3)
    # Bug fix: iterate over a snapshot — the original removed entries from
    # scriptList while iterating it, which skips the element that follows
    # each removed one.
    for x in list(scriptList):
        x.modified()
        if not x.fileExists:
            print("Script "+x.filename+" is gone.")
            x.kill()
            scriptList.remove(x)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.