from schema.user import FIELDS
from cerberus import Validator
from cerberus.errors import ValidationError
from util.error.errors import NotValidParameterError
def validate_user_create(req, res, resource, params):
schema = {
'username': FIELDS['username'],
'email': FIELDS['email'],
'password': FIELDS['password'],
'details': FIELDS['details'],
'balance': FIELDS['quantity']
}
validate(schema, req)
def validate_login(req, res, resource, params):
schema = {
'email': FIELDS['email'],
'password': FIELDS['password']
}
validate(schema, req)
def validate_money_transfer_create(req, res, resource, params):
schema = {
'borrower': FIELDS['username'],
'quantity': FIELDS['quantity']
}
validate(schema, req)
def validate(schema, req):
v = Validator(schema)
try:
if not v.validate(req.context['data']):
raise NotValidParameterError(v.errors)
except ValidationError:
raise NotValidParameterError('Invalid request %s' % req.context)
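# A minimal usage sketch (assumption: these functions run as Falcon before-hooks,
# whose (req, res, resource, params) signature they match; the resource class and
# route are hypothetical):
#
#   import falcon
#
#   class UserCollection:
#       @falcon.before(validate_user_create)
#       def on_post(self, req, res):
#           ...  # req.context['data'] has already been validated here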
|
# This file is part of AstroHOG
#
# Copyright (C) 2013-2017 Juan Diego Soler
import numpy as np
from astropy.convolution import convolve_fft
from astropy.convolution import Gaussian2DKernel
from congrid import *
from scipy import ndimage
import pycircstat as circ
from nose.tools import assert_equal, assert_true
import matplotlib.pyplot as plt
import collections
import multiprocessing
# ------------------------------------------------------------------------------------------------------------------------
def HOG_PRS(phi):
# Calculates the projected Rayleigh statistic of the distributions of angles phi.
#
# INPUTS
# phi - angles between -pi/2 and pi/2
#
# OUTPUTS
# Zx - value of the projected Rayleigh statistic
    # s_Zx    - uncertainty of the projected Rayleigh statistic
    # meanPhi - mean orientation angle of the distribution
angles=2.*phi
Zx=np.sum(np.cos(angles))/np.sqrt(np.size(angles)/2.)
temp=np.sum(np.cos(angles)*np.cos(angles))
s_Zx=np.sqrt((2.*temp-Zx*Zx)/np.size(angles))
Zy=np.sum(np.sin(angles))/np.sqrt(np.size(angles)/2.)
temp=np.sum(np.sin(angles)*np.sin(angles))
    s_Zy=np.sqrt((2.*temp-Zy*Zy)/np.size(angles))
meanPhi=0.5*np.arctan2(Zy, Zx)
return Zx, s_Zx, meanPhi
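# Quick sanity check (commented out so the module stays import-safe; values are
# illustrative): uniformly random angles should give Zx close to zero, while
# angles clustered around 0 give a large positive Zx.
#
#   phi=np.random.uniform(low=-np.pi/2., high=np.pi/2., size=10000)
#   Zx, s_Zx, meanPhi = HOG_PRS(phi)   # expect Zx ~ 0 for random orientations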
# -------------------------------------------------------------------------------------------------------------------------------
def HOGvotes_simple(phi):
    # Calculates the HOG correlation from the relative orientation angles
    #
    # INPUTS
    # phi - relative orientation angles between -pi/2 and pi/2
    #
    # OUTPUTS
    # Zx        - projected Rayleigh statistic of the angle distribution
    # corrframe - map of cos(phi), with NaNs replaced by zero
sz=np.shape(phi)
corrframe=np.zeros(sz)
#paraThres=20.*np.pi/180.
#condPara=np.logical_and(np.isfinite(phi), np.logical_or(phi < paraThres, phi > np.pi-paraThres)).nonzero()
#corrframe[condPara]=1.
corrframe=np.cos(phi)
corrframe[np.isnan(phi).nonzero()]=0. #np.nan
Zx, s_Zx, meanPhi = HOG_PRS(phi[np.isfinite(phi).nonzero()])
return Zx, corrframe
# -------------------------------------------------------------------------------------------------------------------------------
def HOGvotes_blocks(phi, wd=3):
    # Calculates the block-averaged HOG correlation from the relative orientation angles
    #
    # INPUTS
    # phi - relative orientation angles between -pi/2 and pi/2
    # wd  - half-width of the averaging block in pixels
    #
    # OUTPUTS
    # hogcorr   - fraction of valid pixels with a positive block-averaged angle
    # corrframe - map of the block-averaged relative orientation angles
sz=np.shape(phi)
corrframe=np.zeros(sz)
for i in range(0, sz[0]):
for k in range(0, sz[1]):
if (i<wd):
if (k<wd):
temp=phi[0:i+wd,0:k+wd]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
elif (k>sz[1]-1-wd):
temp=phi[0:i+wd,k-wd:sz[1]-1]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
else:
temp=phi[0:i+wd,k-wd:k+wd]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
elif (i>sz[0]-1-wd):
if (k<wd):
                    temp=phi[i-wd:sz[0]-1,0:k+wd]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
elif (k>sz[1]-1-wd):
temp=phi[i-wd:sz[0]-1,k-wd:sz[1]-1]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
else:
temp=phi[i-wd:sz[0]-1,k-wd:k+wd]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
elif (k<wd):
if (i<wd):
temp=phi[0:i+wd,0:k+wd]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
elif (i>sz[0]-1-wd):
temp=phi[i-wd:sz[0]-1,0:k+wd]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
else:
temp=phi[i-wd:i+wd,0:k+wd]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
elif (k>sz[1]-1-wd):
if (i<wd):
temp=phi[0:i+wd,k-wd:sz[1]-1]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
elif (i>sz[0]-1-wd):
temp=phi[i-wd:sz[0]-1,k-wd:sz[1]-1]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
else:
temp=phi[i-wd:i+wd,k-wd:sz[1]-1]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
else:
temp=phi[i-wd:i+wd,k-wd:k+wd]; corrframe[i,k]=np.mean(temp[np.isfinite(temp).nonzero()])
corrframe[np.isnan(phi).nonzero()]=0.
nPara=np.size(corrframe[(corrframe>0.).nonzero()])
nGood=np.size(phi[np.isfinite(phi).nonzero()])
hogcorr=nPara/float(nGood)
return hogcorr, corrframe
# -------------------------------------------------------------------------------------------------------------------------------
def HOGcorr_frame(frame1, frame2, gradthres1=0., gradthres2=0., pxsz=1., ksz=1., res=1., mask1=0, mask2=0, wd=1, allow_huge=False, regrid=False):
# Calculates the spatial correlation between frame1 and frame2 using the HOG methods
#
# INPUTS
    # frame1 - first input image
    # frame2 - second input image
    #
    # OUTPUTS
    # circstats    - circular statistics [rvl, Z, V, pz, pv] of the angles
    # corrframe    - map of the relative orientation angles
    # smoothframe1 - frame1 after Gaussian smoothing
    # smoothframe2 - frame2 after Gaussian smoothing
sf=3. #Number of pixels per kernel FWHM
pxksz =ksz/pxsz
pxres =res/pxsz
sz1=np.shape(frame1)
if (ksz > 1):
weight=(pxsz/ksz)**2
if (regrid):
            intframe1=congrid(frame1, [int(np.round(sf*sz1[0]/pxres)), int(np.round(sf*sz1[1]/pxres))])
            intframe2=congrid(frame2, [int(np.round(sf*sz1[0]/pxres)), int(np.round(sf*sz1[1]/pxres))])
            if np.array_equal(np.shape(frame1), np.shape(mask1)):
                intmask1=congrid(mask1, [int(np.round(sf*sz1[0]/pxres)), int(np.round(sf*sz1[1]/pxres))])
                intmask1[(intmask1 > 0.).nonzero()]=1.
            if np.array_equal(np.shape(frame2), np.shape(mask2)):
                intmask2=congrid(mask2, [int(np.round(sf*sz1[0]/pxres)), int(np.round(sf*sz1[1]/pxres))])
                intmask2[(intmask2 > 0.).nonzero()]=1.
else:
intframe1=frame1
intframe2=frame2
intmask1=mask1
intmask2=mask2
smoothframe1=ndimage.filters.gaussian_filter(frame1, [pxksz, pxksz], order=[0,0], mode='nearest')
#convolve_fft(intframe1, Gaussian2DKernel(pxksz), allow_huge=allow_huge)
smoothframe2=ndimage.filters.gaussian_filter(frame2, [pxksz, pxksz], order=[0,0], mode='nearest')
#convolve_fft(intframe2, Gaussian2DKernel(pxksz), allow_huge=allow_huge)
#grad1=np.gradient(smoothframe1)
#grad2=np.gradient(smoothframe2)
dI1dx=ndimage.filters.gaussian_filter(frame1, [pxksz, pxksz], order=[0,1], mode='nearest')
dI1dy=ndimage.filters.gaussian_filter(frame1, [pxksz, pxksz], order=[1,0], mode='nearest')
dI2dx=ndimage.filters.gaussian_filter(frame2, [pxksz, pxksz], order=[0,1], mode='nearest')
dI2dy=ndimage.filters.gaussian_filter(frame2, [pxksz, pxksz], order=[1,0], mode='nearest')
else:
weight=(pxsz/res)**2
intframe1=frame1
intframe2=frame2
intmask1=mask1
intmask2=mask2
smoothframe1=frame1
smoothframe2=frame2
#grad1=np.gradient(intframe1)
#grad2=np.gradient(intframe2)
dI1dx=ndimage.filters.gaussian_filter(frame1, [1, 1], order=[0,1], mode='nearest')
dI1dy=ndimage.filters.gaussian_filter(frame1, [1, 1], order=[1,0], mode='nearest')
dI2dx=ndimage.filters.gaussian_filter(frame2, [1, 1], order=[0,1], mode='nearest')
dI2dy=ndimage.filters.gaussian_filter(frame2, [1, 1], order=[1,0], mode='nearest')
# Calculation of the relative orientation angles
#tempphi0=np.arctan2(grad1[1]*grad2[0]-grad1[0]*grad2[1], grad1[0]*grad2[0]+grad1[1]*grad2[1])
tempphi=np.arctan2(dI1dx*dI2dy-dI1dy*dI2dx, dI1dx*dI2dx+dI1dy*dI2dy)
phi=np.arctan(np.tan(tempphi))
# Excluding small gradients
normGrad1=np.sqrt(dI1dx*dI1dx+dI1dy*dI1dy) #np.sqrt(grad1[1]**2+grad1[0]**2)
normGrad2=np.sqrt(dI2dx*dI2dx+dI2dy*dI2dy) #np.sqrt(grad2[1]**2+grad2[0]**2)
bad=np.logical_or(normGrad1 <= gradthres1, normGrad2 <= gradthres2).nonzero()
phi[bad]=np.nan
corrframe=phi#np.cos(2.*phi)
# Excluding masked regions
if np.array_equal(np.shape(intframe1), np.shape(intmask1)):
corrframe[(intmask1 == 0.).nonzero()]=np.nan
if np.array_equal(np.shape(intframe2), np.shape(intmask2)):
corrframe[(intmask2 == 0.).nonzero()]=np.nan
good=np.logical_and(np.logical_and(np.isfinite(phi), intmask1 > 0), intmask2 > 0).nonzero()
else:
good=np.logical_and(np.isfinite(phi), intmask1 > 0).nonzero()
else:
good=np.isfinite(phi).nonzero()
Zx, s_Zx, meanPhi = HOG_PRS(phi[good])
wghts=0.*phi[good]+weight
rvl=circ.descriptive.resultant_vector_length(2.*phi[good], w=wghts)
can=circ.descriptive.mean(2.*phi[good], w=wghts)/2.
pz, Z = circ.tests.rayleigh(2.*phi[good], w=wghts)
pv, V = circ.tests.vtest(2.*phi[good], 0., w=wghts)
#if (wd > 1):
# hogcorr, corrframe =HOGvotes_blocks(phi, wd=wd)
#else:
# hogcorr, corrframe =HOGvotes_simple(phi)
circstats=[rvl, Z, V, pz, pv]
return circstats, corrframe, smoothframe1, smoothframe2
#return Zx, corrframe, smoothframe1
# -------------------------------------------------------------------------------------------------------------------------------
def HOGcorr_frameandvec(frame1, vecx, vecy, gradthres=0., vecthres=0., pxsz=1., ksz=1., res=1., mask1=0, mask2=0, wd=1, allow_huge=False, regrid=False):
# Calculates the spatial correlation between frame1 and the vector field described by vecx and vecy using the HOG methods
#
# INPUTS
# frame1 - input map
# vecx - x-component of the input vector field
# vecy - y-component of the input vector field
#
# OUTPUTS
    # Zx           - projected Rayleigh statistic of the relative angles
    # corrframe    - map of cos(2*phi) between the gradient and the vector field
    # smoothframe1 - frame1 after Gaussian smoothing
sf=3. #Number of pixels per kernel FWHM
pxksz =ksz/pxsz
pxres =res/pxsz
sz1=np.shape(frame1)
if (ksz > 1):
if (regrid):
            intframe1=congrid(frame1, [int(np.round(sf*sz1[0]/pxres)), int(np.round(sf*sz1[1]/pxres))])
            intvecx =congrid(vecx, [int(np.round(sf*sz1[0]/pxres)), int(np.round(sf*sz1[1]/pxres))])
            intvecy =congrid(vecy, [int(np.round(sf*sz1[0]/pxres)), int(np.round(sf*sz1[1]/pxres))])
            if np.array_equal(np.shape(frame1), np.shape(mask1)):
                intmask1=congrid(mask1, [int(np.round(sf*sz1[0]/pxres)), int(np.round(sf*sz1[1]/pxres))])
                intmask1[(intmask1 > 0.).nonzero()]=1.
            if np.array_equal(np.shape(vecx), np.shape(mask2)):
                intmask2=congrid(mask2, [int(np.round(sf*sz1[0]/pxres)), int(np.round(sf*sz1[1]/pxres))])
                intmask2[(intmask2 > 0.).nonzero()]=1.
else:
intframe1=frame1
intvecx=vecx
intvecy=vecy
intmask1=mask1
intmask2=mask2
#smoothframe1=convolve_fft(intframe1, Gaussian2DKernel(pxksz), allow_huge=allow_huge)
smoothframe1=ndimage.filters.gaussian_filter(frame1, [pxksz, pxksz], order=[0,0], mode='nearest')
#grad1=np.gradient(smoothframe1)
dI1dx=ndimage.filters.gaussian_filter(frame1, [pxksz, pxksz], order=[0,1], mode='nearest')
dI1dy=ndimage.filters.gaussian_filter(frame1, [pxksz, pxksz], order=[1,0], mode='nearest')
else:
intframe1=frame1
smoothframe1=frame1
intvecx=vecx
intvecy=vecy
intmask1=mask1
intmask2=mask2
#grad1=np.gradient(intframe1)
dI1dx=ndimage.filters.gaussian_filter(frame1, [1, 1], order=[0,1], mode='nearest')
dI1dy=ndimage.filters.gaussian_filter(frame1, [1, 1], order=[1,0], mode='nearest')
# ========================================================================================================================
normGrad1=np.sqrt(dI1dx*dI1dx+dI1dy*dI1dy) #np.sqrt(grad1[1]**2+grad1[0]**2)
normVec=np.sqrt(intvecx*intvecx + intvecy*intvecy)
bad=np.logical_or(normGrad1 <= gradthres, normVec <= vecthres).nonzero()
normGrad1[bad]=1.; normVec[bad]=1.;
#tempphi=np.arctan2(grad1[1]*intvecy-grad1[0]*intvecx, grad1[1]*intvecx+grad1[0]*intvecy)
tempphi=np.arctan2(dI1dx*intvecy-dI1dy*intvecx, dI1dx*intvecx+dI1dy*intvecy)
tempphi[bad]=np.nan
phi=np.arctan(np.tan(tempphi))
#if np.array_equal(np.shape(frame1), np.shape(mask1)):
# if np.array_equal(np.shape(normVec), np.shape(mask2)):
# phi[np.logical_or(mask1==0, mask2==0).nonzero()]=np.nan
# good=np.logical_and(mask1 > 0., mask2 > 0.).nonzero()
# else:
# phi[(mask1==0).nonzero()]=np.nan
# good=(mask1 > 0.).nonzero()
#else:
# good=np.isfinite(phi).nonzero()
corrframe=np.cos(2.*phi)
if np.array_equal(np.shape(intframe1), np.shape(intmask1)):
corrframe[(intmask1 == 0.).nonzero()]=np.nan
if np.array_equal(np.shape(intvecx), np.shape(intmask2)):
corrframe[(intmask2 == 0.).nonzero()]=np.nan
good=np.logical_and(np.logical_and(np.isfinite(phi), intmask1 > 0), intmask2 > 0).nonzero()
else:
good=np.logical_and(np.isfinite(phi), intmask1 > 0).nonzero()
else:
good=np.isfinite(phi).nonzero()
Zx, s_Zx, meanPhi = HOG_PRS(phi[good])
#if (wd > 1):
# hogcorr, corrframe =HOGvotes_blocks(phi, wd=wd)
#else:
# hogcorr, corrframe =HOGvotes_simple(phi)
#plt.imshow(phi, origin='lower')
#plt.colorbar()
#plt.show()
#import pdb; pdb.set_trace()
return Zx, corrframe, smoothframe1
# ================================================================================================================
def HOGcorr_cube(cube1, cube2, z1min, z1max, z2min, z2max, pxsz=1., ksz=1., res=1., mask1=0, mask2=0, wd=1, gradthres1=0., gradthres2=0., regrid=False, allow_huge=False):
    # Calculates the HOG correlation between the channels of two PPV cubes
    #
    # INPUTS
    # cube1, cube2 - input data cubes
    # z1min, z1max, z2min, z2max - ranges of channels to correlate
    #
    # OUTPUTS
    # [rplane,zplane,vplane,pzplane,pvplane] - circular statistics per channel pair
    # corrcube       - relative orientation angle maps per channel pair
    # scube1, scube2 - smoothed versions of the input cubes
print('Computing HOG correlation')
print(z1max-z1min+1,z2max-z2min+1)
sf=3. #Number of pixels per kernel FWHM
pxksz =ksz/pxsz
pxres =res/pxsz
sz1=np.shape(cube1)
sz2=np.shape(cube2)
rplane=np.zeros([z1max+1-z1min, z2max+1-z2min])
zplane=np.zeros([z1max+1-z1min, z2max+1-z2min])
vplane=np.zeros([z1max+1-z1min, z2max+1-z2min])
pzplane=np.zeros([z1max+1-z1min, z2max+1-z2min])
pvplane=np.zeros([z1max+1-z1min, z2max+1-z2min])
corrplane=np.zeros([z1max+1-z1min, z2max+1-z2min])
corrframe=np.zeros([sz1[1],sz1[2]])
scube1=np.zeros(sz1)
scube2=np.zeros(sz2)
#if (regrid):
# corrcube=np.zeros([sz1[0], np.int(np.round(sf*sz1[1]/pxres)), np.int(np.round(sf*sz1[2]/pxres))]) #np.zeros(sz1)
# corrframe_temp=np.zeros([np.int(np.round(sf*sz1[1]/pxres)), np.int(np.round(sf*sz1[2]/pxres))]) #np.zeros([sz1[1],sz1[2]])
# maskcube=np.zeros([sz1[0], np.int(np.round(sf*sz1[1]/pxres)), np.int(np.round(sf*sz1[2]/pxres))]) #np.zeros(sz1)
#else:
corrcube=np.zeros([z1max+1-z1min, z2max+1-z2min,sz1[1],sz1[2]])
corrframe_temp=np.zeros([sz1[1],sz1[2]])
maskcube=np.zeros(sz1)
for i in range(z1min, z1max+1):
for k in range(z2min, z2max+1):
print(i-z1min,k-z2min)
frame1=cube1[i,:,:]
frame2=cube2[k,:,:]
if np.array_equal(np.shape(cube1), np.shape(mask1)):
if np.array_equal(np.shape(cube2), np.shape(mask2)):
circstats, corrframe, sframe1, sframe2 = HOGcorr_frame(frame1, frame2, pxsz=pxsz, ksz=ksz, res=res, mask1=mask1[i,:,:], mask2=mask2[k,:,:], gradthres1=gradthres1, gradthres2=gradthres2, wd=wd, regrid=regrid, allow_huge=allow_huge)
else:
circstats, corrframe, sframe1, sframe2 = HOGcorr_frame(frame1, frame2, pxsz=pxsz, ksz=ksz, res=res, mask1=mask1[i,:,:], gradthres1=gradthres1, gradthres2=gradthres2, wd=wd, regrid=regrid, allow_huge=allow_huge)
else:
circstats, corrframe, sframe1, sframe2 = HOGcorr_frame(frame1, frame2, ksz=ksz, gradthres1=gradthres1, gradthres2=gradthres2, wd=wd, allow_huge=allow_huge)
rplane[i-z1min,k-z2min]=circstats[0]
zplane[i-z1min,k-z2min]=circstats[1]
vplane[i-z1min,k-z2min]=circstats[2]
pzplane[i-z1min,k-z2min]=circstats[3]
pvplane[i-z1min,k-z2min]=circstats[4]
corrcube[i-z1min,k-z2min,:,:]=corrframe
scube2[k,:,:]=sframe2
scube1[i,:,:]=sframe1
#import pdb; pdb.set_trace()
#return corrplane, corrcube, scube1, scube2
return [rplane,zplane,vplane,pzplane,pvplane], corrcube, scube1, scube2
# ================================================================================================================
def HOGcorr_cubeandpol(cube1, ex, ey, z1min, z1max, pxsz=1., ksz=1., res=1., mask1=0, mask2=0, wd=1, rotatepol=False, regrid=False, allow_huge=False):
    # Calculates the HOG correlation between the channels of a PPV cube and a polarization vector field
    #
    # INPUTS
    # cube1  - input data cube
    # ex, ey - components of the polarization vector field
    #
    # OUTPUTS
    # corrvec  - projected Rayleigh statistic per channel
    # corrcube - maps of cos(2*phi) per channel
    # scube    - smoothed version of the input cube
print('Computing HOG correlation')
print(z1max-z1min)
sf=3. #Number of pixels per kernel FWHM
pxksz =ksz/pxsz
pxres =res/pxsz
sz1=np.shape(cube1)
sz2=np.shape(ex)
if(rotatepol):
xvec= ey
yvec=-ex
else:
xvec= ex
yvec= ey
normVec=np.sqrt(xvec*xvec+yvec*yvec)
corrvec=0.*np.arange(z1min,z1max+1)
corrframe=np.zeros([sz1[1],sz1[2]])
corrcube=np.zeros(sz1)
scube=np.zeros(sz1)
for i in range(z1min, z1max+1):
print(i-z1min)
if np.array_equal(np.shape(cube1), np.shape(mask1)):
if np.array_equal(np.shape(normVec), np.shape(mask2)):
corr, corrframe, sframe = HOGcorr_frameandvec(cube1[i,:,:], xvec, yvec, pxsz=pxsz, ksz=ksz, res=res, mask1=mask1[i,:,:], mask2=mask2, wd=wd, regrid=regrid)
else:
corr, corrframe, sframe = HOGcorr_frameandvec(cube1[i,:,:], xvec, yvec, pxsz=pxsz, ksz=ksz, res=res, mask1=mask1[i,:,:], wd=wd, regrid=regrid)
else:
corr, corrframe, sframe = HOGcorr_frameandvec(cube1[i,:,:], xvec, yvec, pxsz=pxsz, ksz=ksz, res=res, wd=wd, regrid=regrid)
corrvec[i-z1min]=corr
#corrcube[i-z1min]=corrframe
corrcube[i,:,:]=corrframe
scube[i,:,:]=sframe
return corrvec, corrcube, scube
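# Example call (commented out; the array shapes and kernel size are
# illustrative, not from the original code):
#
#   frame1=np.random.randn(128,128)
#   frame2=np.random.randn(128,128)
#   circstats, corrframe, sframe1, sframe2 = HOGcorr_frame(frame1, frame2, ksz=3.)
#   rvl, Z, V, pz, pv = circstats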
|
from selenium import webdriver
import time
a=webdriver.Chrome()
a.get("http://www.baidu.com")
a.maximize_window()
a.find_element_by_id('kw').send_keys('哈哈哈 kimoji')
a.find_element_by_xpath("")
#相对定位 以//开头 //标签名[@属性名=值 and 或者or ]例如 //input[@name="wd" and @autocomplete="off"]
#层级定位 //父标签[@属性名-值]/子标签[@属性名-值]//子孙标签[@属性名-值]
#绝对定位 依赖页面的顺序和位置 以/开头,例如/html/div/...
a.find_element_by_id('su').submit()
time.sleep(15)
|
from collections import defaultdict
d = defaultdict(list)
d['a'].append(1)
d['a'].append(2)
d['b'].append(4)
print(d)
d = defaultdict(set)
d['a'].add(1)
d['a'].add(2)
d['b'].add(4)
print(d)
d = {}
d.setdefault('a', []).append(1)
d.setdefault('a', []).append(2)
d.setdefault('b', []).append(4)
print(d)
print(d['a'][1])
'''
d = defaultdict(list)
for key, value in pairs:
    d[key].append(value)
'''
|
import time
base=float(input("Please enter the base of the triangle :"))
height = float(input("Please enter the height of the triangle :"))
print("Calculating area of triangle on the basis of your input ...")
area=(0.5)*base*height
time.sleep(1)
print("AREA :",area)
|
import click
import confuse
import sqlalchemy.dialects
from .alias import alias
from .domain import domain
from .user import user
cfg = confuse.Configuration("vmail-manager", __name__)
@click.group(context_settings=dict(max_content_width=120))
@click.option(
"--dialect",
type=click.Choice(sqlalchemy.dialects.__all__),
help="Dialect of the Database.",
)
@click.option(
"--driver", type=click.STRING, help="Driver to connect to the database host.",
)
@click.option(
"--host", "-h", type=click.STRING, help="Hostname of the database host.",
)
@click.option(
    "--port", "-p", type=click.INT, help="Port number of the database host.",
)
@click.option(
    "--username", "-u", type=click.STRING, help="Username to connect to the database.",
)
@click.option(
    "--password",
    "-P",
    type=click.STRING,
    help="Provide the password via the CLI. If no value is given, a prompt for entering the password will be shown.",
)
@click.option(
"--database", "-d", type=click.STRING, help="Name of the database",
)
@click.pass_context
def cli(ctx, dialect, driver, host, port, username, password, database):
"""Management of database for vmail-setup.
User config is loaded from ~/.config/vmail-manager/config.yaml.
Priority of options are OPTIONS > USER-CONFIG > DEFAULTS.
"""
for param in [
("dialect", dialect),
("driver", driver),
("host", host),
("port", port),
("username", username),
("password", password),
("database", database),
]:
if param[1] is not None:
cfg["DB"][param[0]] = param[1]
if cfg["DB"]["password"].get() is None:
cfg["DB"]["password"] = click.prompt("Password", hide_input=True)
ctx.obj = cfg
cli.add_command(domain)
cli.add_command(user)
cli.add_command(alias)
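# Allow running this module directly; the package may also expose `cli` through
# a console-script entry point (an assumption, not shown in this file).
if __name__ == "__main__":
    cli()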
|
import collections
import os
from xml.etree.ElementTree import Element as ET_Element
from .vision import VisionDataset
try:
from defusedxml.ElementTree import parse as ET_parse
except ImportError:
from xml.etree.ElementTree import parse as ET_parse
from typing import Any, Callable, Dict, List, Optional, Tuple
from PIL import Image
from .utils import download_and_extract_archive, verify_str_arg
DATASET_YEAR_DICT = {
"2012": {
"url": "http://host.robots.ox.ac.uk/pascal/VOC/voc2012/VOCtrainval_11-May-2012.tar",
"filename": "VOCtrainval_11-May-2012.tar",
"md5": "6cd6e144f989b92b3379bac3b3de84fd",
"base_dir": os.path.join("VOCdevkit", "VOC2012"),
},
"2011": {
"url": "http://host.robots.ox.ac.uk/pascal/VOC/voc2011/VOCtrainval_25-May-2011.tar",
"filename": "VOCtrainval_25-May-2011.tar",
"md5": "6c3384ef61512963050cb5d687e5bf1e",
"base_dir": os.path.join("TrainVal", "VOCdevkit", "VOC2011"),
},
"2010": {
"url": "http://host.robots.ox.ac.uk/pascal/VOC/voc2010/VOCtrainval_03-May-2010.tar",
"filename": "VOCtrainval_03-May-2010.tar",
"md5": "da459979d0c395079b5c75ee67908abb",
"base_dir": os.path.join("VOCdevkit", "VOC2010"),
},
"2009": {
"url": "http://host.robots.ox.ac.uk/pascal/VOC/voc2009/VOCtrainval_11-May-2009.tar",
"filename": "VOCtrainval_11-May-2009.tar",
"md5": "a3e00b113cfcfebf17e343f59da3caa1",
"base_dir": os.path.join("VOCdevkit", "VOC2009"),
},
"2008": {
"url": "http://host.robots.ox.ac.uk/pascal/VOC/voc2008/VOCtrainval_14-Jul-2008.tar",
"filename": "VOCtrainval_11-May-2012.tar",
"md5": "2629fa636546599198acfcfbfcf1904a",
"base_dir": os.path.join("VOCdevkit", "VOC2008"),
},
"2007": {
"url": "http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtrainval_06-Nov-2007.tar",
"filename": "VOCtrainval_06-Nov-2007.tar",
"md5": "c52e279531787c972589f7e41ab4ae64",
"base_dir": os.path.join("VOCdevkit", "VOC2007"),
},
"2007-test": {
"url": "http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtest_06-Nov-2007.tar",
"filename": "VOCtest_06-Nov-2007.tar",
"md5": "b6e924de25625d8de591ea690078ad9f",
"base_dir": os.path.join("VOCdevkit", "VOC2007"),
},
}
class _VOCBase(VisionDataset):
_SPLITS_DIR: str
_TARGET_DIR: str
_TARGET_FILE_EXT: str
def __init__(
self,
root: str,
year: str = "2012",
image_set: str = "train",
download: bool = False,
transform: Optional[Callable] = None,
target_transform: Optional[Callable] = None,
transforms: Optional[Callable] = None,
):
super().__init__(root, transforms, transform, target_transform)
self.year = verify_str_arg(year, "year", valid_values=[str(yr) for yr in range(2007, 2013)])
valid_image_sets = ["train", "trainval", "val"]
if year == "2007":
valid_image_sets.append("test")
self.image_set = verify_str_arg(image_set, "image_set", valid_image_sets)
key = "2007-test" if year == "2007" and image_set == "test" else year
dataset_year_dict = DATASET_YEAR_DICT[key]
self.url = dataset_year_dict["url"]
self.filename = dataset_year_dict["filename"]
self.md5 = dataset_year_dict["md5"]
base_dir = dataset_year_dict["base_dir"]
voc_root = os.path.join(self.root, base_dir)
if download:
download_and_extract_archive(self.url, self.root, filename=self.filename, md5=self.md5)
if not os.path.isdir(voc_root):
raise RuntimeError("Dataset not found or corrupted. You can use download=True to download it")
splits_dir = os.path.join(voc_root, "ImageSets", self._SPLITS_DIR)
split_f = os.path.join(splits_dir, image_set.rstrip("\n") + ".txt")
with open(os.path.join(split_f)) as f:
file_names = [x.strip() for x in f.readlines()]
image_dir = os.path.join(voc_root, "JPEGImages")
self.images = [os.path.join(image_dir, x + ".jpg") for x in file_names]
target_dir = os.path.join(voc_root, self._TARGET_DIR)
self.targets = [os.path.join(target_dir, x + self._TARGET_FILE_EXT) for x in file_names]
assert len(self.images) == len(self.targets)
def __len__(self) -> int:
return len(self.images)
class VOCSegmentation(_VOCBase):
"""`Pascal VOC <http://host.robots.ox.ac.uk/pascal/VOC/>`_ Segmentation Dataset.
Args:
root (string): Root directory of the VOC Dataset.
year (string, optional): The dataset year, supports years ``"2007"`` to ``"2012"``.
image_set (string, optional): Select the image_set to use, ``"train"``, ``"trainval"`` or ``"val"``. If
``year=="2007"``, can also be ``"test"``.
download (bool, optional): If true, downloads the dataset from the internet and
puts it in root directory. If dataset is already downloaded, it is not
downloaded again.
transform (callable, optional): A function/transform that takes in an PIL image
and returns a transformed version. E.g, ``transforms.RandomCrop``
target_transform (callable, optional): A function/transform that takes in the
target and transforms it.
transforms (callable, optional): A function/transform that takes input sample and its target as entry
and returns a transformed version.
"""
_SPLITS_DIR = "Segmentation"
_TARGET_DIR = "SegmentationClass"
_TARGET_FILE_EXT = ".png"
@property
def masks(self) -> List[str]:
return self.targets
def __getitem__(self, index: int) -> Tuple[Any, Any]:
"""
Args:
index (int): Index
Returns:
tuple: (image, target) where target is the image segmentation.
"""
img = Image.open(self.images[index]).convert("RGB")
target = Image.open(self.masks[index])
if self.transforms is not None:
img, target = self.transforms(img, target)
return img, target
class VOCDetection(_VOCBase):
"""`Pascal VOC <http://host.robots.ox.ac.uk/pascal/VOC/>`_ Detection Dataset.
Args:
root (string): Root directory of the VOC Dataset.
year (string, optional): The dataset year, supports years ``"2007"`` to ``"2012"``.
image_set (string, optional): Select the image_set to use, ``"train"``, ``"trainval"`` or ``"val"``. If
``year=="2007"``, can also be ``"test"``.
download (bool, optional): If true, downloads the dataset from the internet and
puts it in root directory. If dataset is already downloaded, it is not
downloaded again.
transform (callable, optional): A function/transform that takes in an PIL image
and returns a transformed version. E.g, ``transforms.RandomCrop``
target_transform (callable, required): A function/transform that takes in the
target and transforms it.
transforms (callable, optional): A function/transform that takes input sample and its target as entry
and returns a transformed version.
"""
_SPLITS_DIR = "Main"
_TARGET_DIR = "Annotations"
_TARGET_FILE_EXT = ".xml"
@property
def annotations(self) -> List[str]:
return self.targets
def __getitem__(self, index: int) -> Tuple[Any, Any]:
"""
Args:
index (int): Index
Returns:
tuple: (image, target) where target is a dictionary of the XML tree.
"""
img = Image.open(self.images[index]).convert("RGB")
target = self.parse_voc_xml(ET_parse(self.annotations[index]).getroot())
if self.transforms is not None:
img, target = self.transforms(img, target)
return img, target
@staticmethod
def parse_voc_xml(node: ET_Element) -> Dict[str, Any]:
voc_dict: Dict[str, Any] = {}
children = list(node)
if children:
def_dic: Dict[str, Any] = collections.defaultdict(list)
for dc in map(VOCDetection.parse_voc_xml, children):
for ind, v in dc.items():
def_dic[ind].append(v)
if node.tag == "annotation":
def_dic["object"] = [def_dic["object"]]
voc_dict = {node.tag: {ind: v[0] if len(v) == 1 else v for ind, v in def_dic.items()}}
if node.text:
text = node.text.strip()
if not children:
voc_dict[node.tag] = text
return voc_dict
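# Example usage (illustrative; the root path is an assumption, and download=True
# fetches the ~2 GB VOC2012 archive on first use):
#
#   dataset = VOCSegmentation(root="data", year="2012", image_set="train", download=True)
#   img, mask = dataset[0]   # PIL images: RGB input and segmentation mask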
|
######################################################################
#Programmer: Mateusz Przezdziecki date: 1/30/21
#File: prac_proj_1.py
#Purpose: Change the color of a single color.
######################################################################
import numpy as np
import matplotlib.image as mpimg # mpimg.imread(path)
import matplotlib.pyplot as plt # plt.imshow(np.array)
from scipy.ndimage import generic_filter, convolve
import cv2 # cv2.kmeans and prebuilt computer vision functions ie grayscale
#Load image and make a copy
image = mpimg.imread('salad.jpg')
output_image = image.copy()
plt.imshow(output_image)
plt.show()
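#A possible next step toward the stated purpose (thresholds and colors are
#assumptions, not from the original file): repaint one color with another.
#mask = ((output_image[..., 0] > 150) &
#        (output_image[..., 1] < 100) &
#        (output_image[..., 2] < 100))  # pixels that are predominantly red
#output_image[mask] = [0, 255, 0]       # repaint them green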
|
import sys
sys.path.append('../queue_and_stack')
# The project-skeleton Queue/Stack imports are unused here; the breadth-first
# traversal below uses the standard-library Queue instead.
# from dll_queue import Queue
# from dll_stack import Stack
from queue import Queue
class BinarySearchTree:
def __init__(self, value):
self.value = value
self.left = None
self.right = None
# Insert the given value into the tree
def insert(self, value):
if value < self.value:
if self.left is not None:
self.left.insert(value)
# Base case
else:
self.left = BinarySearchTree(value)
# If value is greater than or equal to self.value...
else:
if self.right is not None:
self.right.insert(value)
# Base case
else:
self.right = BinarySearchTree(value)
# Return True if the tree contains the value
# False if it does not
def contains(self, target):
# Base case
if target == self.value:
return True
if target < self.value:
if self.left is not None:
return self.left.contains(target)
# Base case
else:
return False
if target > self.value:
if self.right is not None:
return self.right.contains(target)
# Base case
else:
return False
# Return the maximum value found in the tree
    def get_max(self):
        # In a BST the maximum lives at the rightmost node
        node = self
        while node.right is not None:
            node = node.right
        return node.value
# Call the function `cb` on the value of each node
# You may use a recursive or iterative approach
def for_each(self, cb):
cb(self.value)
if self.left is not None:
self.left.for_each(cb)
if self.right is not None:
self.right.for_each(cb)
# DAY 2 Project -----------------------
# Print all the values in order from low to high
# Hint: Use a recursive, depth first traversal
    def in_order_print(self, node):
        # Recursive depth-first, in-order traversal: left subtree, node, right subtree
        if node is None:
            return
        self.in_order_print(node.left)
        print(node.value)
        self.in_order_print(node.right)
# Print the value of every node, starting with the given node,
# in an iterative breadth first traversal
def bft_print(self, node):
q = Queue()
q.put(node)
while not q.empty():
_node = q.get()
print(_node.value)
if _node.left is not None:
q.put(_node.left)
if _node.right is not None:
q.put(_node.right)
# Print the value of every node, starting with the given node,
# in an iterative depth first traversal
    def dft_print(self, node):
        stack = [node]
        while len(stack) > 0:
            _node = stack.pop()
            print(_node.value)
            if _node.left is not None:
                stack.append(_node.left)
            if _node.right is not None:
                stack.append(_node.right)
# STRETCH Goals -------------------------
# Note: Research may be required
# Print Pre-order recursive DFT
    def pre_order_dft(self, node):
        if node is None:
            return
        print(node.value)
        self.pre_order_dft(node.left)
        self.pre_order_dft(node.right)
# Print Post-order recursive DFT
    def post_order_dft(self, node):
        if node is None:
            return
        self.post_order_dft(node.left)
        self.post_order_dft(node.right)
        print(node.value)
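# Quick smoke test (commented out; values are arbitrary):
#   bst = BinarySearchTree(5)
#   for v in [3, 8, 1, 4]:
#       bst.insert(v)
#   bst.in_order_print(bst)   # prints 1 3 4 5 8
#   print(bst.get_max())      # prints 8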
|
#!/home/zcyang/git/script/Python/venv/bin/python
from ddos import Control_ddos
import sys
aclname = "acl1"
if __name__ == "__main__":
ar = sys.argv
ob = Control_ddos("10.0.100.114")
if ar[1] == "s1":
print(ob.send_command("show ddos global acl-ipv4"))
if ar[1] == "s2":
print(ob.send_command("show ddos global address4"))
if ar[1] == "s3":
print(ob.send_command("show ddos global addressgrp"))
if ar[1] == "s4":
print(ob.send_command("show ddos global service"))
if ar[1] == "d1":
args = {"variable_dict": {"aname": ar[2]}, "template": "del_acl.txt"}
print(ob.control_config(**args))
if ar[1] == "d2":
args = {"variable_dict": {"aname": ar[2]}, "template": "del_address.txt"}
print(ob.control_config(**args))
if ar[1] == "d3":
args = {"variable_dict": {"gname": ar[2]}, "template": "del_group.txt"}
print(ob.control_config(**args))
if ar[1] == "d4":
args = {"variable_dict": {"sname": ar[2]}, "template": "del_service.txt"}
print(ob.control_config(**args))
if ar[1] == "a11":
args = {"variable_dict": {"aname": aclname, "sname": ar[2]}, "template": "add_acl_sourcea.txt"}
print(ob.control_config(**args))
if ar[1] == "a12":
args = {"variable_dict": {"aname": aclname, "gname": ar[2]}, "template": "add_acl_sourceg.txt"}
print(ob.control_config(**args))
if ar[1] == "a13":
args = {"variable_dict": {"aname": aclname, "sname": ar[2]}, "template": "add_acl_dsta.txt"}
print(ob.control_config(**args))
if ar[1] == "a14":
args = {"variable_dict": {"aname": aclname, "gname": ar[2]}, "template": "add_acl_dstg.txt"}
print(ob.control_config(**args))
if ar[1] == "a15":
args = {"variable_dict": {"aname": aclname, "sname": ar[2]}, "template": "add_acl_service.txt"}
print(ob.control_config(**args))
if ar[1] == "a16":
args = {"variable_dict": {"aname": aclname, "gname": ar[2]}, "template": "add_acl_serviceg.txt"}
print(ob.control_config(**args))
if ar[1] == "a2":
args = {"variable_dict": {"aname": ar[2], "ip": ar[3]}, "template": "add_address_mask.txt"}
print(ob.control_config(**args))
if ar[1] == "a21":
args = {"variable_dict": {"aname": ar[2], "sip": ar[3], "bip": ar[4]}, "template": "add_address_range.txt"}
print(ob.control_config(**args))
if ar[1] == "a3":
args = {"variable_dict": {"gname": ar[2], "sname": ar[3]}, "template": "add_group.txt"}
print(ob.control_config(**args))
if ar[1] == "a41":
args = {"variable_dict": {"sname": ar[2], "sport": ar[3], "bport": ar[4]}, "template": "add_service_dport.txt"}
print(ob.control_config(**args))
if ar[1] == "a42":
args = {"variable_dict": {"sname": ar[2], "sport": ar[3], "bport": ar[4]}, "template": "add_service_sport.txt"}
print(ob.control_config(**args))
if ar[1] == "a43":
args = {"variable_dict": {"sname": ar[2], "type": ar[3]}, "template": "add_service_type.txt"}
print(ob.control_config(**args))
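# Example invocations (values are placeholders; ddos.py and the referenced
# config templates are assumed to be available):
#   ./script.py s1                      # runs "show ddos global acl-ipv4"
#   ./script.py a2 web01 10.0.0.0/24    # add an address object via add_address_mask.txt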
|
AIRPORTS = {'origin': 'x', 'destination': 'y'}
TEMPERATURES = {'x': 'temperature_x', 'y': 'temperature_y'}
PRECIPITATION = {'x': 'precipitation_x', 'y': 'precipitation_y'}
VISIBILITY = {'x': 'visibility_x', 'y': 'visibility_y'}
WINDSPEED = {'x': 'wind_speed_x', 'y': 'wind_speed_y'}
CATEGORICAL_INPUTS = ['carrier_code', 'day', 'weekday', 'month', 'origin_airport', 'destination_airport']
CONTINUOUS_INPUTS = ['scheduled_elapsed_time', 'scheduled_departure_dt',
'temperature_x', 'precipitation_x', 'visibility_x', 'wind_speed_x',
'temperature_y', 'precipitation_y', 'visibility_y', 'wind_speed_y']
INPUT_NAMES = {'carrier_code', 'origin_airport', 'destination_airport', 'day', 'month', 'weekday',
'scheduled_departure_dt', 'scheduled_elapsed_time',
'temperature', 'precipitation', 'visibility', 'wind_speed'}
DATETIME_FORMAT = '%d/%m/%y %H:%M'
|
import pockexport.export
import json
import pickle
from load.gsheet import Gsheet
def extract_pocket(consumer_key, access_token):
pocketData = pockexport.export.get_json(consumer_key=consumer_key, access_token=access_token)
with open('source/pocket/all.json', 'w') as outfile:
json.dump(pocketData,outfile,indent=2)
def extract_gsheet(sheet_id, cell_range):
    pocket_spreadsheet = Gsheet(sheet_id, cell_range)
pocket_spreadsheet_data = pocket_spreadsheet.get()['values']
with open('source/pocket/loaded.pickle', 'wb') as outfile:
pickle.dump(pocket_spreadsheet_data, outfile)
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
Author: MK_Devil
Date: 2022-01-13 11:13:09
LastEditTime: 2022-01-14 14:13:31
LastEditors: MK_Devil
'''
import sqlite3
# Open the database connection
conn = sqlite3.connect(r'.\实例\4、sqlite3\Alchemy.db')
# Create a cursor
cur = conn.cursor()
# Query and print all rows
# cur.execute(r'select * from material')
# print(cur.fetchall())
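# Release the connection when done (commented out, like the query above):
# conn.commit()
# cur.close()
# conn.close()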
|
#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies library_dirs (in link_settings) are properly found.
"""
import sys
import TestGyp
test = TestGyp.TestGyp()
lib_dir = test.tempdir('secret_location')
test.run_gyp('test.gyp',
'-D', 'abs_path_to_secret_library_location={0}'.format(lib_dir),
chdir='subdir')
# Must build each target independently, since they are not in each others'
# 'dependencies' (test.ALL does NOT work here for some builders, and in any case
# would not ensure the correct ordering).
test.build('test.gyp', 'mylib', chdir='subdir')
test.build('test.gyp', 'libraries-search-path-test', chdir='subdir')
expect = """Hello world
"""
test.run_built_executable(
'libraries-search-path-test', chdir='subdir', stdout=expect)
if sys.platform in ('win32', 'cygwin'):
test.run_gyp('test-win.gyp',
'-D',
'abs_path_to_secret_library_location={0}'.format(lib_dir),
chdir='subdir')
test.build('test.gyp', 'mylib', chdir='subdir')
test.build('test-win.gyp',
'libraries-search-path-test-lib-suffix',
chdir='subdir')
test.run_built_executable(
'libraries-search-path-test-lib-suffix', chdir='subdir', stdout=expect)
test.pass_test()
test.cleanup()
|
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
import pytest
from pants.fs.fs import safe_filename
class FixedDigest:
def __init__(self, size):
self._size = size
def update(self, value):
pass
def hexdigest(self):
return self._size * "*"
def test_bad_name() -> None:
with pytest.raises(ValueError):
safe_filename(os.path.join("more", "than", "a", "name.game"))
def test_noop() -> None:
assert "jack.jill" == safe_filename("jack", ".jill", max_length=9)
assert "jack.jill" == safe_filename("jack", ".jill", max_length=100)
def test_shorten() -> None:
assert "**.jill" == safe_filename("jack", ".jill", digest=FixedDigest(2), max_length=8)
def test_shorten_readable() -> None:
assert "j.**.e.jill" == safe_filename(
"jackalope", ".jill", digest=FixedDigest(2), max_length=11
)
def test_shorten_fail() -> None:
with pytest.raises(ValueError):
safe_filename("jack", ".beanstalk", digest=FixedDigest(3), max_length=12)
|
import click
import numpy as np
import logging
import pickle
from sklearn.preprocessing import RobustScaler
from sklearn.utils import check_random_state
from recnn.preprocessing import rewrite_content
from recnn.preprocessing import permute_by_pt
from recnn.preprocessing import extract
from recnn.recnn import event_predict
logging.basicConfig(level=logging.INFO,
format="[%(asctime)s %(levelname)s] %(message)s")
@click.command()
@click.argument("filename_train")
@click.argument("filename_test")
@click.argument("filename_model")
@click.argument("n_events_train")
@click.argument("n_events_test")
@click.argument("filename_output")
@click.option("--pflow", is_flag=True, default=False)
@click.option("--n_jets_per_event", default=10)
@click.option("--random_state", default=1)
def test(filename_train,
filename_test,
filename_model,
n_events_train,
n_events_test,
filename_output,
pflow=False,
n_jets_per_event=10,
random_state=1):
# Initialization
n_events_train = int(n_events_train)
n_events_test = int(n_events_test)
logging.info("Calling with...")
logging.info("\tfilename_train = %s" % filename_train)
logging.info("\tfilename_test = %s" % filename_test)
logging.info("\tfilename_model = %s" % filename_model)
logging.info("\tn_events_train = %d" % n_events_train)
logging.info("\tn_events_test = %d" % n_events_test)
logging.info("\tfilename_output = %s" % filename_output)
logging.info("\tpflow = %s" % pflow)
logging.info("\tn_jets_per_event = %d" % n_jets_per_event)
logging.info("\trandom_state = %d" % random_state)
rng = check_random_state(random_state)
# Make data
logging.info("Loading train data + preprocessing...")
fd = open(filename_train, "rb")
    # training file is assumed to be formatted as a sequence of pickled pairs
    # (e_i, y_i), where e_i is a list of (phi, eta, pt, mass, jet) tuples.
X = []
y = []
for i in range(n_events_train):
e_i, y_i = pickle.load(fd)
original_features = []
jets = []
for j, (phi, eta, pt, mass, jet) in enumerate(e_i[:n_jets_per_event]):
if len(jet["tree"]) > 1:
original_features.append((phi, eta, pt, mass))
jet = extract(permute_by_pt(rewrite_content(jet)), pflow=pflow)
jets.append(jet)
if len(jets) == n_jets_per_event:
X.append([np.array(original_features), jets])
y.append(y_i)
y = np.array(y)
fd.close()
logging.info("\tfilename = %s" % filename_train)
logging.info("\tX size = %d" % len(X))
logging.info("\ty size = %d" % len(y))
# Building scalers
logging.info("Building scalers...")
tf_features = RobustScaler().fit(
np.vstack([features for features, _ in X]))
tf_content = RobustScaler().fit(
np.vstack([j["content"] for _, jets in X for j in jets]))
X = None
y = None
# Loading test data
logging.info("Loading test data + preprocessing...")
fd = open(filename_test, "rb")
    # test file is assumed to be formatted as a sequence of pickled pairs
    # (e_i, y_i), where e_i is a list of (phi, eta, pt, mass, jet) tuples.
X = []
y = []
for i in range(n_events_test):
e_i, y_i = pickle.load(fd)
original_features = []
jets = []
for j, (phi, eta, pt, mass, jet) in enumerate(e_i[:n_jets_per_event]):
if len(jet["tree"]) > 1:
original_features.append((phi, eta, pt, mass))
jet = extract(permute_by_pt(rewrite_content(jet)), pflow=pflow)
jets.append(jet)
if len(jets) == n_jets_per_event:
X.append([np.array(original_features), jets])
y.append(y_i)
y = np.array(y)
fd.close()
logging.info("\tfilename = %s" % filename_train)
logging.info("\tX size = %d" % len(X))
logging.info("\ty size = %d" % len(y))
# Scaling
logging.info("Scaling...")
for i in range(len(X)):
X[i][0] = tf_features.transform(X[i][0])
for j in X[i][1]:
j["content"] = tf_content.transform(j["content"])
# Testing
logging.info("Testing...")
predict = event_predict
fd = open(filename_model, "rb")
params = pickle.load(fd)
fd.close()
all_y_pred = []
for start in range(0, len(y), 1000):
y_pred = predict(params, X[start:start+1000],
n_jets_per_event=n_jets_per_event)
all_y_pred.append(y_pred)
y_pred = np.concatenate(all_y_pred)
# Save
output = np.hstack((y.reshape(-1, 1),
y_pred.reshape(-1, 1)))
fd = open(filename_output, "wb")
pickle.dump(output, fd, protocol=2)
fd.close()
if __name__ == "__main__":
test()
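# Example invocation (file names and event counts are placeholders):
#   python test.py train.pickle test.pickle model.pickle 10000 10000 \
#       predictions.pickle --n_jets_per_event=10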
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import cv2 # pip install opencv-python
import readInput # this is not a package, this is a local file (located in the folder)
import pandas as pd
from sklearn.decomposition import PCA
import numpy as np
import time
import pickle
import os
import sys
# In[2]:
dir_path = os.path.dirname(os.path.realpath(__file__))
os.chdir(dir_path)
def create_SVM():
svm = cv2.ml.SVM_create()
svm.setType(cv2.ml.SVM_C_SVC)
svm.setKernel(cv2.ml.SVM_RBF)
svm.setC(0.1)
svm.setGamma(0.1)
return svm
def getHogDescriptors(img_arr):
# HOG Parameters:
winSize = 32
blockSize = 12
blockStride = 4
cellSize = 4
nbins = 18
derivAperture = 1
winSigma = -1.
histogramNormType = 0
L2HysThreshold = 0.2
gammaCorrection = True
nlevels = 64
signedGradient = True
    hog = cv2.HOGDescriptor((winSize, winSize), (blockSize, blockSize),
                            (blockStride, blockStride), (cellSize, cellSize),
                            nbins, derivAperture, winSigma, histogramNormType,
                            L2HysThreshold, gammaCorrection, nlevels, signedGradient)
return np.array([hog.compute(img).flatten() for img in img_arr])
def getPCA(X_train):
t1 = time.time()
pca = PCA(3000)
pca.fit(X_train)
print('dumping pca file.')
with open('SvmHogPca', 'wb') as writeFile:
pickle.dump(pca, writeFile)
print(f'pca took {time.time()-t1} seconds.')
return pca
def trainHogSvmClassifier():
print('Training HOG + SVM Classifier')
svm = create_SVM()
train_img_arr, y_train = readInput.readTrainData()
X_train = getHogDescriptors(train_img_arr)
pca = getPCA(X_train)
X_train = pca.transform(X_train)
t2 = time.time()
svm.train(np.asarray(X_train), cv2.ml.ROW_SAMPLE, np.asarray(y_train, dtype=np.int32))
print(f'SVM training took {time.time()-t2} seconds')
print('Dumping classifier to file.')
svm.save('SvmHogModel')
return svm, pca
def predictTestData(svm, pca):
t1 = time.time()
test_img_arr, y_test = readInput.readTestData()
X_test = getHogDescriptors(test_img_arr)
X_test = pca.transform(X_test)
predictions = svm.predict(np.asarray(X_test))[1].ravel()
accuracy = (y_test == predictions).mean()
print(f'Test Accuracy = {accuracy*100} %.')
print(f'Predicted in {time.time()-t1} seconds.')
pred_df = pd.DataFrame(data={'Predictions' : predictions, 'Actual' : y_test})
return pred_df
def saveDfToCsv(df):
df.to_csv('mod_2_PredictionsHogSvm.csv')
print('Saved Predictions.csv')
# In[3]:
def getClassifier(arg):
if arg.lower() == 'forcetrain':
clf_svm, pca = trainHogSvmClassifier()
else:
try:
print('opening pre-trained model file.')
#with open(r"SvmHogModel", "rb") as inputFile:
clf_svm = cv2.ml.SVM_load('SvmHogModel')
with open(r"SvmHogPca", "rb") as inputFile:
pca = pickle.load(inputFile)
print('Loaded Pre-trained HogSVM classifier and pca.')
except FileNotFoundError:
print('File not found. Initiate Force Training.')
clf_svm, pca = trainHogSvmClassifier()
return clf_svm, pca
# In[4]:
def main(arg):
    # Load (or train) the HOG+SVM classifier and run predictions on the test data
#arg = 'forcetrain' #sys.argv[1]
#arg = 'dummy'
clf, pca = getClassifier(arg)
df = predictTestData(clf, pca)
saveDfToCsv(df)
    print('Program exited successfully.')
# In[6]:
arg_cnt = len(sys.argv)
if arg_cnt == 1:
print("""Error: Argument missing. Please use 'forcetrain' or 'pretrain'""")
else:
arg = sys.argv[1]
if arg.lower() not in ['forcetrain', 'pretrain']:
print("""Error: Incorrect argument. Please use 'forcetrain' or 'pretrain'""")
exit(1)
else:
main(arg)
exit(0)
|
from value_objects.util.decorate import wraps
def once( compute ):
'''
Use @once instead of @property when you want a cached property
'''
# global count ensures uniqueness even when the function is unnamed (i.e. lambda)
global once_count
once_count += 1
# including __name__ for debugging convenience
key = '__once%s__%s' % ( once_count, compute.__name__ )
@property
@wraps( compute )
def cached_compute( s ):
if not hasattr( s, key ):
val = compute( s )
setattr( s, key, val )
return getattr( s, key )
return cached_compute
once_count = 0
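# Example (hypothetical class, matching this module's spacing style):
#   class Circle( object ):
#       def __init__( self, radius ):
#           self.radius = radius
#       @once
#       def area( self ):
#           return 3.14159 * self.radius ** 2
#
#   c = Circle( 2 )
#   c.area   # computed on first access, then served from the cached attribute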
|
import numpy as np
import matplotlib.pyplot as plt
from mnist import MNIST
def load_data():
# Load data
mndata = MNIST('data_files')
train_x, train_y = mndata.load_training()
test_x, test_y = mndata.load_testing()
# Convert to numpy arrays
train_x = np.array(train_x).T / 255
train_y = np.array(train_y).reshape((1, -1))
test_x = np.array(test_x).T / 255
test_y = np.array(test_y).reshape((1, -1))
return train_x, train_y, test_x, test_y
def display(a, index):
plt.imshow(a.T[index].reshape(28, 28), cmap="gray", vmin=0, vmax=1)
plt.show()
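# Example (assumes the MNIST files are present in ./data_files):
#   train_x, train_y, test_x, test_y = load_data()
#   display(train_x, 0)   # show the first training digit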
|
#=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
# 2018 July 7 polygon set
import os
os.chdir('C:\\Users\\James\\Documents\\data science education\\GA\\DSI\\capstone\\stars\\code')
%run -i alpha_utils
%run -i train_prep_lib
os.chdir('C:\\Users\\James\\Documents\\data science education\\GA\\DSI\\capstone\\stars\\data\\polygons')
os.mkdir('july7')
os.chdir('july7')
folder = os.getcwd()
generate_set_of_raw_polygon_tifs(n_pts=200, n_sampled_pts=60,
alpha_factor=0.9, n_sets=5000,
folder=folder, rnd_seed=0, format='png')
#=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
# 2018 July 8 polygon set
import os
os.chdir('C:\\Users\\James\\Documents\\data science education\\GA\\DSI\\capstone\\stars')
%run -i code/alpha_utils
%run -i code/train_prep_lib
os.mkdir('./data/polygons/july8')
folder = 'data/polygons/july8'
generate_set_of_raw_polygon_tifs(n_pts=80, n_sampled_pts=50,
alpha_factor=0.9, n_sets=5000,
folder=folder, rnd_seed=0, format='png');
#=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
# 2018 July 9 polygon set
import os
os.chdir('C:\\Users\\James\\Documents\\data science education\\GA\\DSI\\capstone\\stars')
%run -i code/alpha_utils
%run -i code/train_prep_lib
os.mkdir('./data/polygons/july9')
folder = 'data/polygons/july9'
generate_set_of_raw_polygon_tifs(n_pts=100, n_sampled_pts=30,
alpha_factor=0.9, n_sets=5000,
folder=folder, rnd_seed=0, format='png');
#=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
# 2018 July 10 polygon set
import os
os.chdir('C:\\Users\\James\\Documents\\data science education\\GA\\DSI\\capstone\\stars')
%run -i code/alpha_utils
%run -i code/train_prep_lib
os.mkdir('./data/polygons/july10')
folder = 'data/polygons/july10'
generate_set_of_raw_polygon_tifs(n_pts=100, n_sampled_pts=50,
alpha_factor=0.9, n_sets=5000,
folder=folder, rnd_seed=0, format='png');
|
#I pledge my honor that I have abided by the Stevens Honor System. Jill McDonald
#Problem 1
def bmi(weight, height):
bmi = weight * 720 / height ** 2
print('Your BMI is', bmi)
if bmi < 19:
print('Your BMI is considered below the healthy range.')
elif bmi <= 25:
print('Your BMI is considered within the healthy range.')
else:
print('Your BMI is considered above the healthy range.')
weight = float(input('How much do you weigh: '))
height = float(input('How tall are you in inches: '))
bmi(weight, height)
|
# import tcs
# import daqctrl, inspect
# ------------------------------------------------------------------
# install the script by:
# cd $INTROOT/config/scripts
# ln -s $guiInstalDir/ctaOperatorGUI/ctaGuiBack/ctaGuiBack/acs/guiACS_schedBlocks_script0.py
# ------------------------------------------------------------------
# ------------------------------------------------------------------
from random import Random
rndGen = Random(10987268332)
waitTime = dict()
waitTime['config_daq'] = rndGen.randint(1, 3)
waitTime['config_camera'] = rndGen.randint(1, 5)
waitTime['config_mount'] = rndGen.randint(2, 7)
waitTime['finish_daq'] = rndGen.randint(1, 6)
waitTime['finish_camera'] = rndGen.randint(1, 3)
waitTime['finish_mount'] = rndGen.randint(1, 2)
def get_short_wait(duration, wait_type):
return waitTime[wait_type] if duration > 1 else 1
# ------------------------------------------------------------------
#
# ------------------------------------------------------------------
__phases__ = [
"configuring",
"config_daq",
"config_camera",
"config_mount",
"take_data",
"closing",
"finish_daq",
"finish_camera",
"finish_mount",
]
# ------------------------------------------------------------------
#
# ------------------------------------------------------------------
def configuring():
coords = observationBlock.src.coords
p = None
try:
p = (coords.equatorial.ra, coords.equatorial.dec)
except:
pass
if not p:
try:
p = (coords.horizontal.alt, coords.horizontal.az)
except:
pass
if not p:
try:
p = (coords.galactic.lon, coords.galactic.lat)
except:
pass
if not p:
p = (0, 0)
print "Coordinates used: (" + str(p[0]) + ", " + str(p[1]) + ")"
    try:
        divergence = schedulingBlock.config.instrument.pointing_mode.divergent_.divergence
        print("Divergence used: " + str(divergence))
    except:
        print("Pointing mode is not divergent")
# resources.target = tcs.SkyEquatorialTarget(
# p[0], p[1], tcs.ICRS, tcs.J2000, 0.0, 0.0, 0.0, 0.0
# )
allowPhaseStart("config_daq")
allowPhaseStart("config_camera")
allowPhaseStart("config_mount")
return
# ------------------------------------------------------------------
def config_daq():
updatePhase("config_daq", "config_daq has began ...", 0)
allowPhaseStart("config_camera")
allowPhaseStart("config_mount")
# operationStatus = daq().operationStatus
# # Check daq operational status
# if operationStatus != daqctrl.NOMINAL and operationStatus != daqctrl.IDLE:
# raise RuntimeError('DAQ status not idle/nominal: ' + operationStatus)
# # Configure daq
# daqConfigured = configureDAQ()
# if not daqConfigured:
# raise RuntimeError('DAQ configuration failed')
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(get_short_wait(blockDuration, 'config_daq'))
updatePhase("config_daq", "config_daq has ended...", 100)
return
# ------------------------------------------------------------------
def config_camera():
updatePhase("config_camera", "config_camera has began ...", 0)
allowPhaseStart("config_mount")
# cameraConfig = schedulingBlock.config.camera_configuration
# telescopes.configureCameras(cameraConfig)
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(get_short_wait(blockDuration, 'config_camera'))
updatePhase("config_camera", "config_camera has ended...", 100)
return
# ------------------------------------------------------------------
def config_mount():
updatePhase("config_mount", "config_mount has began ...", 0)
# telescopes.startSlewing(resources.target)
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(get_short_wait(blockDuration, 'config_mount'))
updatePhase("config_mount", "config_mount has ended...", 100)
return
# ------------------------------------------------------------------
def take_data():
updatePhase("take_data", "take_data has began ...", 0)
# daq().moveToNextOutputBlock(daqctrl.ZFITS_ZLIB)
# resources.trackingDuration = blockDuration
# telescopes.startTracking(resources.trackingDuration,resources.target)
# telescopes.startDataTaking()
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(blockDuration)
# telescopes.stopDataTaking()
updatePhase("take_data", "take_data has ended...", 100)
return
# ------------------------------------------------------------------
def closing():
allowPhaseStart("finish_daq")
allowPhaseStart("finish_camera")
allowPhaseStart("finish_mount")
return
# ------------------------------------------------------------------
def finish_daq():
updatePhase("finish_daq", "finish_daq has began ...", 0)
allowPhaseStart("finish_camera")
allowPhaseStart("finish_mount")
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(get_short_wait(blockDuration, 'finish_daq'))
updatePhase("finish_daq", "finish_daq has ended...", 100)
return
# ------------------------------------------------------------------
def finish_camera():
updatePhase("finish_camera", "finish_camera has began ...", 0)
allowPhaseStart("finish_mount")
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(get_short_wait(blockDuration, 'finish_camera'))
updatePhase("finish_camera", "finish_camera has ended...", 100)
return
# ------------------------------------------------------------------
def finish_mount():
updatePhase("finish_mount", "finish_mount has began ...", 0)
    # add waiting time since waitToFinish is useless ............
# telescopes.waitToFinish()
wait(get_short_wait(blockDuration, 'finish_mount'))
updatePhase("finish_mount", "finish_mount has ended...", 100)
return
# ------------------------------------------------------------------
def cleanUp():
pass
|
import unittest
from katas.kyu_6.validate_credit_card_number import validate
class ValidateCreditCardNumberTestCase(unittest.TestCase):
def test_true(self):
self.assertTrue(validate(26))
def test_true_2(self):
self.assertTrue(validate(91))
def test_true_3(self):
self.assertTrue(validate(1230))
def test_true_4(self):
self.assertTrue(validate(2121))
def test_true_5(self):
self.assertTrue(validate(912030))
def test_true_6(self):
self.assertTrue(validate(2626262626262626))
def test_true_7(self):
self.assertTrue(validate(4111111111111111))
def test_false(self):
self.assertFalse(validate(1))
def test_false_2(self):
self.assertFalse(validate(92))
def test_false_3(self):
self.assertFalse(validate(123))
def test_false_4(self):
self.assertFalse(validate(1714))
def test_false_5(self):
self.assertFalse(validate(922030))
def test_false_6(self):
self.assertFalse(validate(8675309))
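# For reference, a minimal Luhn-checksum sketch consistent with these cases
# (the kata's actual solution may differ):
#
#   def validate(n):
#       digits = [int(d) for d in str(n)][::-1]
#       total = 0
#       for i, d in enumerate(digits):
#           if i % 2 == 1:       # double every second digit from the right
#               d *= 2
#               if d > 9:
#                   d -= 9       # equivalent to summing the two digits
#           total += d
#       return total % 10 == 0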
|
#!/usr/bin/env python3
#
# This example demonstrates the use of consistent radial transport
# on f_re and n_re.
#
# Run as
#
# $ ./generate.py
# $ ../../build/iface/dreami dream_settings.h5
#
# ###################################################################
import numpy as np
import sys
sys.path.append('../../py/')
from DREAM.DREAMSettings import DREAMSettings
import DREAM.Settings.Equations.ColdElectronTemperature as T_cold
import DREAM.Settings.Equations.ElectricField as Efield
import DREAM.Settings.Equations.HotElectronDistribution as FHot
import DREAM.Settings.Equations.IonSpecies as Ions
import DREAM.Settings.Equations.RunawayElectrons as Runaways
import DREAM.Settings.Solver as Solver
import DREAM.Settings.TransportSettings as Transport
ds = DREAMSettings()
# Physical parameters
E = 0.5 # Electric field strength (V/m)
n = 5e19 # Electron density (m^-3)
T = 1e3 # Temperature (eV)
# Grid parameters
pMax = 50 # maximum momentum in units of m_e*c
Np = 50 # number of momentum grid points
Nxi = 24 # number of pitch grid points
tMax = 1 # simulation time in seconds
Nt = 50 # number of time steps
Nr = 8 # number of radial grid points
dBOverB = 1e-3 # Magnetic perturbation strength
R0 = 1.6 # Tokamak major radius
# Set E_field
#ds.eqsys.E_field.setPrescribedData(E)
ds.eqsys.E_field.setType(Efield.TYPE_SELFCONSISTENT)
ds.eqsys.E_field.setBoundaryCondition(bctype=Efield.BC_TYPE_PRESCRIBED, V_loop_wall_R0=E*2*np.pi, R0=R0)
# Set temperature
ds.eqsys.T_cold.setPrescribedData(T)
# Set ions
ds.eqsys.n_i.addIon(name='D', Z=1, iontype=Ions.IONS_PRESCRIBED_FULLY_IONIZED, n=n)
# Enable fluid avalanche generation (Hesslow model) and the neural-network Dreicer rate
ds.eqsys.n_re.setAvalanche(Runaways.AVALANCHE_MODE_FLUID_HESSLOW)
ds.eqsys.n_re.setDreicer(Runaways.DREICER_RATE_NEURAL_NETWORK)
# Disable hot-tail grid
ds.hottailgrid.setEnabled(False)
# Runaway grid
ds.runawaygrid.setNxi(Nxi)
ds.runawaygrid.setNp(Np)
ds.runawaygrid.setPmax(pMax)
# (Optional) initial value of the runaway distribution
#ds.eqsys.f_re.setInitialValue(0)
# Set up radial grid
ds.radialgrid.setB0(3)
ds.radialgrid.setMinorRadius(0.5)
ds.radialgrid.setWallRadius(0.5)
ds.radialgrid.setNr(Nr)
# Set Rechester-Rosenbluth transport
ds.eqsys.f_re.transport.setMagneticPerturbation(dBOverB)
ds.eqsys.f_re.transport.setBoundaryCondition(Transport.BC_F_0)
# Set solver type
#ds.solver.setType(Solver.LINEAR_IMPLICIT) # semi-implicit time stepping
ds.solver.setType(Solver.NONLINEAR)
ds.solver.setVerbose(False)
ds.solver.setLinearSolver(Solver.LINEAR_SOLVER_MKL)
#ds.solver.tolerance.set(reltol=1e-4)
# include otherquantities to save to output
ds.other.include('fluid')
# Set time stepper
ds.timestep.setTmax(tMax)
ds.timestep.setNt(Nt)
ds.output.setTiming(stdout=True, file=True)
ds.output.setFilename('output.h5')
# Save settings to HDF5 file
ds.save('dream_settings.h5')
|
from django import forms
from .models import Read
class ReadingForm(forms.ModelForm):
class Meta:
model = Read
fields = ('title', 'url', 'data')
# your_name = forms.CharField(label='Your name', max_length=100)
# title = forms.CharField(label='Title',max_length=200)
# url = forms.URLField(label='Url')
# data = forms.CharField(label='Content', max_length=4000, widget=forms.Textarea)
# created_date = forms.DateField()
# country = models.CharField(max_length=50)
|
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
from config import pms_app
# sqlite_url = "postgresql://postgres:nikhil@localhost:5432/probe_management_system"
sqlite_url = "postgresql://postgres:nikhil@localhost:5432/probe_management_system"
pms_app.config["SQLALCHEMY_ECHO"] = True
pms_app.config["SQLALCHEMY_DATABASE_URI"] = sqlite_url
pms_app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
# Create the SqlAlchemy db instance
db = SQLAlchemy(pms_app)
# Initialize Marshmallow
ma = Marshmallow(pms_app)
|
# Generated by Django 2.2.1 on 2019-05-19 20:15
from django.db import migrations
import localflavor.generic.models
class Migration(migrations.Migration):
dependencies = [
('events', '0007_event_event_country'),
]
operations = [
migrations.AlterField(
model_name='event',
name='event_country',
field=localflavor.generic.models.BICField(blank=True, max_length=11),
),
]
|
from os.path import join
from SCons.Script import Import, SConscript
Import("env")
SConscript(
join(env.PioPlatform().get_package_dir("framework-zephyr"), "scripts",
"platformio", "platformio-build.py"), exports="env")
|
html_template = """
<html>
<head>
<title>Widget export</title>
<!-- Load RequireJS, used by the IPywidgets for dependency management -->
<script
src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"
integrity="sha256-Ae2Vz/4ePdIu6ZyI/5ZGsYnb+m0JlOmKPjt6XZ9JJkA="
crossorigin="anonymous">
</script>
<!-- Load IPywidgets bundle for embedding. -->
<script
data-jupyter-widgets-cdn="https://unpkg.com/"
data-jupyter-widgets-cdn-only
src="https://cdn.jsdelivr.net/npm/@jupyter-widgets/html-manager@*/dist/embed-amd.js"
crossorigin="anonymous">
</script>
<!-- The state of all the widget models on the page -->
<script type="application/vnd.jupyter.widget-state+json">
{manager_state}
</script>
</head>
<body>
<h1>Widget export</h1>
<div id="first-slider-widget">
<!-- This script tag will be replaced by the view's DOM tree -->
<script type="application/vnd.jupyter.widget-view+json">
{widget_views[0]}
</script>
</div>
<hr />
<div id="second-slider-widget">
<!-- This script tag will be replaced by the view's DOM tree -->
<script type="application/vnd.jupyter.widget-view+json">
{widget_views[1]}
</script>
</div>
</body>
</html>
"""
|
import time
from server.blockchain.blockchain import Blockchain
from server.config import SECONDS
blockchain = Blockchain()
times = []
for i in range(1000):
start_time = time.time_ns()
    # time how long add_block takes to execute; mining difficulty makes this vary
blockchain.add_block(i)
end_time = time.time_ns()
time_to_mine = (end_time - start_time) / SECONDS
times.append(time_to_mine)
    # running average over all blocks mined so far
    average_time = sum(times) / len(times)
print(f'New block difficulty: {blockchain.chain[-1].difficulty}')
print(f'Time to mine new block: {time_to_mine}s')
print(f'Average time to add blocks: {average_time}s\n')
"""
Testing the number of time needed for adding 1000 blocks
init blockchain
Blockchain()
for loop:
Blockchain.add_block('argument is the data'); we can use dummy data for testing
"""
|
import unittest
from katas.beta.only_readable_once_list import SecureList
class SecureListTestCase(unittest.TestCase):
def setUp(self):
self.base = [1, 2, 3, 4]
def test_equals(self):
a = SecureList(self.base)
self.assertEqual(a[0], self.base[0])
self.assertEqual(a[0], self.base[1])
self.assertEqual(len(a), 2)
        print('Current List: {!s}'.format(a))
self.assertEqual(len(a), 0)
def test_equals_2(self):
b = SecureList(self.base)
        print('Current List: {!r}'.format(b))
self.assertEqual(len(b), 0)
|
truthy = True
falsy = False
age = 20
is_over_age = age >= 18
is_under_age = age < 18
is_twenty = age == 20
my_number = 5
user_number = int(input("Enter a number: "))
print(my_number == user_number)
print(my_number != user_number)
yes = True and True
no = True and False
print(no)
which_one_is_it = True or False
second_one = False or True
first_one = True or True
neither = False or False
absolutely = not False
another_no = not True
is_programmer = True
is_learning = False
awesome = is_programmer and is_learning
less_awesome = is_programmer and not is_learning
is_designer = False
great_guy = (is_programmer or is_designer) and is_learning
"""https://blog.tecladocode.com/logical-comparisons-in-python-and-or/"""
|
import matplotlib.pyplot as plt
import matplotlib.cm
import numpy as np
import seaborn as sb
import math
class Plot:
def __init__(self):
self.name = "Plot"
# colour map: agent properties in time
def Fig1(self, sellerP, numB, capital):
        if sellerP.any():
fig = plt.figure()
current_cmap = matplotlib.cm.get_cmap("jet").copy()
current_cmap.set_bad(color='white')
plt.pcolormesh(sellerP, cmap=current_cmap, rasterized=True)
plt.title("Prices of seller sites in time (after rebirth)")
plt.ylabel("Time")
plt.xlabel("Position")
color_bar = plt.colorbar(label="Price")
color_bar.minorticks_on()
fig.savefig("data/Prices_in_time.pdf")
fig.show()
        if np.any(numB):
fig = plt.figure()
plt.pcolormesh(numB, cmap='jet', rasterized=True)
plt.title("Number of buyers at buyer sites in time (after relocation)")
plt.ylabel("Time")
plt.xlabel("Position")
color_bar = plt.colorbar(label="Counts")
color_bar.minorticks_on()
fig.savefig("data/NumB_in_time.pdf")
fig.show()
        if capital.any():
fig = plt.figure()
current_cmap = matplotlib.cm.get_cmap("turbo").copy()
current_cmap.set_bad(color='white')
plt.pcolormesh(capital, cmap=current_cmap, rasterized=True)
plt.title("Capital of seller sites in time (after rebirth)")
plt.ylabel("Time")
plt.xlabel("Position")
color_bar = plt.colorbar(label="Capital")
color_bar.minorticks_on()
fig.savefig("data/Capital_in_time.pdf")
fig.show()
def Fig2(self, sellerP, num_sellers, time_steps, p_max, gamma, beta, delta, seed, num_bins):
fig = plt.figure()
widths = [1]
heights = [1, 3, 3]
gs = fig.add_gridspec(ncols=len(widths), nrows=len(heights), width_ratios=widths, height_ratios=heights, hspace=1.0)
init = fig.add_subplot(gs[1, 0])
final = fig.add_subplot(gs[2, 0])
plots = [init, final]
init.hist(sellerP[0], alpha=1, bins=num_bins, color='navy', linewidth=0.8)
final.hist(sellerP[-1], alpha=1, bins=num_bins, color='navy', linewidth=0.8)
for plot in plots:
plot.set_xlim(1, p_max)
plot.grid(axis='x')
plot.set_ylabel('Number of sellers')
plot.set_xlabel('Price')
init.text(0.01, 1.1, 'Initial distribution', fontsize=10, transform=init.transAxes)
final.text(0.01, 1.1, 'Final distribution', fontsize=10, transform=final.transAxes)
title = fig.add_subplot(gs[0, 0])
title.axis('off')
t = "Price distribution"
st = r'Number of sellers: ' + str(num_sellers) + r'; Time steps: ' + str(time_steps) + '\n' + \
r'$P_{max}$ = ' + str(p_max) + r'; $\gamma$ = ' + str(gamma) + r'; $\beta$ = ' + str(beta) + r'; $\Delta$ = ' + \
str(delta) + r'; seed = ' + str(seed)
title.text(0, 1.8, t, fontweight='bold',
fontsize=18,
verticalalignment='top',
horizontalalignment='left')
title.text(0, 1., st, fontsize=14, verticalalignment='top', horizontalalignment='left')
fig.savefig("data/Price_distribution.pdf")
fig.show()
def Fig3(self, frac_live_bRS, frac_live_aRS, left, bottom, width, height):
fig = plt.figure()
plt.title("Fraction of live sellers in time")
plt.ylabel("Fraction of live sellers (before rebirth)")
plt.xlabel("Time")
plt.plot(frac_live_bRS)
plt.axes([left, bottom, width, height])
plt.plot(frac_live_aRS, color='orange')
plt.title('after rebirth')
fig.savefig("data/Frac_liveS_in_time.pdf")
fig.show()
def Fig4(self, t, vacancy_befRebS, vacancy_afterRebS, left, bottom, width, height):
vac_site_befReb = [0 for i in range(0, t + 1)]
vac_site_afterReb = [0 for i in range(0, t + 1)]
for i in range(0, t + 1):
vac_site_befReb[i] = vacancy_befRebS[i].count(1)
vac_site_afterReb[i] = vacancy_afterRebS[i].count(1)
time = np.arange(t + 1)
fig = plt.figure()
plt.title("Number of vacant sites in time")
plt.xlabel("Time")
plt.ylabel("Number of vacant sites (before rebirth)")
plt.plot(time, vac_site_befReb)
plt.axes([left, bottom, width, height])
plt.plot(time, vac_site_afterReb, color='orange')
plt.title('after rebirth')
fig.savefig("data/VacancyS_in_time.pdf")
fig.show()
def Fig5(self, time, frac_live_bRS, mean_price, mean_capital):
fig, (ax1, ax2, ax3) = plt.subplots(3, sharex='all')
ax1.plot(frac_live_bRS[1:])
ax1.set(ylabel='Fraction alive')
ax2.plot(time, mean_price[1:])
ax2.set(ylabel='Mean price')
ax3.plot(time, mean_capital[1:])
ax3.set(xlabel='Time', ylabel='Mean capital')
fig.savefig("data/Seller_params_bR_osc.pdf")
fig.show()
def Fig6(self, t, frac_liveS, num_sellers, num_liveB):
# constant multiplication over list
num_liveS = [x * num_sellers for x in frac_liveS]
fig = plt.figure()
plt.plot(t, num_liveS[:], marker='o', color='navy', label='sellers')
plt.plot(t, num_liveB[:], marker='o', color='orange', label='buyers')
plt.legend(loc='best')
plt.title("Number of live agents at end of round in time")
plt.xlabel("Time")
plt.ylabel("Counts")
plt.grid(alpha=0.3)
fig.savefig("data/Number_alive.pdf")
fig.show()
def Fig7(self, priceS, bins):
#bins = math.ceil((np.nanmax(priceS) - np.nanmin(priceS)) / np.sqrt(2))
plt.figure()
plt.hist(priceS, bins=bins, alpha=0.3)
plt.xlabel("Price")
sb.kdeplot(priceS, bw_method=0.1, fill=True)
plt.savefig("data/Price_distribution.pdf")
plt.show()
|
from Utilities.ConfigurationsHelper import set_configuration, get_configuration
def initialize(bot):
for guild in bot.guilds:
guild_id = guild.id
# Default ADMIN_ROLE is either a role named Commissions or
# the top role in role hierarchy
if not get_configuration(guild_id, "ADMIN_ROLE"):
selected_role_id = None
for role in guild.roles:
if role.name.lower() == "commissions":
selected_role_id = role.id
            if selected_role_id is None:
selected_role_id = guild.roles[-1].id
set_configuration(guild_id, "ADMIN_ROLE", selected_role_id)
# Default BOT_CHANNEL is either a text-channel named bots or
        # the first channel in the channel list
if not get_configuration(guild_id, "BOT_CHANNEL"):
selected_channel_id = None
for channel in guild.channels:
if channel.name.lower() == "bots":
selected_channel_id = channel.id
            if selected_channel_id is None:
selected_channel_id = guild.channels[0].id
set_configuration(guild_id, "BOT_CHANNEL", selected_channel_id)
# Default NOTICE_DAYS_INTERVAL is: 3
if not get_configuration(guild_id, "NOTICE_DAYS_INTERVAL"):
set_configuration(guild_id, "NOTICE_DAYS_INTERVAL", 3)
        # Default FINAL_NOTICE_DAYS_BEFORE is: 2
if not get_configuration(guild_id, "FINAL_NOTICE_DAYS_BEFORE"):
set_configuration(guild_id, "FINAL_NOTICE_DAYS_BEFORE", 2)
# Default DAYS_REMAINING_WARNING is:
if not get_configuration(guild_id, "DAYS_REMAINING_WARNING"):
set_configuration(
guild_id,
"DAYS_REMAINING_WARNING",
"{user_mention}, you have {remaining_days} days left to complete `{commission_name}` commission.",
)
# Default DAYS_REMAINING_FORFEIT_WARNING is:
if not get_configuration(guild_id, "DAYS_REMAINING_FORFEIT_WARNING"):
set_configuration(
guild_id,
"DAYS_REMAINING_FORFEIT_WARNING",
"{user_mention}, you have {remaining_days} days left to complete `{commission_name}` commission. You may forfeit the reward if the commission is not complete on time.",
)
# Default EXPIRED_NOTICE is:
if not get_configuration(guild_id, "EXPIRED_NOTICE"):
set_configuration(
guild_id,
"EXPIRED_NOTICE",
"{admin_role_mention} {user_mention}, your `{commission_name}` commission has expired.",
)
# Default INVALID_PERMISSION is You do not have permission to use that command!:
if not get_configuration(guild_id, "INVALID_PERMISSION"):
set_configuration(
guild_id,
"INVALID_PERMISSION",
"You do not have permission to use that command!",
)
|
from django.shortcuts import render
from manager.models import *
from django import http
from django.views import View
import json, requests
from django.db.models import Max
from django.db import transaction
from django.db import IntegrityError
from django.db.models import Sum, Count, Max, Min, Avg
def login_check(request):
    dict_data = json.loads(request.body, strict=False)
mg_info = managerInfo.objects.filter(userid = dict_data['userid'], password = dict_data['password'], user_type = dict_data['user_type'])
login_response = {"userid":dict_data['userid'], "success":0, "power":dict_data['user_type']}
    # Login failed
if len(mg_info) == 0:
login_response['success'] = 1
return http.JsonResponse(login_response)
|
import numpy as np
import os
import clify
import argparse
from config import rl_config as config
config.update(
image_shape_grid=(2, 2),
reductions="sum",
)
grid = [dict(n_train=1, do_train=False)] + [dict(n_train=x) for x in 2**np.arange(0, 18, 2)]
parser = argparse.ArgumentParser()
parser.add_argument("--task", choices="A B C".split(), default='')
args, _ = parser.parse_known_args()
if args.task == "A":
grid = dict(n_train=2**np.arange(14, 18, 2))
config.update(parity='even')
elif args.task == "B":
A_dir = "/home/e2crawfo/rl_parity_A/"
config.load_path = [
os.path.join(A_dir, d, 'weights/best_of_stage_0') for d in os.listdir(A_dir)
]
config.update(parity='odd')
elif args.task == "C":
config.update(parity='odd')
else:
    raise Exception("unknown task: %r" % args.task)
from dps.hyper import build_and_submit, default_host_pool
clify.wrap_function(build_and_submit)(
config=config, distributions=grid, n_param_settings=None, host_pool=default_host_pool)
|
# -*- coding:utf-8 -*-
from sympy import *
from math import log
'''
New game-theoretic model
'''
def game(S, C=3, C_DU=2, C_BS=1, N=32, a=2, e=0.1):
    # C    - base unit price the RU pays per packet
    # C_DU - cost for the DU to transmit one packet
    # C_BS - cost for the BS to transmit one packet
    # N    - total number of packets
    # a    - satisfaction factor
    # e    - packet loss rate
x = Symbol('x')
    # expr1 = x*(C_BS+(C*N*S)/(log(a)*(a+S*x)))
    # derivative of the DU payoff w.r.t. x; note log(a) in both terms
    # (the original used log(2), which only coincides when a == 2)
    expr1 = C_BS + C*N*S/(log(a)*(a+S*x)) + x*(-C*N*S*S)/(log(a)*(a+S*x)*(a+S*x)) - C_DU
# N1 = solve(diff(expr1,x),x)
ans = solve(expr1,x)
    length = len(ans)
    N1 = ans[0]
    for i in range(0, length):
        if ans[i] > 0:
            N1 = ans[i]
    temp = int(N1)
    U_L = temp * (C_BS + C*N*S/(log(a)*(a+S*temp))) - C_DU*temp  # payoff when rounding down
    temp = temp + 1
    U_H = temp * (C_BS + C*N*S/(log(a)*(a+S*temp))) - C_DU*temp  # payoff when rounding up
if U_L>U_H:
N1 = temp -1
U_DU = U_L
else:
N1 = temp
U_DU = U_H
b = C_BS + C*N*S/(log(a)*(a+S*N1))
U_BS = C*N*log(a+S*N1,a)-b*N1-(N-N1)*C_BS
result = [N1,b,U_BS,U_DU]
    print(result)
return result
if __name__ == '__main__':
game(0.7)
|
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
class Plotif():
def __init__(self):
self.axl=[]
self.twinx_l=[]
plt.ion()
def init_twinx(self, fig_list, n_fig, n_total_data):
c_twinx_l=[]
for i in range( n_fig ):
c_twinx_l.append(0)
for i in range( n_total_data ):
if c_twinx_l[ fig_list[i] ]==0 :
self.twinx_l.append(0)
c_twinx_l[fig_list[i]] =1
else:
self.twinx_l.append(fig_list[i])
def set_twinx(self, n_total_data):
for i in range( n_total_data ):
if self.twinx_l[i] != 0:
self.axl[i] = self.axl[ self.twinx_l[i] ].twinx()
# def arrange_ax_x(self, n_figure, ):
def set(self, n_figure, n_total_data, real0time1, fig_list ):
# self.fig, axl = plt.subplots(ncols=2, figsize=(10,4))
# plt.subplots(n_figure, 1)
for i in range (n_total_data):
# print ("AX==", i, n_figure)
if i==0 :
self.axl.append(plt.subplot(n_figure, 1, fig_list[i]+1) )
else:
self.axl.append(plt.subplot(n_figure, 1, fig_list[i]+1, sharex=self.axl[0]))
        if real0time1:
xfmt = mdates.DateFormatter("%H:%M:%S")
xloc = mdates.DayLocator()
for i in range(n_figure):
self.axl[i].xaxis.set_major_locator(xloc)
self.axl[i].xaxis.set_major_formatter(xfmt)
# def set_labels(self,idx, t, y):
def plot_datain(self,idx, t, y):
self.axl[idx].plot(t, y)
def set_xlim(self, ax_idx, min, max):
self.axl[ax_idx].set_xlim( min, max )
def set_ylim(self, ax_idx, min, max):
self.axl[ax_idx].set_ylim( min, max )
def grid_set(self, n):
for i in range(n):
self.axl[i].grid(which='major',axis='both', color='black',linestyle='--')
def plot_exe(self):
plt.draw()
plt.pause(0.01)
#plt.cla()
|
print("Hello World")
print("Goodbye World") #shows goodbye world on terminal
|
#!/usr/bin/env python
# encoding: utf-8
"""
Created by 'bens3' on 2015-01-21.
Copyright (c) 2013 'bens3'. All rights reserved.
Unpublish records marked non web publishable
python tasks/unpublish.py --local-scheduler --date 20150702
"""
import luigi
import ckanapi
from datetime import datetime, timedelta
from ke2mongo import config
from ke2mongo.log import log
from ke2mongo.lib.timeit import timeit
from ke2mongo.lib.ckan import ckan_delete
from ke2mongo.tasks.mongo_catalogue import MongoCatalogueTask
from ke2mongo.tasks.api import APITask
from ke2mongo.targets.mongo import MongoTarget
class UnpublishTask(APITask):
"""
Deprecated - once published, a record cannot be marked "do not publish to internet".
If a KE EMu record has been marked non web publishable, it needs to be deleted from CKAN
NB: This does not remove embargoed records which have already been published.
    You cannot embargo a record after its release.
"""
database = config.get('mongo', 'database')
keemu_schema_file = config.get('keemu', 'schema')
def requires(self):
# Mongo catalogue task for date must have run
yield MongoCatalogueTask(self.date)
@timeit
def run(self):
# Do not run if this is a full export date - all non-publishable records will
        # already have been removed
if int(self.full_export_date) == int(self.date):
log.info("No records to unpublish for full exports")
self.mark_complete()
return
collection = self.output().get_collection('ecatalogue')
        # We only care about records whose status has changed in the past week (6 days to be sure)
date_object = datetime.strptime(str(self.date), '%Y%m%d')
q = dict(
AdmPublishWebNoPasswordFlag='N',
exportFileDate=self.date,
ISODateInserted={'$gte': date_object - timedelta(days=6)}
)
cursor = collection.find(q)
log.info('%s records to unpublish', cursor.count())
for record in cursor:
ckan_delete(self.remote_ckan, record)
# And mark the object as complete
self.mark_complete()
def mark_complete(self):
self.output().touch()
def output(self):
return MongoTarget(database=self.database, update_id=self.task_id)
if __name__ == "__main__":
luigi.run(main_task_cls=UnpublishTask)
|
"""
This type stub file was generated by pyright.
"""
import marshmallow as ma
"""Exception handler"""
class ErrorSchema(ma.Schema):
"""Schema describing the error payload
Not actually used to dump payload, but only for documentation purposes
"""
code = ...
status = ...
message = ...
errors = ...
class ErrorHandlerMixin:
"""Extend Api to manage error handling."""
ERROR_SCHEMA = ...
def handle_http_exception(self, error):
"""Return a JSON response containing a description of the error
This method is registered at app init to handle ``HTTPException``.
- When ``abort`` is called in the code, an ``HTTPException`` is
triggered and Flask calls this handler.
- When an exception is not caught in a view, Flask makes it an
``InternalServerError`` and calls this handler.
flask-smorest republishes webargs's
:func:`abort <webargs.flaskparser.abort>`. This ``abort`` allows the
caller to pass kwargs and stores them in ``exception.data`` so that the
error handler can use them to populate the response payload.
Extra information expected by this handler:
- `message` (``str``): a comment
- `errors` (``dict``): errors, typically validation errors in
parameters and request body
- `headers` (``dict``): additional headers
"""
...
|
import sys
import json
from optparse import OptionParser
import pymongo
__version__="1.0"
__status__ = "Dev"
###############################
def main():
usage = "\n%prog [options]"
parser = OptionParser(usage,version="%prog version___")
parser.add_option("-s","--server",action="store",dest="server",help="dev/tst/beta/prd")
(options,args) = parser.parse_args()
for key in ([options.server]):
if not (key):
parser.print_help()
sys.exit(0)
server = options.server
config_obj = json.loads(open("./conf/config.json", "r").read())
mongo_port = config_obj["dbinfo"]["port"][server]
host = "mongodb://127.0.0.1:%s" % (mongo_port)
admin_user, admin_pass = config_obj["dbinfo"]["admin"]["user"], config_obj["dbinfo"]["admin"]["password"]
admin_db = config_obj["dbinfo"]["admin"]["db"]
db_name = config_obj["dbinfo"]["dbname"]
db_user, db_pass = config_obj["dbinfo"][db_name]["user"], config_obj["dbinfo"][db_name]["password"]
try:
client = pymongo.MongoClient(host,
username=admin_user,
password=admin_pass,
authSource=admin_db,
authMechanism='SCRAM-SHA-1',
serverSelectionTimeoutMS=10000
)
client.server_info()
client[db_name].command('createUser', db_user, pwd=db_pass, roles=[{'role': 'readWrite', 'db': db_name}])
    except pymongo.errors.ServerSelectionTimeoutError as err:
        print(err)
    except pymongo.errors.OperationFailure as err:
        print(err)
if __name__ == '__main__':
main()
|
import json
import logging
import os
import re
import boto3
from botocore.exceptions import ClientError
from cfn_resource_provider import ResourceProvider
log = logging.getLogger()
log.setLevel(os.environ.get("LOG_LEVEL", "INFO"))
request_schema = {
"type": "object",
"required": ["Name"],
"properties": {
"Name": {
"type": "string",
"minLength": 1,
"pattern": "[a-zA-Z0-9_/]+",
"description": "the name of the secret",
},
"Description": {
"type": "string",
"default": "",
"description": "of the secret",
},
"KmsKeyId": {
"type": "string",
"default": "alias/aws/secretsmanager",
"description": "KMS key to use to encrypt the secret",
},
"SecretBinary": {
"type": "string",
"description": "base64 encoded binary secret",
},
"SecretString": {
"description": "secret string or json object or array to be converted to string",
"anyOf": [{"type": "string"}, {"type": "object"}, {"type": "array"}],
},
"RecoveryWindowInDays": {
"type": "integer",
"default": 30,
"description": "number of days a deleted secret can be restored",
"minimum": 7,
"maximum": 30,
},
"ClientRequestToken": {
"type": "string",
"description": "a unique identifier for the new version to ensure idempotency",
},
"NoEcho": {
"type": "boolean",
"default": True,
"description": "the secret as output parameter",
},
"Tags": {
"type": "array",
"items": {
"type": "object",
"required": ["Key", "Value"],
"properties": {"Key": {"type": "string"}, "Value": {"type": "string"}},
},
},
},
}
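# An illustrative set of resource properties that validates against the
# schema above (all names and values here are made up):
#
# {
#     "Name": "my-app/db-password",
#     "Description": "database credentials",
#     "SecretString": {"username": "admin", "password": "s3cr3t"},
#     "Tags": [{"Key": "env", "Value": "prod"}],
# }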
class SecretsManagerSecretProvider(ResourceProvider):
def __init__(self):
super(SecretsManagerSecretProvider, self).__init__()
self._value = None
self.request_schema = request_schema
self.sm = boto3.client("secretsmanager")
self.region = boto3.session.Session().region_name
self.account_id = (boto3.client("sts")).get_caller_identity()["Account"]
def convert_property_types(self):
try:
if "NoEcho" in self.properties and self.properties["NoEcho"] in [
"true",
"false",
]:
self.properties["NoEcho"] = self.properties["NoEcho"] == "true"
if "RecoveryWindowInDays" in self.properties:
self.properties["RecoveryWindowInDays"] = int(
self.properties["RecoveryWindowInDays"]
)
except ValueError as e:
log.error("failed to convert property types %s", e)
def create_arguments(self):
args = {
"Name": self.get("Name"),
"Description": self.get("Description"),
"ClientRequestToken": self.get("ClientRequestToken", self.request_id),
"KmsKeyId": self.get("KmsKeyId"),
}
if self.get("Tags") is not None:
args["Tags"] = self.get("Tags")
if self.get("SecretBinary") is not None:
args["SecretBinary"] = self.get("SecretBinary")
if self.get("SecretString") is not None:
s = self.get("SecretString")
args["SecretString"] = s if isinstance(s, str) else json.dumps(s)
return args
def set_return_attributes(self, response):
self.set_attribute("VersionId", response["VersionId"])
self.physical_resource_id = response["ARN"]
self.no_echo = self.get("NoEcho")
def create(self):
try:
args = self.create_arguments()
response = self.sm.create_secret(**args)
self.set_return_attributes(response)
except ClientError as e:
self.physical_resource_id = "could-not-create"
self.fail("{}".format(e))
def update(self):
if self.get_old("Name", self.get("Name")) != self.get("Name"):
self.fail("Cannot change the name of a secret")
return
        try:
            args = self.create_arguments()
            args["SecretId"] = self.physical_resource_id
            del args["Name"]
            args.pop("Tags", None)  # tags are reconciled separately below
response = self.sm.update_secret(**args)
self.set_return_attributes(response)
if self.get_old("Tags", self.get("Tags")) != self.get("Tags"):
if len(self.get_old("Tags")) > 0:
self.sm.untag_resource(
SecretId=self.physical_resource_id,
TagKeys=list(map(lambda t: t["Key"], self.get_old("Tags"))),
)
self.sm.tag_resource(
SecretId=self.physical_resource_id, Tags=self.get("Tags")
)
except ClientError as e:
self.fail("{}".format(e))
def delete(self):
if re.match(r"^arn:aws:secretsmanager:.*", self.physical_resource_id):
try:
self.sm.delete_secret(
SecretId=self.physical_resource_id,
RecoveryWindowInDays=self.get("RecoveryWindowInDays"),
)
self.success(
"Secret with the name %s is scheduled for deletion"
% self.get("Name")
)
except ClientError as e:
if e.response["Error"]["Code"] != "ResourceNotFoundException":
self.fail("{}".format(e))
else:
self.success(
"Delete request for secret with the name {} is ignored".format(
self.get("Name")
)
)
provider = SecretsManagerSecretProvider()
def handler(request, context):
return provider.handle(request, context)
|
from django.apps import AppConfig
class L24OConfig(AppConfig):
name = 'l24o'
|
# -*- coding: utf-8 -*-
import os.path
# Parsing Paths
for path in ['/one/two/three',
'/one/two/three/',
'/',
'.',
'']:
print path, ' : ', os.path.split(path)
'''
/one/two/three : ('/one/two', 'three')
/one/two/three/ : ('/one/two/three', '')
/ : ('/', '')
. : ('', '.')
: ('', '')
'''
# basename() returns a value equivalent to the second part of the split() value.
# dirname() returns the first part of the split path.
for path in ['/one/two/three',
'/one/two/three/',
'/',
'.',
'']:
print path, ' : ', os.path.basename(path)
'''
/one/two/three : three
/one/two/three/ :
/ :
. : .
:
'''
for path in ['/one/two/three',
'/one/two/three/',
'/',
'.',
'']:
print path, ' : ', os.path.dirname(path)
'''
/one/two/three : /one/two
/one/two/three/ : /one/two/three
/ : /
. :
:
'''
# Building Paths
# if a component is an absolute path, all previous components are thrown away and
# joining continues from the absolute path
for parts in [('one', 'two', 'three'),
('/', 'one', 'two', 'three'),
('/one', '/two', '/three')]:
print parts, ' : ', os.path.join(*parts)
'''
('one', 'two', 'three') : one/two/three
('/', 'one', 'two', 'three') : /one/two/three
('/one', '/two', '/three') : /three
'''
# Testing Files
f = os.path.abspath(__file__)
print 'File :', f
print 'Absolute :', os.path.isabs(f)
print 'Is File :', os.path.isfile(f)
print 'Is Dir :', os.path.isdir(f)
print 'Is Link :', os.path.islink(f)
print 'Mountpoint? :', os.path.ismount(f)
print 'Exists :', os.path.exists(f)
print 'Link Exists? :', os.path.lexists(f)
'''
Absolute : True
Is File : True
Is Dir : False
Is Link : False
Mountpoint? : False
Exists : True
Link Exists? : True
'''
# Traversing a Directory Tree
def visit(arg, dirname, names):
print dirname, arg
for name in names:
subname = os.path.join(dirname, name)
if os.path.isdir(subname):
print ' %s/' % name
else:
print ' %s' % name
os.path.walk('.', visit, '(start)')
'''
.
├── ospath.py
├── test1
│ ├── test11
│ └── test.txt
└── test2
. (start)
ospath.py
test1/
test2/
./test1 (start)
test11/
test.txt
./test1/test11 (start)
./test2 (start)
'''
|
#-*- coding: utf-8 -*-
import pandas as pd
data_file = 'discretization_data.xls'
data = pd.read_excel(data_file)
data = data[u'肝气郁结证型系数'].copy()  # the 'liver-qi stagnation syndrome coefficient' column
k = 4
d1 = pd.cut(data, k, labels=range(k))
print(d1)
w = [1.0 * i / k for i in range(k+1)]  # evenly spaced quantile fractions for an alternative equal-frequency cut
# w = data.describe()
|
"""
https://leetcode.com/problems/merge-two-sorted-lists/
Easy
Merge Two Sorted Lists
Merge two sorted linked lists and return it as a sorted list. The list should be made by splicing together the nodes of the first two lists.
Input: l1 = [1,2,4], l2 = [1,3,4]
Output: [1,1,2,3,4,4]
Example 2:
Input: l1 = [], l2 = []
Output: []
Example 3:
Input: l1 = [], l2 = [0]
Output: [0]
"""
# Definition for singly-linked list.
class ListNode:
def __init__(self, val=0, next=None):
self.val = val
self.next = next
class Solution:
def mergeTwoLists(self, l1: ListNode, l2: ListNode) -> ListNode:
# print (l1, l2)
outl = ListNode(None)
out = outl
while l1 and l2:
if l1.val < l2.val:
out.next = ListNode(l1.val)
l1 = l1.next
else:
out.next = ListNode(l2.val)
l2 = l2.next
out = out.next
while l1:
out.next = ListNode(l1.val)
out = out.next
l1 = l1.next
while l2:
out.next = ListNode(l2.val)
out = out.next
l2 = l2.next
return outl.next
#l1 = [1,2,4]
l1 = ListNode(1)
l1.next = ListNode(2)
l1.next.next = ListNode(4)
#l2 = [1,3,4]
l2 = ListNode(1)
l2.next = ListNode(3)
l2.next.next = ListNode(4)
ans = Solution().mergeTwoLists(l1, l2)
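# Quick check (not part of the LeetCode submission): walk the merged list;
# expected output: 1 1 2 3 4 4
node = ans
while node:
    print(node.val, end=' ')
    node = node.next
print()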
|
import gzip
import os
from gensim import interfaces
from gensim.corpora.csvcorpus import CsvCorpus
from gensim.corpora.textcorpus import walk
from iranlowo.preprocessing import is_valid_owé_format, normalize_diacritics_text
from iranlowo.utils import is_text_nfc
class Corpus(interfaces.CorpusABC):
def __init__(self, path=None, text=None, stream=False, fformat='txt', cformat=None, labels=False, preprocess=None):
"""
Args:
path:
text:
"""
self.path = path
self.text = text
self.labels = labels
self.stream = stream
self.fformat = fformat
self.cformat = cformat
self.preprocess = preprocess
assert self.path or self.text, "You should pass either a path or text to read data from."
if not self.preprocess:
self.preprocess = [normalize_diacritics_text]
self.data = self.read_file_filename_or_text(text=text) if text else self.read_file_filename_or_text()
self.validate_format()
def __iter__(self):
for line in self.data:
yield line
def __len__(self):
return len(self.data)
@staticmethod
def save_corpus(fname, corpus, id2word=None, metadata=False):
pass
def streamfile(self, fobj):
num_text = 0
with fobj as obj:
for line in obj:
num_text += 1
yield line
def read_file_filename_or_text(self, f=None, text=None):
"""
Returns:
"""
path = f if f else self.path
out = []
if text:
return self.handle_preprocessing(text) if self.preprocess else text
elif isinstance(path, list):
for f in path:
text = self.read_file_filename_or_text(f)
out.append(text)
return out
else:
if isinstance(path, str):
if self.fformat == "txt":
text = open(path)
elif self.fformat == "csv":
text = CsvCorpus(path, self.labels)
elif self.fformat == 'gzip':
text = gzip.open(path)
else:
text = self.path.seek(0)
text = text.read() if not self.stream else ''.join(list(self.streamfile(text)))
return self.handle_preprocessing(text) if self.preprocess else text
def handle_preprocessing(self, text):
if callable(self.preprocess):
return self.preprocess(text)
if isinstance(self.preprocess, list):
for technique in self.preprocess:
text = technique(text)
return text
def validate_format(self):
"""
Returns:
"""
data = self.data
if isinstance(data, list):
data = ''.join(data)
if not self.cformat and not is_text_nfc(data):
raise TypeError("The corpus does not comply to the NFC corpus format")
elif self.cformat == "owe":
if not is_valid_owé_format(data):
raise TypeError("The corpus does not comply to the {0} corpus format".format(self.cformat))
else:
return True
def generate(self, size):
"""
Args:
size:
Returns:
"""
if not self.cformat:
raise ValueError("You need to specify a format for generating random text")
class DirectoryCorpus(Corpus):
def __init__(self, path, **kwargs):
self.dir_path = path
self.depth = kwargs.get('min_depth', 0)
self.path = list(self.read_files())
super(DirectoryCorpus, self).__init__(path=self.path, **kwargs)
def read_files(self):
walked = list(walk(self.dir_path))
if not walked:
raise NotADirectoryError("'{}' is not a valid directory".format(self.dir_path))
for depth, dirpath, _, filenames in walked:
if self.depth <= depth:
for path in filenames:
yield os.path.join(dirpath, path)
|
from typing import Any
from pyVmomi.vmodl import ManagedObject
def __getattr__(name: str) -> Any: ... # incomplete
class InvalidArgument(Exception): ...
class ManagedObjectNotFound:
obj: ManagedObject
|
from pymongo import MongoClient
# Create the connection to the database
mongo_con = MongoClient()
# Select the database to use
db = mongo_con['flask-app']
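
# A collection handle can then be obtained from the database; a minimal
# usage sketch (the collection name 'users' is illustrative):
#
# users = db['users']
# users.insert_one({'name': 'alice'})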
|
from gensim.models import LdaModel
import numpy as np
import os
import pickle
from scipy.stats import entropy
import pandas as pd
import seaborn as sns
import plotly
import plotly.graph_objects as go
import plotly.express as px
import pyLDAvis
import pyLDAvis.gensim
import matplotlib.pyplot as plt
from utilities import topic_to_word_cloud
from collections import defaultdict
def compute_term_frequency_in_corpus(corpus):
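    # accumulate raw term counts over the whole corpus; vocab_len is the module-level vocabulary size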
term_freq = [0 for _ in range(vocab_len)]
for doc in corpus:
for word_tuple in doc:
term_freq[word_tuple[0]] += word_tuple[1]
return term_freq
def compute_lift(common_corpus, common_dictionary, word_prob):
vocab_size = len(common_dictionary)
lift_score = [0 for _ in range(vocab_size)]
for doc in common_corpus:
for word_tuple in doc:
lift_score[word_tuple[0]] += word_tuple[1]
for i in range(vocab_size):
lift_score[i] = word_prob[i] / (lift_score[i]*vocab_size)
return lift_score
def compute_jsd(p, q):
    # Jensen-Shannon divergence: mean KL divergence of p and q to their midpoint
    p = np.asarray(p, dtype=float)
    q = np.asarray(q, dtype=float)
    p /= p.sum()
    q /= q.sum()
    m = (p + q) / 2
    return (entropy(p, m) + entropy(q, m)) / 2
def create_pyldavis_plot(lda, common_dictionary, common_corpus):
vis_data = pyLDAvis.gensim.prepare(lda, common_corpus, common_dictionary, sort_topics=False)
model_name = model_file.split("/")[-1]
out_filename = model_dir + "pyldavis/" + model_name + ".html"
outfile = open(out_filename, 'w')
pyLDAvis.save_html(vis_data, fileobj=outfile)
return out_filename
def plot_topic_share_stacked_bar_plot_plotly(df, filename):
print("Plotting stacked bar plot with plotly")
topic_ids = list(df['topic_id'].unique())
valid_topics = []
for k in topic_ids:
if df[df['topic_id']==k]['topic_weight'].sum() > 0.0:
valid_topics.append(k)
df = df[df.topic_id.isin(valid_topics)]
corpus_years = list(df['year'].unique())
fig = go.Figure()
for k in valid_topics:
topic_share = list(df[df['topic_id'] == k].topic_weight)
top_words = list(df[df['topic_id'] == k].topic_words)
topic_name = "Topic_"+str(k)
fig.add_trace(go.Bar(x=corpus_years,
y=topic_share,
name=top_words[0],
hovertext=top_words
))
fig.update_layout(barmode='stack',
xaxis={'categoryorder': 'category ascending'},
xaxis_tickangle=-45,
plot_bgcolor='#fff')
plotly.offline.plot(fig, filename=model_dir + 'threshold_doc/bar_plots/' + filename, auto_open=True)
print("Done saving Plotly plot as", filename, "!")
def compute_topic_popularity_by_year(df):
corpus_years = list(df['year'].unique())
for year in corpus_years:
df_year = df[df['year']==year]
df_year = df_year.assign(topic_rank=df_year['topic_weight'].rank(ascending=False))
df_year = df_year.sort_values(by='topic_rank')
csv_file = model_dir + "threshold_doc/topic_rankings/" + str(year) + ".csv"
df_year.to_csv(csv_file, sep='\t', encoding='utf-8', index=False)
def compute_topic_share_whole_corpus():
# compute normalized topic shares per year
print("Computing normalized topic share per year")
pickle_dir = "results/lda/"
topic_shares = []
pickle_files = sorted(os.listdir(pickle_dir))
for pf in pickle_files:
data = pickle.load(open(pickle_dir + pf, 'rb'))
doc_matrix = np.zeros((len(data), n_topics))
for i,doc in enumerate(data):
for tup in doc:
topic_prop = tup[1]
if topic_prop < topic_thresh:
doc_matrix[i,tup[0]] = 0.0
else:
doc_matrix[i,tup[0]] = topic_thresh
doc_matrix[i] /= doc_matrix[i].sum()
topic_share_ts = doc_matrix.sum(axis=0)
topic_share_ts /= np.sum(topic_share_ts)
#topic_share_ts[topic_share_ts < topic_thresh] = 0.0
#topic_share_ts /= np.sum(topic_share_ts)
topic_shares.append(topic_share_ts)
# create Dataframe from dict
topic_words_list = []
topic_id_list = []
topic_year_list = []
topic_share_list = []
for t in range(len(pickle_files)):
for k in range(n_topics):
topic_words_list.append(topic_words[k])
topic_share_list.append(topic_shares[t][k])
topic_id_list.append(k+1)
topic_year_list.append(start_year + t)
df = {"topic_id": topic_id_list,
"topic_words": topic_words_list,
"topic_weight": topic_share_list,
"year": topic_year_list}
df = pd.DataFrame.from_dict(df)
csv_file = "disappearing_lda.csv"
df.to_csv(csv_file, sep='\t', encoding='utf-8', index=False)
return df, topic_shares
def plot_topic_share_line_plot(topic_shares):
n_timeslices = len(topic_shares)
for k in range(n_topics):
plt.figure(figsize=(16, 10))
plt.plot()
topic_prob = [topic_shares[t][k] for t in range(n_timeslices)]
plt.plot(range(n_timeslices), topic_prob, marker='o', linestyle='-', linewidth=1, label='Topic_'+str(k+1))
plt.xticks(range(n_timeslices), labels=[str(start_year+y) for y in range(n_timeslices)], rotation='vertical')
plt.legend(["Topic "+str(k+1)])
plt.savefig(model_dir + "threshold_doc/line_plots/Topic_"+str(k+1)+".png")
plt.close()
#plt.show()
def create_word_clouds():
topics = lda.get_topics()
for k in range(n_topics):
print("topic = ", k)
topic_dist = topics[k]
image = topic_to_word_cloud(common_dictionary, topic_dist)
image_filename = model_dir + "word_clouds/Topic_" + str(k+1) + ".png"
image.save(image_filename)
start_year = 1854
model_dir = "trained_models/nlf/lda/"
model_file = model_dir + "lda_nlf_50topics"
lda = LdaModel.load(model_file)
common_dictionary = pickle.load(open(model_file + "_dict.pkl", "rb"))
common_corpus = pickle.load(open(model_file+"_corpus.pkl", "rb"))
n_topics = lda.num_topics
vocab_len = len(common_dictionary)
vocab = list(common_dictionary.keys())
topic_words = [" ".join([w[0] for w in lda.show_topic(k)]) for k in range(n_topics)]
topic_thresh = 0.005
df, topic_shares = compute_topic_share_whole_corpus()
#create_word_clouds()
#plot_topic_share_line_plot(topic_shares)
#compute_topic_popularity_by_year(df)
bar_plot_name = "lda_whole_corpus_topic_words.html"
plot_topic_share_stacked_bar_plot_plotly(df, bar_plot_name)
#
# create_pyldavis_plot(lda, common_dictionary, common_corpus)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os, ast
from ConfigParser import ConfigParser
class Config():
def __init__(self, filename="settings.ini"):
self.defaulttimedict = {
"06:00":(12,151,250),
"06:30":(251,13,52),
"12:00":(247,37,21),
"23:30":(251,13,52),
"00:30":(12,151,250)
}
self.filename = filename
self.homepath = os.path.expanduser("~")
self.configfolder = os.path.join(self.homepath, ".studiouhr")
self.path = os.path.join(self.configfolder, self.filename)
self.config = ConfigParser()
self.initialize_settings_directory()
self.read_settings(self.path)
def initialize_settings_directory(self):
if not os.path.exists(self.configfolder):
os.mkdir(self.configfolder)
print "Created directory "+str(self.configfolder)
self.set_defaults()
self.write_settings(self.path)
def write_settings(self, path):
with open(path, 'w') as f:
self.config.write(f)
print "Wrote new settings to "+str(path)
f.close()
def read_settings(self, path):
self.config.read(path)
self.fullscreen = self.retrieve("Display", "fullscreen", True) == "True"
self.textformat = self.retrieve("Digits", "textformat", "%H:%M")
self.fontname = self.retrieve("Digits", "fontname", "Roboto Mono Thin")
self.fontsize = int(self.retrieve("Digits", "fontsize", 20))
self.xoffset = int(self.retrieve("Digits", "xoffset", 20))
self.yoffset = int(self.retrieve("Digits", "yoffset", 20))
self.displayarc = self.retrieve("Arc", "displayarc", True) == "True"
self.arcwidth = int(self.retrieve("Arc", "arcwidth", 1))
self.displayfivemarks = self.retrieve("Dots", "displayfivemarks", True) == "True"
self.displayseconddots = self.retrieve("Dots", "displayseconddots", True) == "True"
self.dotdiameter = int(self.retrieve("Dots", "dotdiameter", 50))
self.secondmargin = int(self.retrieve("Dots", "secondmargin", 60))
self.indicatormargin = int(self.retrieve("Dots", "indicatormargin", 15))
self.clockinterval = float(self.retrieve("Drawscheduler", "clockinterval", 0.01))
self.dotinterval = float(self.retrieve("Drawscheduler", "dotinterval", 0.02))
self.arcinterval = float(self.retrieve("Drawscheduler", "arcinterval", 0.02))
self.indicatorinterval = float(self.retrieve("Drawscheduler", "indicatorinterval", 0.02))
self.timedict = ast.literal_eval(self.retrieve("Zeitfarben", "timedict", self.defaulttimedict))
print "Read settings from "+str(path)
def retrieve(self, configsection, configoption, default):
""" Returns a Value from a config, if it fails returns defaults """
try:
return self.config.get(configsection, configoption)
except:
return default
def set_defaults(self):
# Sets the default values
# Create Sections
self.config.add_section("Display")
self.config.add_section("Digits")
self.config.add_section("Arc")
self.config.add_section("Dots")
self.config.add_section("Drawscheduler")
self.config.add_section("Zeitfarben")
# Set Values
self.config.set("Display", "fullscreen", True)
self.config.set("Digits", "textformat", "%H:%M")
self.config.set("Digits", "fontname", "Roboto Mono Thin")
self.config.set("Digits", "fontsize", 20)
self.config.set("Digits", "xoffset", 20)
self.config.set("Digits", "yoffset", 20)
self.config.set("Arc", "displayarc", True)
self.config.set("Arc", "arcwidth", 1)
self.config.set("Dots", "displayfivemarks", True)
self.config.set("Dots", "displayseconddots", True)
self.config.set("Dots", "dotdiameter", 50)
self.config.set("Dots", "secondmargin", 60)
self.config.set("Dots", "indicatormargin", 15)
self.config.set("Drawscheduler", "clockinterval", 0.01)
self.config.set("Drawscheduler", "dotinterval", 0.02)
self.config.set("Drawscheduler", "arcinterval", 0.02)
self.config.set("Drawscheduler", "indicatorinterval", 0.02)
self.config.set("Zeitfarben", "timedict", self.defaulttimedict)
|
n, m = map(int, input().split(' '))
txt = input().split(' ')
c = []
for i in range(n):
c.append(int(txt[i]))
answer = []
# brute-force every triple of distinct indices, keeping sums that do not exceed m
for i in range(len(c) - 2):
    for j in range(i + 1, len(c) - 1):
        for k in range(j + 1, len(c)):
            o = c[i] + c[j] + c[k]
            if o <= m:
                answer.append(o)
print(max(answer))
|
# Copyright 2021 DAI Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
from functools import lru_cache
from typing import List, Dict, Optional, Union
from web3 import Web3
from web3.datastructures import AttributeDict
from web3.middleware import geth_poa_middleware
from web3.types import BlockData, TxData, TxReceipt, HexStr
from .node import NodeConnectionPool
from ..exceptions import NodeConnectionException, ProcessingException
from ..models.objects_model import Transaction, BlockMetadata, TransactionMetadata, Call
from ..models.w3_model import W3Block, W3Transaction, W3Receipt, W3CallTree, W3Log
from ..semantics.standards import erc20
log = logging.getLogger(__name__)
def connect_chain(
    http_hook: str = None, ipc_hook: str = None, ws_hook: str = None, poa: bool = False
) -> Optional[Web3]:
if http_hook:
provider = Web3.HTTPProvider
hook = http_hook
elif ipc_hook:
provider = Web3.IPCProvider
hook = ipc_hook
elif ws_hook:
provider = Web3.WebsocketProvider
hook = ws_hook
else:
provider = Web3.IPCProvider
hook = "\\\\.\\pipe\\geth.ipc"
w3 = Web3(provider(hook, request_kwargs={"timeout": 600}))
# middleware injection for POA chains
if poa:
w3.middleware_onion.inject(geth_poa_middleware, layer=0)
return w3
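
# Example usage (illustrative endpoint; requires a reachable node):
#
# w3 = connect_chain(http_hook="http://localhost:8545")
# if w3 is not None and w3.isConnected():
#     print(w3.eth.block_number)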
class NodeDataProvider:
default_chain: str
def __init__(self, default_chain=None):
self.default_chain = default_chain
def get_block(
self, block_number: int, chain_id: Optional[str] = None
) -> BlockMetadata:
...
def get_transaction(
self, tx_hash: str, chain_id: Optional[str] = None
) -> TransactionMetadata:
...
def get_full_transaction(
self, tx_hash: str, chain_id: Optional[str] = None
) -> Transaction:
...
def get_calls(self, tx_hash: str, chain_id: Optional[str] = None) -> Call:
...
def get_code_hash(
self, contract_address: str, chain_id: Optional[str] = None
) -> str:
...
def get_erc20_token(
self,
token_address: str,
contract_name: str,
functions,
chain_id: Optional[str] = None,
):
...
def guess_erc20_token(self, contract_address: str, chain_id: Optional[str] = None):
...
def guess_erc20_proxy(self, contract_address: str, chain_id: Optional[str] = None):
...
def guess_erc721_proxy(self, contract_address: str, chain_id: Optional[str] = None):
...
class Web3Provider(NodeDataProvider):
chain: Web3
def __init__(self, nodes: Dict[str, dict], default_chain=None):
super().__init__(default_chain)
self.nodes = nodes
def _get_node_connection(self, chain_id: Optional[str] = None) -> Web3:
chain_id = chain_id or self.default_chain
if chain_id is None:
raise ProcessingException(
"chain_id must be provided as an argument or constructor default"
)
if chain_id not in self.nodes:
raise ProcessingException(
"unknown chain_id, it must be defined in the EthTxConfig object"
)
for connection in NodeConnectionPool(nodes=self.nodes).get_connection(
chain=chain_id
):
w3 = connect_chain(http_hook=connection.url, poa=connection.poa)
if w3.isConnected():
log.info(
"Connected to: %s with latest block %s.",
connection,
w3.eth.block_number,
)
return w3
else:
log.warning("Connection failed to: %s", connection)
raise NodeConnectionException
# get the raw block data from the node
@lru_cache(maxsize=512)
def get_block(self, block_number: int, chain_id: Optional[str] = None) -> W3Block:
chain = self._get_node_connection(chain_id)
raw_block: BlockData = chain.eth.get_block(block_number)
block = W3Block(
chain_id=chain_id or self.default_chain,
difficulty=raw_block.difficulty,
extraData=raw_block.get("extraData", None),
gasLimit=raw_block.gasLimit,
gasUsed=raw_block.gasUsed,
hash=raw_block.hash,
logsBloom=raw_block.logsBloom,
miner=raw_block.miner,
nonce=raw_block.get("nonce", 0),
number=raw_block.number,
parentHash=raw_block.parentHash,
receiptsRoot=raw_block.receiptsRoot,
sha3Uncles=raw_block.sha3Uncles,
size=raw_block.size,
stateRoot=raw_block.stateRoot,
timestamp=raw_block.timestamp,
totalDifficulty=raw_block.totalDifficulty,
transactions=raw_block.transactions,
transactionsRoot=raw_block.transactionsRoot,
uncles=raw_block.uncles,
)
return block
# get the raw transaction data from the node
@lru_cache(maxsize=512)
def get_transaction(
self, tx_hash: str, chain_id: Optional[str] = None
) -> W3Transaction:
chain = self._get_node_connection(chain_id)
raw_tx: TxData = chain.eth.get_transaction(HexStr(tx_hash))
transaction = W3Transaction(
chain_id=chain_id or self.default_chain,
blockHash=raw_tx.blockHash,
blockNumber=raw_tx.blockNumber,
from_address=raw_tx["from"],
gas=raw_tx.gas,
gasPrice=raw_tx.gasPrice,
hash=raw_tx.hash,
input=raw_tx.input,
nonce=raw_tx.nonce,
r=raw_tx.r,
s=raw_tx.s,
to=raw_tx.to,
transactionIndex=raw_tx.transactionIndex,
v=raw_tx.v,
value=raw_tx.value,
)
return transaction
@lru_cache(maxsize=512)
def get_receipt(self, tx_hash: str, chain_id: Optional[str] = None) -> W3Receipt:
chain = self._get_node_connection(chain_id)
raw_receipt: TxReceipt = chain.eth.get_transaction_receipt(tx_hash)
_root = raw_receipt.root if hasattr(raw_receipt, "root") else None
_logs = [
W3Log(
tx_hash=tx_hash,
chain_id=chain_id or self.default_chain,
address=_log.address,
blockHash=_log.blockHash,
blockNumber=_log.blockNumber,
data=_log.data,
logIndex=_log.logIndex,
removed=_log.removed,
topics=_log.topics,
transactionHash=_log.transactionHash,
transactionIndex=_log.transactionIndex,
)
for _log in raw_receipt.logs
]
receipt = W3Receipt(
tx_hash=tx_hash,
chain_id=chain_id or self.default_chain,
blockHash=raw_receipt.blockHash,
blockNumber=raw_receipt.blockNumber,
contractAddress=raw_receipt.contractAddress,
cumulativeGasUsed=raw_receipt.cumulativeGasUsed,
from_address=raw_receipt["from"],
gasUsed=raw_receipt.gasUsed,
logs=_logs,
logsBloom=raw_receipt.logsBloom,
root=_root,
status=raw_receipt.get("status", True),
to_address=raw_receipt.to,
transactionHash=raw_receipt.transactionHash,
transactionIndex=raw_receipt.transactionIndex,
)
return receipt
@staticmethod
def _get_custom_calls_tracer():
return open(os.path.join(os.path.dirname(__file__), "static/tracer.js")).read()
@lru_cache(maxsize=512)
def get_calls(self, tx_hash: str, chain_id: Optional[str] = None) -> W3CallTree:
        # the tracer is a temporary, fixed implementation of the geth call tracer
chain = self._get_node_connection(chain_id)
tracer = self._get_custom_calls_tracer()
response = chain.manager.request_blocking(
"debug_traceTransaction", [tx_hash, {"tracer": tracer, "timeout": "60s"}]
)
return self._create_call_from_debug_trace_tx(
tx_hash, chain_id or self.default_chain, response
)
# get the contract bytecode hash from the node
@lru_cache(maxsize=512)
def get_code_hash(
self, contract_address: str, chain_id: Optional[str] = None
) -> str:
chain = self._get_node_connection(chain_id)
byte_code = chain.eth.get_code(Web3.toChecksumAddress(contract_address))
code_hash = Web3.keccak(byte_code).hex()
return code_hash
# get the erc20 token data from the node
def get_erc20_token(
self,
token_address: str,
contract_name: str,
functions,
chain_id: Optional[str] = None,
):
name_abi = symbol_abi = decimals_abi = ""
if functions:
for function in functions.values():
if (
function.name == "name"
and len(function.inputs) == 0
and len(function.outputs) == 1
):
name_type = function.outputs[0].parameter_type
name_abi = (
'{"name":"name", "constant":true, "payable":false, "type":"function", '
' "inputs":[], "outputs":[{"name":"","type":"%s"}]}' % name_type
)
elif (
function.name == "symbol"
and len(function.inputs) == 0
and len(function.outputs) == 1
):
symbol_type = function.outputs[0].parameter_type
symbol_abi = (
'{"name":"symbol", "constant":true, "payable":false,"type":"function", '
' "inputs":[], "outputs":[{"name":"","type":"%s"}]}'
% symbol_type
)
elif (
function.name in ["decimals", "dec"]
and len(function.inputs) == 0
and len(function.outputs) == 1
):
decimals_type = function.outputs[0].parameter_type
decimals_abi = (
'{"name":"decimals", "constant":true, "payable":false,"type":"function", '
' "inputs":[], "outputs":[{"name":"","type":"%s"}]}'
% decimals_type
)
abi = f'[{",".join([name_abi, symbol_abi, decimals_abi])}]'
try:
chain = self._get_node_connection(chain_id)
token = chain.eth.contract(
address=Web3.toChecksumAddress(token_address), abi=abi
)
name = token.functions.name().call() if name_abi else contract_name
if isinstance(name, bytes):
name = name.decode("utf-8").replace("\x00", "")
symbol = token.functions.symbol().call() if symbol_abi else contract_name
if isinstance(symbol, bytes):
symbol = symbol.decode("utf-8").replace("\x00", "")
decimals = token.functions.decimals().call() if decimals_abi else 18
except Exception:
name = symbol = contract_name
decimals = 18
return dict(address=token_address, symbol=symbol, name=name, decimals=decimals)
    # guess if the contract is an ERC-20 token and get the data
@lru_cache(maxsize=512)
def guess_erc20_token(self, contract_address, chain_id: Optional[str] = None):
chain = self._get_node_connection(chain_id)
byte_code = chain.eth.get_code(Web3.toChecksumAddress(contract_address)).hex()
if all(
"63" + signature[2:] in byte_code
for signature in (
erc20.erc20_transfer_function.signature,
erc20.erc20_transferFrom_function.signature,
erc20.erc20_approve_function.signature,
)
) and all(
signature[2:] in byte_code
for signature in (
erc20.erc20_transfer_event.signature,
erc20.erc20_approval_event.signature,
)
):
name_abi = (
'{"name":"name", "constant":true, "payable":false,'
' "type":"function", "inputs":[], "outputs":[{"name":"","type":"string"}]}'
)
symbol_abi = (
'{"name":"symbol", "constant":true, "payable":false,'
'"type":"function", "inputs":[], "outputs":[{"name":"","type":"string"}]}'
)
decimals_abi = (
'{"name":"decimals", "constant":true, "payable":false,'
'"type":"function", "inputs":[], "outputs":[{"name":"","type":"uint8"}]}'
)
abi = f'[{",".join([name_abi, symbol_abi, decimals_abi])}]'
try:
token = chain.eth.contract(
address=Web3.toChecksumAddress(contract_address), abi=abi
)
name = token.functions.name().call()
symbol = token.functions.symbol().call()
decimals = token.functions.decimals().call()
return dict(
address=contract_address,
symbol=symbol,
name=name,
decimals=decimals,
)
except Exception:
pass
return None
    # guess if the contract is an ERC-20 token proxy and get the data
@lru_cache(maxsize=512)
def guess_erc20_proxy(self, contract_address, chain_id: Optional[str] = None):
chain = self._get_node_connection(chain_id)
name_abi = (
'{"name":"name", "constant":true, "payable":false,'
' "type":"function", "inputs":[], "outputs":[{"name":"","type":"string"}]}'
)
symbol_abi = (
'{"name":"symbol", "constant":true, "payable":false,'
'"type":"function", "inputs":[], "outputs":[{"name":"","type":"string"}]}'
)
decimals_abi = (
'{"name":"decimals", "constant":true, "payable":false,'
'"type":"function", "inputs":[], "outputs":[{"name":"","type":"uint8"}]}'
)
abi = f'[{",".join([name_abi, symbol_abi, decimals_abi])}]'
try:
token = chain.eth.contract(
address=Web3.toChecksumAddress(contract_address), abi=abi
)
name = token.functions.name().call()
symbol = token.functions.symbol().call()
decimals = token.functions.decimals().call()
return dict(symbol=symbol, name=name, decimals=decimals)
except Exception:
pass
return None
    # guess if the contract is an ERC-721 token proxy and get the data
@lru_cache(maxsize=512)
def guess_erc721_proxy(self, contract_address, chain_id: Optional[str] = None):
chain = self._get_node_connection(chain_id)
name_abi = (
'{"name":"name", "constant":true, "payable":false,'
' "type":"function", "inputs":[], "outputs":[{"name":"","type":"string"}]}'
)
symbol_abi = (
'{"name":"symbol", "constant":true, "payable":false,'
'"type":"function", "inputs":[], "outputs":[{"name":"","type":"string"}]}'
)
abi = f'[{",".join([name_abi, symbol_abi])}]'
try:
token = chain.eth.contract(
address=Web3.toChecksumAddress(contract_address), abi=abi
)
name = token.functions.name().call()
symbol = token.functions.symbol().call()
return dict(symbol=symbol, name=name)
except Exception:
pass
return None
@lru_cache(maxsize=512)
def get_full_transaction(self, tx_hash: str, chain_id: Optional[str] = None):
w3transaction = self.get_transaction(tx_hash, chain_id)
w3receipt = self.get_receipt(tx_hash, chain_id)
w3calltree = self.get_calls(tx_hash, chain_id)
return Transaction.from_raw(
w3transaction=w3transaction, w3receipt=w3receipt, w3calltree=w3calltree
)
@staticmethod
def _create_call_from_debug_trace_tx(
tx_hash: str, chain_id: str, input_rpc: AttributeDict
) -> W3CallTree:
        def prep_raw_dict(dct: Union[AttributeDict, Dict]):
if not isinstance(dct, dict):
dct = dct.__dict__
dct["from_address"] = dct.pop("from", None)
dct["to_address"] = dct.pop("to", None)
dct["input"] = dct.pop("input", "0x")
dct["output"] = dct.pop("output", "0x")
calls = dct.pop("calls", [])
return dct, calls
obj = input_rpc.__dict__
tmp_call_tree = []
w3input, main_parent_calls = prep_raw_dict(obj)
main_parent = W3CallTree(tx_hash=tx_hash, chain_id=chain_id, **w3input)
for main_parent_call in main_parent_calls:
w3input, main_parent_calls = prep_raw_dict(main_parent_call)
main_parent_child = W3CallTree(
tx_hash=tx_hash, chain_id=chain_id, **w3input
)
main_parent.calls.append(main_parent_child)
if len(main_parent_calls) > 0:
tmp_call_tree.append(
{"parent": main_parent_child, "children": main_parent_calls}
)
while len(tmp_call_tree) != 0:
new_call_tree = []
for pair in tmp_call_tree:
parent_call: W3CallTree = pair["parent"]
child_calls: List = pair["children"]
if child_calls is not None:
for child_call in child_calls:
w3input, child_child_call = prep_raw_dict(child_call)
child = W3CallTree(
tx_hash=tx_hash, chain_id=chain_id, **w3input
)
parent_call.calls.append(child)
                        if len(child_child_call) > 0:
new_call_tree.append(
{"parent": child, "children": child_child_call}
)
tmp_call_tree = new_call_tree
return main_parent
|
from pyspark import SparkContext
from pyspark import SparkConf
def CreateSparkContext():
    '''
    Spark configuration:
    1. Set the app name shown in the Spark / Hadoop-YARN UI
    2. Hide Spark's console progress output to keep the terminal uncluttered
    '''
sparkConf = SparkConf().setAppName('FilmRecommend') \
.set('spark.ui.showConsoleProgress','false')
sc = SparkContext(conf=sparkConf)
print('master='+sc.master)
    SetLogger(sc)  # reduce log verbosity
    SetPath(sc)  # set the data file path
return sc
def SetLogger(sc):
logger = sc._jvm.org.apache.log4j
logger.LogManager.getLogger("org").setLevel(logger.Level.ERROR)
logger.LogManager.getLogger("akka").setLevel(logger.Level.ERROR)
logger.LogManager.getRootLogger().setLevel(logger.Level.ERROR)
def SetPath(sc):
global Path
if sc.master[0:5]=="local":
Path = "file:/home/hadoop/eclipse-workspace/FilmRecommend/"
else:
Path = "hdfs://ubuntu:9000/sparkproject/FilmRecommend/"
if __name__=='__main__':
    print('Starting FilmRecommend')
sc = CreateSparkContext()
    print('Reading the data file...')
textFile = sc.textFile(Path+"data/words.txt")
print("该数据文件共%s行" % textFile.count())
countsRDD = textFile.flatMap(lambda line:line.split(' ')) \
.map(lambda x:(x,1)).reduceByKey(lambda x,y:x+y)
    print('The word count produced %s entries' % countsRDD.count())
    print('Saving to the text file output')
try:
countsRDD.saveAsTextFile(Path+'data/output')
except Exception as e:
        print('The output directory already exists; please delete it first')
sc.stop()
|
""" Представлен список чисел. Определить элементы списка, не имеющие
повторений. Сформировать итоговый массив чисел, соответствующих
требованию. Элементы вывести в порядке их следования в исходном списке.
Для выполнения задания обязательно использовать генератор."""
num_list = [1, 2, 3, 2, 4, 5, 5, 6, 1]
new_list = [itm for itm in num_list if num_list.count(itm) == 1]
print(new_list)
|
# Ryan Spies
# 8/5/2014
# Python 2.6.5
# This script calculates a mean daily max and min temperature for each month
#!!!!!!!!!!! Units left in degrees F !!!!!!!!!!!!!!!!!!!!!!!
#!!!!!!!!!!! Data must be 6 hour time steps !!!!!!!!!!!!!!!!!!!!!!
import os
import numpy as np
from dateutil import parser
from dateutil.relativedelta import relativedelta
import csv
path = os.getcwd()
######################## User Input Section ############################
rfc = 'NERFC'
# give directory of original RFC MAP/MAT files
map_dir = 'P:\\NWS\\Calibration_NWS\\NERFC\\fromNERFC\\MAT'
###################### End User Input ##################################
rfc_files = os.listdir(map_dir)
rfc_basins = []
for name in rfc_files:
    if name.endswith('.txt'): # only use text files in the directory
rfc_basins.append(name)
for files in rfc_files:
    # locate only the MAT files
if files[-9:-4] == 'MAT06' or files[-7:-4] == 'MAT':
basin = files[:5]
basin_title = str.upper(basin)
print basin_title
# enter file locations for old and new files
file1 = map_dir + '\\' + files
csvfile = open('P:\\NWS\\Calibration_NWS\\NERFC\\fromNERFC\\MAT\\' + basin_title + '_monthly_tmin_tmax_.csv','w')
writer = csv.writer(csvfile)
writer.writerow(['Monthly mean daily max and min temperatures (F)'])
writer.writerow(['Year', 'Variable', 'Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'])
day_data= {}
fg = open(file1,'r')
# create a dictionary with 6hr data grouped into daily data
print 'Creating dictionary with 6hr data grouped into daily data...'
for each in fg:
spl = each.split('\t')
date = parser.parse(spl[0])
all_day = date.replace(hour=12)
temp = float(spl[1])
if temp < -100 or temp > 120:
                print 'Warning... bad data: ' + str(date)
temp = 'na'
if all_day in day_data:
day_data[all_day].append(temp)
else:
day_data[all_day] = [temp]
fg.close()
start = min(day_data).replace(month=1,day=1,hour=12)
end = max(day_data).replace(month=12,day=1,hour=12)
# create two dictionaries with tmin and tmax data for each day
print 'Calculating Tmin and Tmax for each day...'
mnday_min = {}; mnday_max = {}
for day in day_data:
tmin = min(day_data[day])
tmax = max(day_data[day])
day_mn = day.replace(day=1)
if day_mn in mnday_min:
mnday_min[day_mn].append(tmin)
else:
mnday_min[day_mn] = [tmin]
if day_mn in mnday_max:
mnday_max[day_mn].append(tmax)
else:
mnday_max[day_mn] = [tmax]
# write to new csv file: the mean daily values for each month
print 'writing data to csv file...'
year_max = []; year_min = []
while start <= end:
## tmax output
if start in mnday_max:
year_max.append(np.average(mnday_max[start]))
else:
year_max.append('na')
if start.month == 12:
csvfile.write(str(start.year) + ',' + 'Tmax' + ',')
for each in year_max:
csvfile.write(str(each) + ',')
csvfile.write('\n')
year_max = []
# tmin output
if start in mnday_min:
year_min.append(np.average(mnday_min[start]))
else:
year_min.append('na')
if start.month == 12:
csvfile.write(str(start.year) + ',' + 'Tmin' + ',')
for each in year_min:
csvfile.write(str(each) + ',')
csvfile.write('\n')
year_min = []
#writer.writerow([start.year,'Tmax',np.average(mnday_max[start.replace(month=1)]),np.average(mnday_max[start.replace(month=2)]),np.average(mnday_max[start.replace(month=3)]),np.average(mnday_max[start.replace(month=4)]),np.average(mnday_max[start.replace(month=5)]),np.average(mnday_max[start.replace(month=6)]),np.average(mnday_max[start.replace(month=7)]),np.average(mnday_max[start.replace(month=8)]),np.average(mnday_max[start.replace(month=9)]),np.average(mnday_max[start.replace(month=10)]),np.average(mnday_max[start.replace(month=11)]),np.average(mnday_max[start.replace(month=12)])])
start = start + relativedelta(months=1)
csvfile.close()
print 'Finito!!!'
|
KEYS_TO_ORD = {
'c' : 0,
'cis': 1,
'des': 1,
'd': 2,
'dis' : 3,
'e' : 4,
    'es' : 3,
'f': 5,
'fis': 6,
'ges': 6,
'g' : 7,
'gis': 8,
'as': 8,
'a': 9,
'b': 10,
'ais': 10,
'h' : 11
}
ORD_TO_KEY = {
0 : 'c',
1 : 'cis',
2 : 'd',
3 : 'dis',
4 : 'e',
5 : 'f',
6 : 'fis',
7 : 'g',
8 : 'gis',
9 : 'a',
10 : 'b',
11 : 'h'
}
def transpose_chord(chord, transposition):
""" Transposes a chord >without a base sound< a given number of halftones up """
low = chord[0].lower() + chord[1:]
transposed = "??"
for prefix_length in [3,2,1]:
if len(low) >= prefix_length and low[:prefix_length] in KEYS_TO_ORD:
new_ord = (KEYS_TO_ORD[low[:prefix_length]] + transposition) % 12
transposed = ORD_TO_KEY[new_ord] + low[prefix_length:]
break
if chord[0].isupper():
return transposed[0].upper() + transposed[1:]
else:
return transposed
def transpose_sequence(chord_sequence, transposition):
""" Transposes a sequence of chords a given number of halftones up.
Chords with specified base sounds are split and the sound is transposed separately."""
input_chords = [str(x) for x in chord_sequence.split()]
output_chords = list()
for chord in input_chords:
if chord.find("/") != -1: # for chords with specified base sound
transposed = "/".join([transpose_chord(x, transposition) for x in chord.split("/")])
else:
transposed = transpose_chord(chord, transposition)
output_chords.append(transposed)
return ' '.join(output_chords)
def transpose_lyrics(parsed_lyrics, transposition):
""" Transposes a song represented as a list of paragraphs and returns result in the same format """
result = []
for paragraph in parsed_lyrics:
section = []
for (text, chords, is_indented, are_chords_extra) in paragraph:
if chords.find("(") != -1:
begin = chords.find("(")
end = chords.find(")")
if end == -1 or end < begin:
                    raise SyntaxError("Incorrect '(' brackets in chords")
core = chords[:begin].strip()
bracketed = chords[begin+1:end].strip()
transposed = "%s (%s)" % (transpose_sequence(core, transposition), transpose_sequence(bracketed, transposition))
else:
transposed = transpose_sequence(chords.strip(), transposition)
section.append(
(
text,
transposed,
is_indented,
are_chords_extra
)
)
result.append(section)
return result
|
#!/usr/bin/env python
'''
chat_server.py -- Simple chat server for chat_client.py
'''
import sys
import socket
import select
# TODO: Store all config values in a YAML config file.
HOST = '127.0.0.1'
PORT = 5000
MAX_CLIENTS = 3
RECV_BUFFER = 4096
# Zero value makes it wait forever.
SERVER_TIMEOUT = 0
# Track open connections
CONNECTION_LIST = []
def send_message(server_socket, originating_sock, message):
'''
Message all connected clients
'''
for socket_item in CONNECTION_LIST:
# Message everyone except the server and originating client.
if socket_item != server_socket and socket_item != originating_sock:
try:
socket_item.send(message)
except socket.error:
socket_item.close()
if socket_item in CONNECTION_LIST:
                    CONNECTION_LIST.remove(socket_item)
def chat_server():
'''
Chat server implementation
'''
# TODO: Extend support to include IPv6 protocol
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Avoid 'socket.error: [Errno 98] Address already in use' when socket is
# in TIME_WAIT state.
# https://docs.python.org/2/library/socket.html#example
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind((HOST, PORT))
server_socket.listen(MAX_CLIENTS)
    # Start by listening for incoming connections to this server
CONNECTION_LIST.append(server_socket)
print "Chat server started on {}:{} ".format(HOST, PORT)
while True:
# Wait for server socket events
ready_to_read, _, _ = select.select(CONNECTION_LIST, [], [],
SERVER_TIMEOUT)
for sock in ready_to_read:
if sock == server_socket:
                # Incoming connection request
sockfd, addr = server_socket.accept()
CONNECTION_LIST.append(sockfd)
print "Connected with client {}".format(addr)
send_message(server_socket, sockfd,
"{} entered our chatting room" .format(addr))
else:
# Message received from a client
try:
data = sock.recv(RECV_BUFFER)
if data:
# Relay the message
msg = '{}: {}'.format(sock.getpeername(), data)
send_message(server_socket, sock, msg)
else:
# De-list the client
if sock in CONNECTION_LIST:
CONNECTION_LIST.remove(sock)
print "DEBUG: client {} is offline".format(addr)
# No data means the connection has been broken
send_message(server_socket, sock,
"Client {} is offline".format(addr))
except socket.error as err:
send_message(server_socket, sock,
"Client {} is offline".format(addr))
print "ERROR: Failed to comunicate with client. {}" \
"".format(err)
continue
server_socket.close()
#
# M A I N
#
if __name__ == "__main__":
sys.exit(chat_server())
|
from django.contrib.auth.models import AbstractUser
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models
# Create your models here.
from django.db.models import CASCADE
from django.utils.datetime_safe import datetime
class Poster(AbstractUser):
pass
class Profile(models.Model):
objects = models.Manager()
user = models.OneToOneField(Poster, on_delete=models.CASCADE, related_name='profile')
    first_name = models.CharField(max_length=50, blank=True)
last_name = models.CharField(max_length=50, blank=True)
gender = models.CharField(max_length=6, blank=True, default='')
date_of_birth = models.DateField(blank=True, default='1900-01-01')
avatar = models.FileField(default='default_pic.jpg')
class Post(models.Model):
created_date = models.DateTimeField(default=datetime.now)
title = models.CharField(max_length=250)
original_content = models.BooleanField(blank=True, default=False)
spoiler = models.BooleanField(blank=True, default=False)
nsfw = models.BooleanField(blank=True, default=False)
nr_likes = models.IntegerField(default=0)
class Meta:
abstract = True
def __str__(self):
return self.title
class Comment(models.Model):
user = models.ForeignKey(Profile, on_delete=models.CASCADE)
created_date = models.DateTimeField()
comment = models.CharField(max_length=250, default='', blank=True)
# Following fields are required for using GenericForeignKey
content_type = models.ForeignKey(ContentType, on_delete=CASCADE, blank=True)
object_id = models.PositiveIntegerField()
post = GenericForeignKey()
class Like(models.Model):
user = models.ForeignKey(Poster, on_delete=models.CASCADE)
created_date = models.DateTimeField()
# Following fields are required for using GenericForeignKey
content_type = models.ForeignKey(ContentType, on_delete=CASCADE)
object_id = models.PositiveIntegerField()
post = GenericForeignKey()
class Text(Post):
user = models.ForeignKey(Poster, on_delete=models.CASCADE)
text = models.CharField(max_length=1000, default='', blank=True)
likes = GenericRelation(Like, related_query_name='like_text')
comments = GenericRelation(Comment)
@property
def class_name(self):
return "texts"
class File(Post):
user = models.ForeignKey(Poster, on_delete=models.CASCADE)
file = models.FileField()
likes = GenericRelation(Like, related_query_name='like_file')
comments = GenericRelation(Comment)
@property
def class_name(self):
return "files"
class Link(Post):
user = models.ForeignKey(Poster, on_delete=models.CASCADE)
url = models.URLField()
likes = GenericRelation(Like, related_query_name='like_link')
comments = GenericRelation(Comment)
@property
def class_name(self):
return "links"
|
from threading import Thread
import requests
import time
import json
class wtm(Thread):
    def __init__(self, win):  # Initializing the request for the Yahoo Weather API
        ############### ALL COMMENTED OUT, WAITING FOR API KEY ###############
        # Basic info
        self.url = "https://weather-fetcher-nmiot.herokuapp.com/weather"
        Thread.__init__(self)
        self.win = win  # keep a reference to the window object so we can pass info to it
def run(self):
while True:
try:
r = requests.get(url=self.url)
r.raise_for_status()
self.win.updateWeatherData(json.loads(r.text)["forecasts"])
time.sleep(3600) #Updating the weather every hour
            except Exception as err:
                print("couldn't get weather:", err)
                time.sleep(10)  # try again in 10 seconds
#with open('res.json','r') as file: #Reading from res.json as placeholder
# response=json.load(file)
|
from PyQt5 import QtGui, QtWidgets
from bsp.leveleditor.DocObject import DocObject
from enum import IntEnum
# What viewport type can a tool be used in?
class ToolUsage(IntEnum):
View2D = 0
View3D = 1
Both = 2
class BaseTool(DocObject):
Name = "Tool"
KeyBind = None
WantButton = True
ToolTip = "Base tool"
StatusTip = None
Icon = None
Usage = ToolUsage.Both
def __init__(self, mgr):
DocObject.__init__(self, mgr.doc)
self.enabled = False
self.activated = False
self.mgr = mgr
self.options = None
def cleanup(self):
self.enabled = None
self.activated = None
self.mgr = None
DocObject.cleanup(self)
def toolTriggered(self):
pass
def enable(self):
print("Enable", self.Name)
self.enabled = True
self.activate()
def activate(self):
self.activated = True
self.doc.taskMgr.add(self.__updateTask, self.Name + "-UpdateTool")
if self.options:
self.options.setTool(self)
self.mgr.toolProperties.addGroup(self.options)
def __updateTask(self, task):
self.update()
return task.cont
def update(self):
pass
def disable(self):
print("Disable", self.Name)
self.deactivate()
self.enabled = False
self.ignoreAllGlobal()
def deactivate(self):
self.activated = False
self.doc.taskMgr.remove(self.Name + "-UpdateTool")
self.ignoreAll()
self.mgr.toolProperties.clear()
|
import subprocess
import psutil
import platform
import tensorflow_datasets as tfds
import json
import os
import http.client
import requests
import multiprocessing
from SCASSHManager import listen_and_accept_requests
import time
agent_registered = False
base_conda_env_installed = False
accepting_jobs = False
anaconda_url = 'https://repo.anaconda.com/archive/'
conda_download_dir = '/users/rijupahwa/Downloads/conda_repo/'
# NOTE: keys must match the values returned by platform.system(); the
# 'Windows' entry below still points at the macOS installer (placeholder).
anaconda_installer = {'Darwin': 'https://repo.anaconda.com/archive/Anaconda3-2019.10-MacOSX-x86_64.sh' ,
                      'Windows': 'https://repo.anaconda.com/archive/Anaconda3-2019.10-MacOSX-x86_64.sh' ,
                      'Linux': 'https://repo.anaconda.com/archive/Anaconda3-2019.10-Linux-x86_64.sh' ,
                      'power_linux': 'https://repo.anaconda.com/archive/Anaconda3-2019.10-Linux-ppc64le.sh'}
cloud_server_hostname = "ec2-54-201-232-156.us-west-2.compute.amazonaws.com"
monitor_pid = None
py_function_pid = None
ssh_cloud_pid = None
notebook_service = None
py_function_service = None
notebook_service_port = None
py_function_service_port = None
ssh_cloud_status = None
agent_id = None
from enum import Enum
class SystemInfo:
def __init__(self ,
system_id ,
mac_addr ,
os ,
os_version ,
cpu_count ,
total_memory ,
cpu_architecture ,
processor_type ,
machine_type ,
gpu_type ,
gpu_model ,
total_disk_space ,
free_disk_space):
self.system_id = system_id
self.mac_addr = mac_addr
self.os = os
self.os_version = os_version
self.cpu_count = cpu_count
self.total_memory = total_memory
self.cpu_architecture = cpu_architecture
self.processor_type = processor_type
self.machine_type = machine_type
self.gpu_type = gpu_type
self.gpu_model = gpu_model
self.total_disk_space = total_disk_space
self.free_disk_space = free_disk_space
class MonitoringInfo:
def __init__(self ,
monitoring_id ,
agent_id ,
current_cpu_usage ,
current_memory_usage ,
current_network_usage ,
current_gpu_usage ,
current_free_disk_space):
self.monitoring_id = monitoring_id
self.agent_id = agent_id
self.current_cpu_usage = current_cpu_usage
self.current_memory_usage = current_memory_usage
self.current_network_usage = current_network_usage
self.current_gpu_usage = current_gpu_usage
self.current_free_disk_space = current_free_disk_space
class JobStatus (Enum):
PROCESSING = 0
ACCEPTING_JOBS = 1
NOT_ACCEPTING_JOBS = 2
class Agent:
def __init__(self , agent_id , agent_password , agent_key , job_status , system_id , policy):
self.agent_id = agent_id
self.agent_password = agent_password
self.agent_key = agent_key
self.system_id = system_id
self.job_status = job_status
self.policy = policy
'''
{
'free_cpu_threshold': 50 , # in percent
'free_memory_threshold': 10 , # in GB
'free_disk_space': 100 # in GB
}
'''
def export_conda_env(env_file_name='blah.yml'):
subprocess.run ([ "cd" , "/Users/rijupahwa/Library/Mobile Documents/com~apple~CloudDocs/cloudOS/code/trials" ])
subprocess.run ([ "python" , "--version" ])
env_file = open (env_file_name , "w")
# text = subprocess.run ([ "conda" , "list" ])
text = subprocess.run ([ "conda" , "env" , "export" ] , stdout=env_file)
print (text)
def import_conda_env(env_file_name='blah.yml'):
subprocess.run ([ "cd" , "/Users/rijupahwa/Library/Mobile Documents/com~apple~CloudDocs/cloudOS/code/trials" ])
env_file = open (env_file_name , "w")
# text = subprocess.run ([ "conda" , "list" ])
# conda env create -f environment.yml
text = subprocess.run ([ "conda" , "env" , "create" , "-n" , "test_env1" , "--prefix" , "./envs" ] ,
stdin=env_file)
def register_system_with_supercompute():
sys_info = {
'os': platform.system () ,
'os_version': platform.mac_ver ()[ 0 ] ,
'cpu_count': psutil.cpu_count () ,
'cpu_architecture': platform.architecture () ,
'processor_type': platform.processor () ,
'total_memory': psutil.virtual_memory ()[ 0 ] ,
'total_disk_space': psutil.disk_usage ('/')[ 0 ] ,
'free_disk_space': psutil.disk_usage ('/')[ 1 ]
}
resp = requests.post ('http://127.0.0.1:5000/system/register/' , json=sys_info)
print (resp.json ())
sys_info = resp.json ()
with open ('sys_config.json' , 'w') as f:
json.dump (sys_info , f)
return sys_info[ 'system_id' ]
def register_agent_with_supercompute(system_id):
agent_info = {
'system_id': system_id ,
'agent_password': 'Default' ,
'agent_key': 'from the file' ,
'policy': {
'free_cpu_threshold': 50 , # in percent
'free_memory_threshold': 10 , # in GB
'free_disk_space': 100 # in GB
}
}
resp = requests.post ('http://127.0.0.1:5000/agent/register/' , json=agent_info)
print (resp.json ())
agent_info = resp.json ()
with open ('agent_config.json' , 'w') as f:
json.dump (agent_info , f)
return agent_info[ 'agent_id' ]
def setup_jupyter_notebook():  # module-level function, so no 'self'
# conda_installed =
# dm = tfds.download.download_manager.DownloadManager(download_dir=conda_download_dir)\
# .download(anaconda_installer[platform.system()])
# subprocess.run(['cd',conda_download_dir])
subprocess.run ([ 'pwd' ])
filename = anaconda_installer[ platform.system () ].rsplit ('/')[ -1 ]
subprocess.run ([ 'bash' , conda_download_dir + filename ])
def collect_monitoring_data(agent_id):
    # write to the module-level port globals instead of creating shadowing locals
    global notebook_service_port , py_function_service_port
    monitoring_info = {}
monitoring_info = {'agent_id': agent_id ,
'current_cpu_usage': psutil.cpu_percent (interval=1) ,
'available_memory': psutil.virtual_memory ()[ 1 ] ,
'current_network_usage': None ,
'current_gpu_usage': None ,
'free_disk_space': psutil.disk_usage ('/')[ 1 ] ,
'notebook_service': '' ,
'python_functions_service': '' ,
'notebook_service_port': '' ,
'python_functions_service_port': ''
}
resp = requests.post ('http://127.0.0.1:5000/agent/monitor/' , json=monitoring_info)
with open ('agent_monitoring_data.json' , 'a') as f:
json.dump (monitoring_info , f)
    notebook_service_port = monitoring_info['notebook_service_port']
    py_function_service_port = monitoring_info['python_functions_service_port']
return monitoring_info
def start_notebook(conda_path , conda_port):
print ('Starting notebook server..')
# subprocess.run ([ '/Users/rijupahwa/opt/anaconda3/bin/conda','activate','trials' ], shell=True)
subprocess.run ([ conda_path , 'notebook' , '--port' , conda_port , '--NotebookApp.token=''' ])
def start_agent_services():
try:
while True:
print ('running..')
except KeyboardInterrupt:
print ('terminating.')
finally:
print ('finally')
def shutdown_agent_services():
pass
if __name__ == '__main__':
if not os.path.isfile ("sys_config.json"):
print ('Registering system...')
sys_id = register_system_with_supercompute ()
register_agent_with_supercompute (sys_id)
elif not os.path.isfile ("agent_config.json"):
with open ('sys_config.json') as f:
sys_config = json.load (f)
print ('Registering agent for system {}...'.format (sys_config[ 'system_id' ]))
register_agent_with_supercompute (sys_config[ 'system_id' ])
print ('Starting monitoring services..')
#agent_config = None
with open ('agent_config.json') as f:
agent_config = json.load (f)
    monitor_pid = multiprocessing.Process (target=collect_monitoring_data , args=(agent_config[ 'agent_id' ] ,))
time.sleep(5)
print(notebook_service_port)
ssh_cloud_pid = multiprocessing.Process (target=listen_and_accept_requests ,
args=(cloud_server_hostname ,
notebook_service_port ,
"127.0.0.1" ,
notebook_service_port ,
"/Users/rijupahwa/.ssh/super-compute-dev.pem"))
ssh_cloud_pid.start ()
ssh_cloud_status = True
time.sleep (5)
notebook_pid = multiprocessing.Process (target=start_notebook ,
args=('/Users/rijupahwa/opt/anaconda3/bin/jupyter' , notebook_service_port))
notebook_pid.start ()
notebook_service = True
while True:
collect_monitoring_data (agent_config[ 'agent_id' ])
print (collect_monitoring_data (agent_config[ 'agent_id' ]))
if not ssh_cloud_status:
ssh_cloud_pid = multiprocessing.Process (target=listen_and_accept_requests ,
args=(cloud_server_hostname ,
notebook_service_port ,
"127.0.0.1" ,
notebook_service_port ,
"/Users/rijupahwa/.ssh/super-compute-dev.pem"))
ssh_cloud_pid.start ()
ssh_cloud_status = True
if not notebook_service:
notebook_pid = multiprocessing.Process (target=start_notebook ,
args=('/Users/rijupahwa/opt/anaconda3/bin/jupyter' , notebook_service_port))
notebook_pid.start ()
notebook_service = True
time.sleep (5)
|
import socket
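# Minimal UDP echo server: every datagram received is sent straight back to
# its sender.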
addr=("0.0.0.0",19562)
ss = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)  # recvfrom/sendto below are the UDP pattern, not TCP
ss.bind(addr)
while 1:
data,addr= ss.recvfrom(1024)
ss.sendto(data, addr)
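# Example client (sketch, run in another process):
#   cs = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#   cs.sendto(b"ping", ("127.0.0.1", 19562))
#   print(cs.recvfrom(1024))  # -> (b'ping', (host, port))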
|
# -*- coding:utf-8 -*-
# Anonymous functions (lambda)
from math import log  # the logarithm function from Python's math library
# This function returns an anonymous logarithm function with the given base
def make_logarithmic_function(base):
return lambda x:log(x,base)
# Create a base-3 anonymous logarithm function and assign it to My_LF
My_LF = make_logarithmic_function(3)
# Calling My_LF only needs the argument; the base is already fixed at 3. Calling
# log() directly needs both the argument and the base, so if we always take
# base-3 logarithms, My_LF is more convenient.
print My_LF(9)
# result: 2.0
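# A base-10 version can be built the same way:
# lg = make_logarithmic_function(10)
# print lg(100)  # result: 2.0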
|
try:
from charm.core.math.elliptic_curve import elliptic_curve,ZR,G,init,random,order,getGenerator,bitsize,serialize,deserialize,hashEC,encode,decode,getXY
#from charm.core.math.elliptic_curve import InitBenchmark,StartBenchmark,EndBenchmark,GetBenchmark,GetGeneralBenchmarks,ClearBenchmark
except Exception as err:
print(err)
exit(-1)
class ECGroup():
def __init__(self, builtin_cv):
self.group = elliptic_curve(nid=builtin_cv)
self.param = builtin_cv
self._verbose = True
def order(self):
return order(self.group)
def bitsize(self):
return bitsize(self.group)
def paramgen(self, secparam):
return None
def groupSetting(self):
return 'elliptic_curve'
def groupType(self):
return self.param
def init(self, _type=ZR):
return init(self.group, _type)
def random(self, _type=ZR):
if _type == ZR or _type == G:
return random(self.group, _type)
return None
def encode(self, message):
return encode(self.group, message)
def decode(self, msg_bytes):
return decode(self.group, msg_bytes)
def serialize(self, object):
return serialize(object)
def deserialize(self, bytes_object):
return deserialize(self.group, bytes_object)
# needs work to iterate over tuple
def hash(self, args, _type=ZR):
if isinstance(args, tuple):
s = bytes()
for i in args:
if type(i) == elliptic_curve:
s += serialize(i)
                elif type(i) == str:
                    s += i.encode('utf-8')  # s is a bytes buffer, so encode str values
# consider other types
#print("s => %s" % s)
return hashEC(self.group, str(s), _type)
elif type(args) == elliptic_curve:
msg = str(serialize(args))
return hashEC(self.group, msg, _type)
elif type(args) == str:
return hashEC(self.group, args, _type)
return None
def zr(self, point):
if type(point) == elliptic_curve:
return getXY(self.group, point, False)
return None
def coordinates(self, point):
if type(point) == elliptic_curve:
return getXY(self.group, point, True)
def debug(self, data, prefix=None):
if type(data) == dict and self._verbose:
for k,v in data.items():
print(k,v)
elif type(data) == list and self._verbose:
for i in range(0, len(data)):
print(prefix, (i+1),':',data[i])
print('')
elif type(data) == str and self._verbose:
print(data)
return None
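# Example usage (sketch; assumes charm-crypto is installed and provides the
# prime192v1 curve id):
#   from charm.toolbox.eccurve import prime192v1
#   group = ECGroup(prime192v1)
#   g = group.random(G); x = group.random(ZR)
#   h = group.hash((g, "message"))   # hash a tuple of group elements/strings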
|
import random
a = input('First student: ')
b = input('Second student: ')
c = input('Third student: ')
d = input('Fourth student: ')
students = [a, b, c, d]  # avoid shadowing the built-in name 'list'
random.shuffle(students)
print(students)
|
"""FTDs to VPNs Class."""
from fmcapi.api_objects.apiclasstemplate import APIClassTemplate
import logging
class FTDS2SVPNs(APIClassTemplate):
"""The FTDS2SVPNs Object in the FMC."""
VALID_JSON_DATA = [
"id",
"name",
"type",
"ipsecSettings",
"endpoints",
"ikeSettings",
"advancedSettings",
"description",
"ikeV2Enabled",
"ikeV1Enabled",
"topologyType",
"version",
]
VALID_FOR_KWARGS = VALID_JSON_DATA + []
FIRST_SUPPORTED_FMC_VERSION = "6.3"
URL_SUFFIX = "/policy/ftds2svpns"
VALID_CHARACTERS_FOR_NAME = """[.\w\d_\- ]"""
def __init__(self, fmc, **kwargs):
"""
Initialize FTDS2SVPNs object.
Set self.type to "FTDS2SVPpn", parse the kwargs, and set up the self.URL.
:param fmc (object): FMC object
:param **kwargs: Any other values passed during instantiation.
:return: None
"""
super().__init__(fmc, **kwargs)
logging.debug("In __init__() for FTDNatPolicies class.")
self.parse_kwargs(**kwargs)
self.type = "FTDS2SVpn"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# copia.py
#
# Copyright 2015 Cristian <cristian@cristian>
#
"""
Escreva um programa que cria uma cópia de um arquivo via CLI. Exemplo:
python3 copia.py <arquivo original> <arquivo copia>
"""
import sys
def copia(nomeArquivo, nomeNovoArquivo):
PARAMETROS = 3
copiaArquivo = ""
if len(sys.argv) == PARAMETROS:
arquivo = open(nomeArquivo, 'r')
for texto in arquivo.readlines():
copiaArquivo += texto
#
novoArquivo = open(nomeNovoArquivo, 'w')
novoArquivo.write(copiaArquivo)
novoArquivo.close()
arquivo.close()
else:
print("\nA função precisa receber dois parâmetros!\n")
#
#
def main():
copia(sys.argv[1], sys.argv[2])
return 0
if __name__ == '__main__':
main()
|
import re
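# Three ways to match C-style comments that span multiple lines:
# 1) a plain '.' does not match newlines, so the multi-line text2 fails
# 2) (?:.|\n) explicitly allows newlines inside the group
# 3) re.DOTALL makes '.' match newlines as well (the idiomatic fix)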
comment = re.compile(r'/\*(.*?)\*/')
text1 = '/* abcsdfasdfasdf */'
text2 = '''/* abcsdfasdfasdf
ddddz;z;z;z;z;z;z */'''
print comment.findall(text1)
print '--------------'
print comment.findall(text2)
comment = re.compile(r'/\*((?:.|\n)*?)\*/')
print '--------------'
print comment.findall(text1)
print '--------------'
print comment.findall(text2)
comment = re.compile(r'/\*(.*?)\*/', re.DOTALL)
print '--------------'
print comment.findall(text1)
print '--------------'
print comment.findall(text2)
|
#!/usr/bin/python3
'''
eval parses an expression from a str into code, runs it,
and returns the result.
'''
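# Note: eval() executes arbitrary code from the string. For parsing plain
# literals like the list below, ast.literal_eval is the safer choice:
#   import ast
#   ast.literal_eval("[5,6,7,8,9]")  # -> [5, 6, 7, 8, 9]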
list_str="[5,6,7,8,9]"
list_str=eval(list_str)
print(list_str)
print(list_str[4])
x=input("code:")
check_this_out=eval(input("code:"))
print(check_this_out)
|
# http://fun.coolshell.cn/unix.html
D = dict(zip('pvwdgazxubqfsnrhocitlkeymj',
             'abcdefghijklmnopqrstuvwxyz'))
string = 'Wxgcg txgcg ui p ixgff, txgcg ui p epm. I gyhgwt mrl lig txg ixgff wrsspnd tr irfkg txui hcrvfgs, nre, hfgpig tcm liunz txg crt13 ra "ixgff" tr gntgc ngyt fgkgf.'
answer = ''
for c in string:
if c in D:
answer += D[c]
else:
answer += c
print(answer)
# Where there is a shell, there is a way. I expect you use the shell command to solve this problem, now, please try using the rot13 of "shell" to enter next level.
rot13 = dict(zip('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', 'NOPQRSTUVWXYZABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm'))
print([rot13[c] for c in 'shell']) # furyy
|
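# Vigenère cipher: each plaintext letter is shifted by the value of the
# corresponding key letter (mod 26); the key is repeated to cover the whole
# message. Works on uppercase A-Z only.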
def Key_Generator(message, key):
key = list(key)
if len(message) == len(key):
        return ("".join(key))
else:
for i in range(len(message) -
len(key)):
key.append(key[i % len(key)])
return ("".join(key))
def encrypt(message, key):
Encrypted_Message = []
for i in range(len(message)):
m = (ord(message[i]) +
ord(key[i])) % 26
m += ord('A')
Encrypted_Message.append(chr(m))
return ("".join(Encrypted_Message))
def decrypt(cipher_text, key):
Decrypted_Message = []
for i in range(len(cipher_text)):
m = (ord(cipher_text[i]) -
ord(key[i]) + 26) % 26
m += ord('A')
Decrypted_Message.append(chr(m))
return ("".join(Decrypted_Message))
print('<---Please select one of the options given below--->\nNOTE: Do not forget to use capital letters only :)')
Value = int(input('1 : Encryption\n2 : Decryption\n-->'))
if(Value == 1):
Message = input("Please Enter Your MESSAGE (Plain Text) : ")
key = input('Please Enter the desired SHIFT KEY : ')
key = Key_Generator(Message, key)
print("Encrypted Message : ", encrypt(Message, key))
elif(Value == 2):
Message = input("Please Enter Your MESSAGE (Cipher Text) : ")
key = input('Please Enter the desired SHIFT KEY : ')
key = Key_Generator(Message, key)
print("Decrypted Message : ", decrypt(Message, key))
else:
print('Please Select the Valid Option')
|
#-*- coding: utf-8 -*-
from django.core import serializers
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import HttpResponse, HttpResponseRedirect, Http404, HttpResponseServerError
from django.shortcuts import render
from django.template import loader, RequestContext
from carton.cart import Cart
from .models import Produto
from protetores_bucais.apps.localizacao.models import Cidade, Endereco, Uf
from protetores_bucais.apps.loja.models import Cliente, Pedido, ItemPedido
import json
def abrir_loja(request):
produtos = Produto.objects.all()
template = loader.get_template('loja/base.html')
context = RequestContext(request, {'produtos': produtos})
return HttpResponse(template.render(context))
def buscar_produtos(request, pagina=1):
filtro = request.POST.get('filtro')
valor = request.POST.get('valor')
produtos = []
if filtro and valor:
if filtro == 'nome':
produtos = Produto.objects.filter(nome__icontains=valor)
elif filtro == 'preco-menor-igual':
produtos = Produto.objects.filter(preco__lte=valor)
elif filtro == 'preco-igual':
produtos = Produto.objects.filter(preco__exact=valor)
elif filtro == 'preco-maior-igual':
produtos = Produto.objects.filter(preco__gte=valor)
else:
produtos = Produto.objects.filter(descricao__icontains=valor)
else:
produtos = Produto.objects.all()
    # Show 10 products per page.
paginator = Paginator(produtos, 10)
page = pagina
try:
produtos = paginator.page(page)
except PageNotAnInteger:
produtos = paginator.page(1)
except EmptyPage:
produtos = paginator.page(paginator.num_pages)
template = loader.get_template('loja/produtos.html')
context = RequestContext(request, {'produtos': produtos})
return HttpResponse(template.render(context))
def ver_produto(request, id):
produto = Produto.objects.get(pk=id)
template = loader.get_template('loja/produto.html')
context = RequestContext(request, {'produto': produto})
return HttpResponse(template.render(context))
def carrinho_adicionar(request):
if request.method == 'POST':
carrinho = Cart(request.session)
produto = Produto.objects.get(pk=request.POST.get('id'))
carrinho.add(produto, price=produto.preco, quantity=request.POST.get('quantidade'))
data = json.dumps({'itens': str(len(carrinho.items)), 'total': str(carrinho.total)})
return HttpResponse(data, content_type='application/json')
else:
return HttpResponse('404')
def carrinho_remover(request):
if request.method == 'POST':
carrinho = Cart(request.session)
produto = Produto.objects.get(pk=request.POST.get('id'))
carrinho.remove(produto)
template = loader.get_template('loja/carrinho.html')
context = RequestContext(request)
return HttpResponse(template.render(context))
else:
return HttpResponse('404')
def incrementar_quantidade(request):
produto_id = request.POST['produto']
quantidade = request.POST['quantidade']
carrinho = Cart(request.session)
produto = Produto.objects.get(pk=produto_id)
carrinho.set_quantity(produto, quantity=quantidade)
template = loader.get_template('loja/carrinho.html')
context = RequestContext(request)
return HttpResponse(template.render(context))
def decrementar_quantidade(request):
produto_id = request.POST['produto']
quantidade = request.POST['quantidade']
carrinho = Cart(request.session)
produto = Produto.objects.get(pk=produto_id)
carrinho.set_quantity(produto, quantity=quantidade)
template = loader.get_template('loja/carrinho.html')
context = RequestContext(request)
return HttpResponse(template.render(context))
def carrinho_ver(request, pagina=1):
carrinho = Cart(request.session)
template = loader.get_template('loja/carrinho.html')
context = RequestContext(request)
return HttpResponse(template.render(context))
def esvaziar_carrinho(request):
carrinho = Cart(request.session)
carrinho.clear()
return carrinho_ver(request)
def finalizar_pedido(request):
template = loader.get_template('loja/confirmar-pedido.html')
context = RequestContext(
request,
{
'ufs': Uf.objects.all(),
'cidades': Cidade.objects.all(),
}
)
return HttpResponse(template.render(context))
def confirmar_pedido(request):
if request.method == 'POST':
# UF
uf = Uf.objects.get(sigla=request.POST.get('uf'))
# Cidade
cidade = uf.cidade_set.get(nome=request.POST.get('cidade'))
# Endereço
endereco = Endereco()
endereco.logradouro = request.POST.get('logradouro')
endereco.complemento = request.POST.get('complemento')
endereco.bairro = request.POST.get('bairro')
endereco.cep = request.POST.get('cep')
        endereco.cidade = cidade
endereco.save()
# Cliente
cliente = Cliente()
cliente.endereco = endereco
cliente.nome = request.POST.get('nome')
cliente.telefone = request.POST.get('telefone')
cliente.email = request.POST.get('email')
cliente.save()
# Pedido
pedido = Pedido()
pedido.cliente = cliente
# Carrinho de compras
carrinho = Cart(request.session)
pedido.valor_total = carrinho.total
pedido.save()
# Item pedido
for item in carrinho.items:
            print(item)
item_pedido = ItemPedido()
item_pedido.pedido = pedido
item_pedido.produto = item.product
item_pedido.quantidade = item.quantity
item_pedido.preco_unitario = item.price
item_pedido.preco_total = item.price * item.quantity
item_pedido.save()
pedido.save()
carrinho.clear()
template = loader.get_template('loja/efetuar-pagamento.html')
context = RequestContext(request, {'cliente': cliente})
return HttpResponse(template.render(context))
else:
return HttpResponseServerError()
|
num = int(input("请输入阶乘num:"))
factorial = 1
if num < 0:
print("负数没有阶乘!")
elif num == 0:
print("0的阶乘为1")
else:
for i in range(1,num+1):
factorial = factorial * i
print("%d的阶乘为%d"%(num,factorial))
|
from django.db.models import Q
from django.conf import settings
from django.contrib.auth import get_user_model, authenticate
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST
from rest_framework.views import APIView
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
from rest_framework.generics import (
CreateAPIView,
)
from rest_framework.permissions import (
AllowAny,
IsAuthenticated,
IsAdminUser,
IsAuthenticatedOrReadOnly,
)
User = get_user_model()
from .serializers import (
UserCreateSerializer,
UserLoginSerializer,
)
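# Create a DRF auth token automatically whenever a new user account is saved.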
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
if created:
Token.objects.create(user=instance)
class UserCreateAPIView(CreateAPIView):
serializer_class = UserCreateSerializer
queryset = User.objects.all()
permission_classes = [AllowAny]
class UserLoginAPIView(APIView):
permission_classes = [AllowAny]
serializer_class = UserLoginSerializer
def post(self, request, *args, **kwargs):
data = request.data
serializer = UserLoginSerializer(data=data)
if serializer.is_valid(raise_exception=True):
new_data = serializer.data
            user = serializer.validated_data.get('user')  # assumes UserLoginSerializer exposes the authenticated user
            token, _ = Token.objects.get_or_create(user=user)
new_data['token'] = token.key
return Response(new_data, status=HTTP_200_OK)
return Response(serializer.errors, status=HTTP_400_BAD_REQUEST)
|
szamla=[]
print(szamla)
total = sum(szamla)  # assumption: 'total' was undefined in the original; treated here as the list's sum
print(total)
|
# coding=utf-8
def triangles(max):
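    # Each new row pads the pairwise sums of the previous row with a 1 at
    # both ends, e.g. [1, 2, 1] -> [1, 3, 3, 1].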
c, n = [1], 1
while n <= max:
print(c)
c = [1]+[c[i-1]+c[i] for i in range(1, len(c))]+[1]
n += 1
def main():
while True:
cmd = raw_input("输入杨辉三角数字?")
if cmd == 'q':
return
else:
num = int(cmd)
triangles(num)
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
from selenium import webdriver
from pyunitreport import HTMLTestRunner
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest, time, re
import mysql.connector
import os
import datetime
class UntitledTestCase(unittest.TestCase):
#steps = [["open","https://www.facebook.com/",""],["click","id=email",""],["type","id=email","salmanplaystation2@gmail.com"],["click","id=pass",""],["type","id=pass","12345678"],["click","id=u_0_2",""]]
case_name = "facebooklogin.xml"
def setUp(self):
cwd = os.getcwd()
self.driver = webdriver.Chrome(cwd+"\\chrome\\chromedriver.exe")
'''self.driver.implicitly_wait(30)
self.base_url = "https://www.google.com/"
self.verificationErrors = []
self.accept_next_alert = True'''
'''chrome_options = Options()
chrome_options.add_argument("--headless")
chrome_options.add_argument("--window-size=1920x1080")
chrome_driver = os.getcwd() + "\\chrome\\chromedriver.exe"
self.driver = webdriver.Chrome(options=chrome_options, executable_path=chrome_driver)'''
self.driver.implicitly_wait(30)
self.base_url = "https://www.google.com/"
self.verificationErrors = []
self.accept_next_alert = True
def test_untitled_test_case(self):
driver = self.driver
steps = self.getArray(self.case_name)
try:
self.execute_steps(steps)
except Exception as e:
print("type error: " + str(e))
driver.get_screenshot_as_file("test.png")
def is_element_present(self, how, what):
try:
self.driver.find_element(by=how, value=what)
except NoSuchElementException as e:
return False
return True
def is_alert_present(self):
try:
self.driver.switch_to_alert()
except NoAlertPresentException as e:
return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally:
self.accept_next_alert = True
def execute_steps(self,listSteps):
driver = self.driver
driver.maximize_window()
size=len(listSteps)
if size > 0:
now = datetime.datetime.now()
b=now.strftime("%Y-%m-%d_%H-%M-%S")
f= open("log_"+b+".txt","a+")
f.write("\n-----------------------------------------------------------------------------------------------------------------------\n")
f.write("######################################################################################################################\n")
f.write("-----------------------------------------------------------------------------------------------------------------------\n")
f.write("Current Time \n")
f.write(now.strftime("%Y-%m-%d %H:%M:%S\n"))
f.write("-----------------------------------------------------------------------------------------------------------------------\n")
print("executing testcase: ", self.case_name,"\n")
a="executing testcase: "+self.case_name+"\n"
f.write(a)
for step in listSteps:
# print(step)
target = step[1]
if "open" in step[0]:
print("opening ", step[1])
a = "opening "+step[1]+"\n"
f.write(a)
driver.get(step[1])
elif "click" in step[0]:
if "/" in target[1]:
print("click element by xpath ", target)
a="click element by xpath "+target+"\n"
f.write(a)
driver.find_element_by_xpath(target).click()
elif "link=" in target:
linktag = target.split("=")
print("click link ", linktag[1])
a="click link "+linktag[1]+"\n"
f.write(a)
driver.find_element_by_link_text(linktag[1]).click()
elif "id=" in step[1]:
idtag = target.split("=")
print("click on element with id ", idtag[1])
a="click on element with id "+idtag[1]+"\n"
f.write(a)
driver.find_element_by_id(idtag[1]).click()
elif "name=" in target:
nametag = target.split("=")
print("click on element with name ", nametag[1])
a="click on element with name "+nametag[1]+"\n"
f.write(a)
driver.find_element_by_name(nametag[1]).click()
elif "type" in step[0]:
if "/" in target[1]:
print("clearing field with xpath ",target)
a="clearing field with xpath "+target+"\n"
f.write(a)
driver.find_element_by_xpath(target).clear()
print("typing ", step[2], " field with xpath ", target)
a="typing "+step[2]+" field with xpath "+target+"\n"
f.write(a)
driver.find_element_by_xpath(target).send_keys(step[2])
elif "id=" in step[1]:
idtag = target.split("=")
print("clearing field with element id ", idtag[1])
x=str(idtag[1])
b="clearing field with element id "+x+"\n"
f.write(b)
driver.find_element_by_id(idtag[1]).clear()
print("inserting ", step[2], "field with element id ", idtag[1])
a="inserting "+step[2]+"field with element id "+idtag[1]+"\n"
f.write(a)
driver.find_element_by_id(idtag[1]).send_keys(step[2])
elif "name=" in target:
nametag = target.split("=")
print("clearing field with element name ", nametag[1])
a="clearing field with element name "+nametag[1]+"\n"
f.write(a)
driver.find_element_by_name(nametag[1]).clear()
print("inserting", step[2], "field with element name ", nametag[1])
a="inserting"+step[2]+"field with element name "+nametag[1]+"\n"
f.write(a)
driver.find_element_by_name(nametag[1]).send_keys(step[2])
elif "assertText" in step[0]:
if "/" in target[1]:
print("assertEqualText ", step[2], "with text in element with xpath ", target)
a="assertEqualText "+step[2], "with text in element with xpath "+target+"\n"
f.write(a)
self.assertEqual(step[2], driver.find_element_by_xpath(
target).text)
elif "link=" in target:
linktag = target.split("=")
print("assertEqualText ", step[2], "with text in link ", linktag[1])
a="assertEqualText "+step[2]+"with text in link "+linktag[1]+"\n"
f.write(a)
self.assertEqual(step[2], driver.find_element_by_link_text(linktag[1]).text)
elif "id=" in step[1]:
idtag = target.split("=")
print("assertEqualText ", step[2], "with text in element with id ", idtag[1])
a="assertEqualText "+step[2], "with text in element with id "+idtag[1]+"\n"
f.write(a)
self.assertEqual(step[2], driver.find_element_by_id(idtag[1]).text)
elif "name=" in target:
nametag = target.split("=")
print("assertEqualText ",step[2],"with text in element with name ", nametag[1])
a="assertEqualText "+step[2], "with text in element with name "+nametag[1]+"\n"
f.write(a)
self.assertEqual(step[2], driver.find_element_by_name(nametag[1]).text)
elif "assertElementPresent" in step[0]:
if "/" in target[1]:
print("assert element is present with xpath ", target)
a="assert element is present with xpath "+target+"\n"
f.write(a)
self.assertTrue(self.is_element_present(By.XPATH, target))
elif "link=" in target:
linktag = target.split("=")
print("assert element is present with link ", linktag[1])
a="assert element is present with link "+linktag[1]+"\n"
f.write(a)
self.assertTrue(self.is_element_present(By.LINK_TEXT, linktag[1]))
elif "id=" in step[1]:
idtag = target.split("=")
print("assert element is present with id ", idtag[1])
a="assert element is present with id "+idtag[1]+"\n"
f.write(a)
self.assertTrue(self.is_element_present(By.ID, idtag[1]))
elif "name=" in target:
nametag = target.split("=")
print("assert element is present with name ", nametag[1])
a="assert element is present with name "+nametag[1]+"\n"
f.write(a)
self.assertTrue(self.is_element_present(By.NAME, nametag[1]))
elif "waitForVisible" in step[0]:
if "/" in target[1]:
print("waiting for element with xpath ", target, "to be visible")
a="waiting for element with xpath "+target+"to be visible"+"\n"
f.write(a)
for i in range(60):
try:
if driver.find_element_by_xpath(target).is_displayed():
break
except:
pass
time.sleep(1)
else:
print("element is not visible within time frame")
a="element is not visible within time frame"+"\n"
f.write(a)
self.fail("time out")
elif "link=" in target:
linktag = target.split("=")
print("waiting for element with link ", linktag[1], "to be visible")
a="waiting for element with link "+linktag[1], "to be visible"+"\n"
f.write(a)
for i in range(60):
try:
if driver.find_element_by_link_text(linktag[1]).is_displayed():
break
except:
pass
time.sleep(1)
else:
print("element is not visible within time frame")
a="element is not visible within time frame"+"\n"
f.write(a)
self.fail("time out")
elif "id=" in step[1]:
idtag = target.split("=")
print("waiting for element with id ", idtag[1], "to be visible")
a="waiting for element with id "+idtag[1], "to be visible"+"\n"
f.write(a)
for i in range(60):
try:
if driver.find_element_by_id(idtag[1]).is_displayed():
break
except:
pass
time.sleep(1)
else:
print("element is not visible within time frame")
a="element is not visible within time frame"+"\n"
f.write(a)
self.fail("time out")
elif "name=" in target:
nametag = target.split("=")
print("waiting for element with name ", nametag[1], "to be visible")
a="waiting for element with name "+nametag[1], "to be visible"+"\n"
f.write(a)
for i in range(60):
try:
if driver.find_element_by_name(nametag[1]).is_displayed():
break
except:
pass
time.sleep(1)
else:
print("element is not visible within time frame")
a="element is not visible within time frame"+"\n"
f.write(a)
self.fail("time out")
elif "assertTextPresent" in step[0]:
                self.assertRegexpMatches(driver.find_element_by_css_selector("BODY").text,
                                         r"^[\s\S]*" + re.escape(target) + r"[\s\S]*$")
print("taking screenshot")
a="taking screenshot"
f.write(a)
driver.get_screenshot_as_file("test.png")
def getArray(self,testcase):
conn = mysql.connector.connect(user="root", password="", host="localhost", database="testcasedb")
cursor = conn.cursor()
cursor.execute("SELECT * FROM testcases WHERE testCaseName='" + testcase + "' ORDER BY ID")
array = []
for row in cursor.fetchall():
array.append([row[2], row[3], row[4]])
conn.close()
        if array == []:
            print("Test case not found ")
        return array
def tearDown(self):
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main(testRunner=HTMLTestRunner(output="example_dir"))
|
import logic
import random
from AbstractPlayers import *
from math import log
import copy
import signal
import time
# Commands used by the move players. Dictionary: Move(enum) -> function(board).
# All the functions (up, down, left, right) receive a board and return a tuple
# (new_board, done, score): new_board reflects the step taken, done is True if
# the step is legal, and score is the sum of all numbers combined in this step.
# (you can see GreedyMovePlayer implementation for example)
from constants import *
inf = 1 << 32
PROBABILITY_2 = 0.9
PROBABILITY_4 = 0.1
commands = {Move.UP: logic.up, Move.DOWN: logic.down,
Move.LEFT: logic.left, Move.RIGHT: logic.right}
def signal_handler(signum, frame):
raise Exception("Timeout")
# generate value between {2,4} with probability p for 4
def gen_value(p=PROBABILITY):
return logic.gen_two_or_four(p)
class GreedyMovePlayer(AbstractMovePlayer):
"""Greedy move player provided to you (no need to change),
the player receives time limit for a single step and the board as parameter and return the next move that gives
the best score by looking one step ahead.
"""
def get_move(self, board, time_limit) -> Move:
optional_moves_score = {}
for move in Move:
new_board, done, score = commands[move](board)
if done:
optional_moves_score[move] = score
return max(optional_moves_score, key=optional_moves_score.get)
def calc_heuristic(board):
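    # Weighted sum of board features (weights hand-tuned): empty cells and a
    # large max tile reward open boards; monotonic rows/columns and adjacent
    # equal tiles reward merge potential.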
empty = 3 * _empty_cells(board)
max_val = 3 * _max_value(board)
mono_val = 1 * _monotonic_board_2(board)
same_tiles_val = 2 * _same_tiles(board)
return empty + mono_val + max_val + same_tiles_val
def _empty_cells(board) -> float:
empty_cells = 0
for i in range(GRID_LEN):
for j in range(GRID_LEN):
if board[i][j] == 0:
empty_cells += 1
return empty_cells
def _max_value(board) -> float:
max_val = 0
for i in range(GRID_LEN):
for j in range(GRID_LEN):
if board[i][j] > max_val:
max_val = board[i][j]
return log(max_val)
def _monotonic_board_2(board):
left_to_right = 0
right_to_left = 0
up_to_down = 0
down_to_up = 0
for x in range(GRID_LEN):
for y in range(GRID_LEN - 1):
# horizontal
val_curr = board[x][y]
val_next = board[x][y + 1]
if val_curr != 0 or val_next != 0:
if val_curr < val_next:
left_to_right += val_next - val_curr
if val_curr > val_next:
right_to_left += val_curr - val_next
# vertical
val_curr = board[y][x]
val_next = board[y + 1][x]
if val_curr != 0 or val_next != 0:
if val_curr < val_next:
up_to_down += val_next - val_curr
if val_curr > val_next:
down_to_up += val_curr - val_next
res = min(left_to_right, right_to_left) + min(up_to_down, down_to_up)
if res == 0:
return 0
return -log(res)
def _same_tiles(board):
counter = 0
for row in range(GRID_LEN):
for col in range(GRID_LEN - 1):
if board[row][col] == board[row][col + 1] and board[row][col] != 0:
counter += 1
col += 1
for col in range(GRID_LEN):
for row in range(GRID_LEN - 1):
if board[row][col] == board[row + 1][col] and board[row][col] != 0:
counter += 1
row += 1
if counter == 0:
return 0
return counter
class RandomIndexPlayer(AbstractIndexPlayer):
"""Random index player provided to you (no need to change),
the player receives time limit for a single step and the board as parameter and return the next indices to
put 2 randomly.
"""
def get_indices(self, board, value, time_limit) -> (int, int):
a = random.randint(0, len(board) - 1)
b = random.randint(0, len(board) - 1)
while board[a][b] != 0:
a = random.randint(0, len(board) - 1)
b = random.randint(0, len(board) - 1)
return a, b
# part A
class ImprovedGreedyMovePlayer(AbstractMovePlayer):
"""Improved greedy Move Player,
implement get_move function with greedy move that looks only one step ahead with heuristic.
(you can add helper functions as you want).
"""
def __init__(self):
AbstractMovePlayer.__init__(self)
def get_move(self, board, time_limit) -> Move:
optional_moves_score = {}
for move in Move:
new_board, done, score = commands[move](board)
if done:
optional_moves_score[move] = calc_heuristic(new_board)
return max(optional_moves_score, key=optional_moves_score.get)
# part B
class MiniMaxMovePlayer(AbstractMovePlayer):
"""MiniMax Move Player,
implement get_move function according to MiniMax algorithm
(you can add helper functions as you want).
"""
def __init__(self):
AbstractMovePlayer.__init__(self)
def get_move(self, board, time_limit) -> Move:
move = Move.LEFT
iter = 0
signal.signal(signal.SIGALRM, signal_handler)
signal.setitimer(signal.ITIMER_REAL, 0.8 * time_limit)
try:
while True:
move = MiniMaxMovePlayer.min_max_move(board, iter)[0]
iter += 1
except Exception as msg:
pass
return move
@staticmethod
def min_max_move(board, iteration) -> (Move, float):
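        # Depth-limited minimax with iterative deepening (driven by get_move):
        # the move player maximizes the heuristic while the index player
        # (placing a 2) minimizes it.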
optional_moves_score = {}
if iteration == 0:
for move in Move:
new_board, done, score = commands[move](board)
if done:
optional_moves_score[move] = calc_heuristic(new_board)
if not optional_moves_score:
return Move.LEFT, 0 # need to put something in the second value
else:
res_move = max(optional_moves_score, key=optional_moves_score.get)
return res_move, optional_moves_score[res_move]
else:
for move in Move:
new_board, done, score = commands[move](board)
if done:
optional_moves_score[move] = MiniMaxIndexPlayer.min_max_index(new_board, iteration - 1)[1]
if not optional_moves_score:
return Move.LEFT, 0
else:
res_move = max(optional_moves_score, key=optional_moves_score.get)
return res_move, optional_moves_score[res_move]
class MiniMaxIndexPlayer(AbstractIndexPlayer):
"""MiniMax Index Player,
this player is the opponent of the move player and need to return the indices on the board where to put 2.
the goal of the player is to reduce move player score.
implement get_indices function according to MiniMax algorithm, the value in minimax player value is only 2.
(you can add helper functions as you want).
"""
def __init__(self):
AbstractIndexPlayer.__init__(self)
def get_indices(self, board, value, time_limit) -> (int, int):
row = 0
col = 0
iteration = 0
signal.signal(signal.SIGALRM, signal_handler)
signal.setitimer(signal.ITIMER_REAL, 0.80 * time_limit)
try:
while True:
row, col = MiniMaxIndexPlayer.min_max_index(board, iteration)[0]
iteration += 1
except Exception as msg:
pass
return row, col
@staticmethod
def min_max_index(board, iteration) -> ((int, int), float):
optional_index_score = {}
if iteration == 0:
for row in range(GRID_LEN):
for col in range(GRID_LEN):
if board[row][col] == 0:
new_board = copy.deepcopy(board)
new_board[row][col] = 2
optional_index_score[(row, col)] = calc_heuristic(new_board)
res_index = min(optional_index_score, key=optional_index_score.get)
return res_index, optional_index_score[res_index]
else:
for row in range(GRID_LEN):
for col in range(GRID_LEN):
if board[row][col] == 0:
new_board = copy.deepcopy(board)
new_board[row][col] = 2
optional_index_score[(row, col)] = (MiniMaxMovePlayer.min_max_move(new_board, iteration - 1))[1]
return min(optional_index_score, key=optional_index_score.get), min(optional_index_score.values())
# part C
class ABMovePlayer(AbstractMovePlayer):
"""Alpha Beta Move Player,
implement get_move function according to Alpha Beta MiniMax algorithm
(you can add helper functions as you want)
"""
def __init__(self):
AbstractMovePlayer.__init__(self)
def get_move(self, board, time_limit) -> Move:
move = Move.LEFT
iter = 0
start = time.time()
signal.signal(signal.SIGALRM, signal_handler)
signal.setitimer(signal.ITIMER_REAL, 0.80 * time_limit)
try:
while True:
move = ABMovePlayer.min_max_move(board, iter)[0]
iter += 1
except Exception as msg:
pass
return move
@staticmethod
def min_max_move(board, iteration, alpha=-inf, beta=inf) -> (Move, float):
optional_moves_score = {}
if iteration == 0:
for move in Move:
new_board, done, score = commands[move](board)
if done:
optional_moves_score[move] = calc_heuristic(new_board)
if not optional_moves_score:
return Move.LEFT, 0 # need to put something in the second value
else:
res_move = max(optional_moves_score, key=optional_moves_score.get)
return res_move, optional_moves_score[res_move]
else:
for move in Move:
new_board, done, score = commands[move](board)
if done:
optional_moves_score[move] = ABMovePlayer.min_max_index(new_board, iteration - 1, alpha, beta)[1]
alpha = max(alpha, optional_moves_score[move])
if alpha >= beta:
break
if not optional_moves_score:
return Move.LEFT, 0
else:
res_move = max(optional_moves_score, key=optional_moves_score.get)
return res_move, optional_moves_score[res_move]
@staticmethod
def min_max_index(board, iteration, alpha=-inf, beta=inf) -> ((int, int), float):
optional_index_score = {}
if iteration == 0:
for row in range(GRID_LEN):
for col in range(GRID_LEN):
if board[row][col] == 0:
new_board = copy.deepcopy(board)
new_board[row][col] = 2
optional_index_score[(row, col)] = calc_heuristic(new_board)
res_index = min(optional_index_score, key=optional_index_score.get)
return res_index, optional_index_score[res_index]
else:
for row in range(GRID_LEN):
for col in range(GRID_LEN):
if board[row][col] == 0:
new_board = copy.deepcopy(board)
new_board[row][col] = 2
optional_index_score[(row, col)] = \
(ABMovePlayer.min_max_move(new_board, iteration - 1, alpha, beta))[1]
beta = min(beta, optional_index_score[(row, col)])
if beta <= alpha:
break
return min(optional_index_score, key=optional_index_score.get), min(optional_index_score.values())
# part D
class ExpectimaxMovePlayer(AbstractMovePlayer):
"""Expectimax Move Player,
implement get_move function according to Expectimax algorithm.
(you can add helper functions as you want)
"""
def __init__(self):
AbstractMovePlayer.__init__(self)
def get_move(self, board, time_limit) -> Move:
move = Move.LEFT
iter = 0
signal.signal(signal.SIGALRM, signal_handler)
signal.setitimer(signal.ITIMER_REAL, 0.80 * time_limit)
try:
while True:
move = ExpectimaxMovePlayer.min_max_move(board, iter)[0]
iter += 1
except Exception as msg:
pass
return move
@staticmethod
def min_max_move(board, iteration) -> (Move, float):
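        # Expectimax: rather than assuming an adversarial placement, each
        # child's value is averaged over a 2 (p=0.9) and a 4 (p=0.1) tile.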
optional_moves_score = {}
if iteration == 0:
for move in Move:
new_board, done, score = commands[move](board)
if done:
optional_moves_score[move] = calc_heuristic(new_board)
if not optional_moves_score:
                return Move.LEFT, 0  # no legal move from this board; placeholder score
else:
res_move = max(optional_moves_score, key=optional_moves_score.get)
return res_move, optional_moves_score[res_move]
else:
for move in Move:
new_board, done, score = commands[move](board)
if done:
optional_moves_score[move] = ExpectimaxIndexPlayer.min_max_index(new_board, iteration - 1, 2)[
1] * PROBABILITY_2
optional_moves_score[move] += ExpectimaxIndexPlayer.min_max_index(new_board, iteration - 1, 4)[
1] * PROBABILITY_4
if not optional_moves_score:
return Move.LEFT, 0
else:
res_move = max(optional_moves_score, key=optional_moves_score.get)
return res_move, optional_moves_score[res_move]
class ExpectimaxIndexPlayer(AbstractIndexPlayer):
"""Expectimax Index Player
    implement get_indices function according to the Expectimax algorithm; the value is a number in {2, 4}.
(you can add helper functions as you want)
"""
def __init__(self):
AbstractIndexPlayer.__init__(self)
def get_indices(self, board, value, time_limit) -> (int, int):
row = 0
col = 0
iteration = 0
signal.signal(signal.SIGALRM, signal_handler)
signal.setitimer(signal.ITIMER_REAL, 0.80 * time_limit)
try:
while True:
row, col = self.min_max_index(board, iteration, value)[0]
iteration += 1
        except Exception:
            pass  # timer expired; return the deepest completed placement
return row, col
@staticmethod
def min_max_index(board, iteration, value) -> ((int, int), float):
optional_index_score = {}
if iteration == 0:
for row in range(GRID_LEN):
for col in range(GRID_LEN):
if board[row][col] == 0:
new_board = copy.deepcopy(board)
new_board[row][col] = value
optional_index_score[(row, col)] = calc_heuristic(new_board)
res_index = min(optional_index_score, key=optional_index_score.get)
return res_index, optional_index_score[res_index]
else:
for row in range(GRID_LEN):
for col in range(GRID_LEN):
if board[row][col] == 0:
new_board = copy.deepcopy(board)
new_board[row][col] = value
optional_index_score[(row, col)] = \
(ExpectimaxMovePlayer.min_max_move(new_board, iteration - 1))[1]
return min(optional_index_score, key=optional_index_score.get), min(optional_index_score.values())
# Tournament
class ContestMovePlayer(AbstractMovePlayer):
"""Contest Move Player,
implement get_move function as you want to compete in the Tournament
(you can add helper functions as you want)
"""
def __init__(self):
AbstractMovePlayer.__init__(self)
def get_move(self, board, time_limit) -> Move:
move = Move.LEFT
iter = 0
signal.signal(signal.SIGALRM, signal_handler)
signal.setitimer(signal.ITIMER_REAL, 0.80 * time_limit)
try:
while True:
move = ABMovePlayer.min_max_move(board, iter)[0]
iter += 1
except Exception:
pass
return move
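# A minimal sketch of the iterative-deepening pattern every player above
# shares (`search` is a hypothetical stand-in for the per-player static
# method; signal_handler and the 0.80 * time_limit factor come from this file):
#
#   signal.signal(signal.SIGALRM, signal_handler)
#   signal.setitimer(signal.ITIMER_REAL, 0.80 * time_limit)
#   try:
#       depth = 0
#       while True:                       # deepen until the timer raises
#           best = search(board, depth)   # keep the last *completed* result
#           depth += 1
#   except Exception:
#       pass                              # timer fired: fall back to `best`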
|
name = input("Qual o nome do funcionario: ")
salary= int(input("Digite valor do Salario: "))
if (salary <= 5000):
newSalary = salary*1,10
print ("O Funcionario",name," recebera o nova salario =", newSalary)
if (salary > 5000) and (salary <= 20000):
newSalary = (salary/100)*105
print ("O Funcionario",name," recebera o nova salario =", newSalary)
if (salary > 20000):
print ("O Funcionario",name," recebera o mesmo salario =", salary)
|
from django.apps import AppConfig
#class MilliardConfig(AppConfig):
# name = 'milliard'
import re
# the character class allows Cyrillic letters, Latin letters, and whitespace;
# a single space matches, so this prints 1000
if re.match(r'^[а-яА-ЯёЁa-zA-Z\s]+$', ' '):
    print(1000)
|
print("Juste _un test")
|
# -*- coding: utf-8 -*-
import os
# run the sample binary and print the block after the first separator line
rs = os.popen("./iat_sample")
text = rs.read()
arr = text.split('=============================================================')
print(arr[1].strip('\n'))
|
# ============LICENSE_START=======================================================
# Copyright (c) 2020-2022 AT&T Intellectual Property. All rights reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============LICENSE_END=========================================================
import unittest
import trapd_exit
pid_file = "/tmp/test_pid_file"
pid_file_dne = "/tmp/test_pid_file_NOT"
import trapd_stormwatch_settings as sws
class test_cleanup_and_exit(unittest.TestCase):
"""
    Test for presence of required vars
"""
def test_nonexistent_dict(self):
"""
Test nosuch var
"""
sws.init()
self.assertFalse(hasattr(sws, 'no_such_var'))
def test_storm_counter_dict(self):
"""
Test storm_counter_dict
"""
sws.init()
self.assertTrue(hasattr(sws, 'sw_storm_counter_dict'))
def test_storm_active_dict(self):
"""
Test storm_active_dict
"""
sws.init()
self.assertTrue(hasattr(sws, 'sw_storm_active_dict'))
def test_sw_config_oid_dict(self):
"""
Test sw_config_oid_dict
"""
sws.init()
self.assertTrue(hasattr(sws, 'sw_config_oid_dict'))
def test_sw_config_low_water_in_interval_dict(self):
"""
Test low_water
"""
sws.init()
self.assertTrue(hasattr(sws, 'sw_config_low_water_in_interval_dict'))
def test_sw_config_high_water_in_interval_dict(self):
"""
Test high water dict
"""
sws.init()
self.assertTrue(hasattr(sws, 'sw_config_high_water_in_interval_dict'))
def test_sw_config_category(self):
"""
Test category
"""
sws.init()
self.assertTrue(hasattr(sws, 'sw_config_category'))
def test_sw_interval_in_seconds(self):
"""
Test sw_interval
"""
sws.init()
self.assertTrue(hasattr(sws, 'sw_interval_in_seconds'))
self.assertTrue(str(sws.sw_interval_in_seconds).isnumeric())
def test_sw_last_stormwatch_dict_analysis(self):
"""
Test last_stormwatch_dict_analysis
"""
sws.init()
self.assertTrue(hasattr(sws, 'sw_last_stormwatch_dict_analysis'))
self.assertTrue(str(sws.sw_last_stormwatch_dict_analysis).isnumeric())
if __name__ == "__main__": # pragma: no cover
unittest.main(verbosity=2)
|
#!/usr/bin/env python3
import meal_planner
from bottle import *
import logging
import os
import sys
import copy
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('web_server')
mp_log = logging.getLogger('meal_planner')
mp_log.setLevel(logging.INFO)
@get('/')
def index():
return static_file('index.html', '.')
@get('/css/<filename>')
def css(filename):
return static_file(filename, './css')
@get('/js/<filename>')
def js(filename):
return static_file(filename, './js')
@get('/csvs/<filename>')
def csvs(filename):
return static_file(filename, './csvs', download=True)
@get('/get_meal_plans')
def get_meal_plans_get():
return meal_planner.get_meal_plans()
@post('/get_meal_plans')
def get_meal_plans_post():
person = request.json.get('person', "adult man").strip()
nutrient_targets = request.json.get('nutrient_targets')
iterations = request.json.get('iterations') or 10000
min_serve_size_difference = request.json.get('min_serve_size_difference') or .5
allowed_varieties = request.json.get('variety') or [1,2,3]
allow_takeaways = request.json.get('takeaways')
food_group_targets = request.json.get('food_group_targets') or {}
    logger.info('request received, person={}, nutrient_targets={}, iterations={}, min_serve_size_difference={}, allowed_varieties={}, allow_takeaways={}, food_group_targets={}'.format(person, nutrient_targets, iterations, min_serve_size_difference, allowed_varieties, allow_takeaways, food_group_targets))
return meal_planner.get_meal_plans(person, nutrient_targets, int(iterations), float(min_serve_size_difference), allowed_varieties, bool(allow_takeaways), food_group_targets)
@get('/get_nutrient_targets')
def get_nutrient_targets():
return meal_planner.nutrient_targets
@get('/get_food_group_targets')
def get_food_group_targets():
return meal_planner.food_groups
@get('/get_var_price')
def get_variable_price_options():
return meal_planner.variable_prices
@post('/check_meal_plan_for_person')
def check_meal_plan_for_person():
person = request.json.get('person') or 'adult man'
meal_plan = request.json['meal_plan']
nutrients = meal_planner.get_nutrients(meal_plan, person)
nutrient_targets = meal_planner.convert_to_fortnightly(copy.deepcopy(meal_planner.nutrient_targets[person]))
return meal_planner.get_diff(nutrients, nutrient_targets)
port = int(os.environ.get('PORT', 8080))
if __name__ == "__main__":
try:
try:
run(host='0.0.0.0', port=port, debug=True, server='gunicorn', workers=8, timeout=500)
except ImportError:
run(host='0.0.0.0', port=port, debug=True, timeout=500)
except Exception as e:
logger.error(e)
sys.stdin.readline()
app = default_app()
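# A minimal client sketch for the POST endpoint above (localhost:8080 follows
# the PORT default; the JSON fields mirror what the handler reads, and the
# concrete values shown are assumptions):
#
#   import requests
#   resp = requests.post('http://localhost:8080/get_meal_plans', json={
#       'person': 'adult man',
#       'iterations': 1000,
#       'min_serve_size_difference': 0.5,
#       'variety': [1, 2],
#       'takeaways': False,
#   })
#   print(resp.json())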
|
name = "Roberta"
print("Hello, " + name.title())
|
def add(num1, num2):
return num1 + num2
def subtract(num1, num2):
return num1 - num2
def multiply(num1, num2):
return num1 * num2
def divide(num1, num2):
return num1 / num2
def vowels(string):
vowel = "aeiou"
string = string.lower()
count = 0
for x in string:
if x in vowel:
count = count+1
return count
def encrypt(string):
    # "encrypts" by simply reversing the string
    return string[::-1]
menu = input("For Mathematical functions, Please Enter the Number 1 " + "\n"
+ "For String Operations, Please Enter the Number 2 \n")
if menu == '1':
menu = input("For Addition, Please Enter the Number 1 \n" + "For subtraction, Please Enter the Number 2 \n" + "For Multiplication, Please Enter the Number 3 \n" + "For Division, Please Enter the Number 4 \n")
num1 = int(input("Enter First Number: "))
num2 = int(input("Enter Second Number: "))
if menu == '1':
print(add(num1, num2))
elif menu == '2':
print(subtract(num1, num2))
elif menu == '3':
print(multiply(num1, num2))
elif menu == '4':
print(divide(num1, num2))
else:
print("Invalid Menu Number!")
elif menu == '2':
menu = input("To Determine the Number of Vowels in a String; Enter the Number 1 \n" + "To Encrypt a String; Enter Number 2 \n")
string = input("Enter a String: ")
if menu == '1':
print(vowels(string))
elif menu == '2':
print(encrypt(string))
else:
print("Invalid Menu Number")
else:
print("Invalid Menu Number")
|
from panda3d.core import CKeyValues
from .Entity import Entity
# The root entity of each map
class World(Entity):
ObjectName = "world"
def __init__(self, id):
Entity.__init__(self, id)
self.setClassname("worldspawn")
self.np.node().setFinal(False)
def doWriteKeyValues(self, parent):
kv = CKeyValues(self.ObjectName, parent)
self.writeKeyValues(kv)
for child in self.children.values():
if isinstance(child, Entity):
# Put entities outside of the world key-value block
par = parent
else:
par = kv
child.doWriteKeyValues(par)
self.writeEditorValues(kv)
def isWorld(self):
return True
|
from flask import Flask, request, render_template, session, redirect, url_for
from user import Newbike
from sdt import New
import mlab
import smtplib
mlab.connect()
app = Flask(__name__)
@app.route("/", methods=["GET", "POST"])
def home():
if request.method == "GET":
#User request form
return render_template("login.html")
elif request.method == "POST":
form = request.form
s = form["sdt"]
nbi = New(sdt=s.strip())
        if s == '':
            warning = "please enter a phone number"
            return render_template("login.html", warning=warning)
else:
nbi.save()
return render_template("welcome.html")
@app.route("/trangchu", methods=["GET", "POST"])
def trangchu():
if request.method == "GET":
#User request form
return render_template("index.html")
elif request.method == "POST":
form = request.form
u = form["username"]
e = form["email"]
n = form["number"]
m = form["messenger"]
nbike = Newbike(username=u.strip(), email=e.strip(), number=n.strip(), messenger=m.strip())
nbike.save()
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login("spy12a6@gmail.com", "cuong11a6")
msg = "Cam on quy khach da quan tam toi du an, goi ngay 012345678 de duoc tu van mien phi"
server.sendmail("spy12a6@gmail.com", e, msg)
server.quit()
        notice2 = 'thank you!'
return render_template("index.html", username=u, email=e, number=n, messenger=m, u=u, notice2=notice2)
@app.route("/list")
def list():
userlist = Newbike.objects()
sdtlist = New.objects()
return render_template('aboutmes.html', userlist=userlist, sdtlist=sdtlist)
if __name__ == "__main__":
app.run(debug=True)
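# Note: the SMTP credentials above are hard-coded. A safer sketch (assuming
# the same account, with SMTP_USER/SMTP_PASS as hypothetical variable names)
# reads them from the environment instead:
#
#   import os
#   server.login(os.environ['SMTP_USER'], os.environ['SMTP_PASS'])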
|
from controller import Robot, DistanceSensor, Motor,Receiver,Emitter,LightSensor,LED,Keyboard, GPS
import struct
#import RRT
import math
import RRT2
import sys
import my_parser
import IdealPos
EN = "utf-8"
pI=3.14159265359
#dummy car location
#goal_x = -1
#goal_y = -1
#fire car dimension
#fire_length = 0.2
#fire_width = 0.1
#car dimensions
# x,y,z in meters
dim = [0.1,0.05,0.2]
CAR = "FIRE"
TH = 950
TIME_STEP = 64
##########globals for rrt##############################
delta_time=11.93662 #when speed is 3.14 rad/s, go 1.5 meters need 11.93662s
currTime=0
nextTime=0
i=1
mode=2
my_turn = False
lock=0
rrt_count=0
prio = 2
do_job_now = False
task_done = False
#testing purposes
mode = 2
##########################################################
######## globals for info parsing and ideal loc ############
com = r_message = None #
at_ideal = going_ideal = False
coords = []
ccoords = [] #
############################################################
robot = Robot()
ds = []
dsNames = ['ds_right', 'ds_left','ds_back_left','ds_back_right']
for i in range(4):
ds.append(robot.getDistanceSensor(dsNames[i]))
ds[i].enable(TIME_STEP)
'''
ds_l = robot.getDistanceSensor('ds_left')
ds_r = robot.getDistanceSensor('ds_right')
ds_l.enable(TIME_STEP)
ds_r.enable(TIME_STEP)
'''
wheels = []
wheelsNames = ['wheel1', 'wheel2', 'wheel3', 'wheel4','arm_motor1','arm_motor2','arm_motor4','arm_motor6']
for i in range(8):
wheels.append(robot.getMotor(wheelsNames[i]))
wheels[i].setPosition(float('inf'))
wheels[i].setVelocity(0.0)
avoidObstacleCounter = 0
tuoluoyi=robot.getInertialUnit('imu_angle')
tuoluoyi.enable(TIME_STEP)
jieshouqi=robot.getReceiver('receiver')
jieshouqi.enable(TIME_STEP)
jieshouqi.setChannel(3)
#set up emitter to send back to server
server_sock=robot.getEmitter('emitter')
server_sock.setChannel(1)
server_sock.setRange(-1)
count =0
jianpan=robot.getKeyboard()
jianpan.enable(TIME_STEP)
light_sensor_1=robot.getLightSensor('light_sensor1')
light_sensor_1.enable(TIME_STEP)
light_sensor_2=robot.getLightSensor('light_sensor2')
light_sensor_2.enable(TIME_STEP)
light_sensor_3=robot.getLightSensor('light_sensor3')
light_sensor_3.enable(TIME_STEP)
light_sensor_lu_left=robot.getLightSensor('light_sensor_lu_left')
light_sensor_lu_left.enable(TIME_STEP)
light_sensor_lu_right=robot.getLightSensor('light_sensor_lu_right')
light_sensor_lu_right.enable(TIME_STEP)
motor4_imu=robot.getInertialUnit('motor4_imu')
motor4_imu.enable(TIME_STEP)
motor2_imu=robot.getInertialUnit('motor2_imu')
motor2_imu.enable(TIME_STEP)
gps = robot.getGPS('arm_gps')
gps.enable(TIME_STEP)
#start_rrt = 0
#start_find = 0
#start_arm = 0
ls_top = robot.getLightSensor("light_sensor_top")
ls_bot = robot.getLightSensor("light_sensor_bot")
ls_top.enable(TIME_STEP)
ls_bot.enable(TIME_STEP)
#to go to the closest side global variables
#detect fire
#Top, Bot, Left, and Right position of the dummy car
dum_loc = []
arm_loc = []
dum_angle = -361
dum_loc.append((-100,-100)) #top
dum_loc.append((-100,-100)) #right
dum_loc.append((-100,-100)) #bot
dum_loc.append((-100,-100)) #left
dum_light =[-1, -1, -1, -1] #top, right, bot, left max light intensity
dum_imu = [-1, -1, -1, -1] #top, right, bot, left angle with the highest intensity
#move_to_fire variables
stop_index = -1
index = -1
max_light_index = -1
min_dist_index = -1
first_time = 0
#LOCKS
fire_lock_0 = 0
fire_lock_1 = 0
fire_lock_2 = 0
fire_lock_3 = 0
fire_lock_4 = 0
fire_lock_5 = 0
fire_lock_6 = 0
fire_lock_7 = 0
fire_lock_8 = 0
fire_lock_9 = 0
move_lock_4_1 = 0
move_lock_4_2 = 0
move_lock_4_3 = 0
move_lock_4_4 = 0
move_lock_4_5 = 0
move_lock_4_6 = 0
move_lock_1 = 0
move_lock_2 = 0
move_lock_3 = 0
move_lock_4 = 0
move_thresh_angle = -361
scan_lock_1 = 0
scan_lock_2 = 0
scan_lock_3 = 0
scan_lock_4 = 0
scan_lock = 0
#convert Webots world coordinates (roughly [-3, 3] m) to RRT grid coordinates ([0, 60], z flipped)
def conv_coords(coords):
x = (coords[0] + 3) * 10
y = (coords[1] + 3) * 10
z = (-1*coords[2] + 3) * 10
return [x,y,z]
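# For example, a GPS reading of (-3, 0, 3) maps to [0, 30, 0] and (3, 0, -3)
# maps to [60, 30, 60]: the [-3, 3] m world becomes a [0, 60] grid with the
# z-axis flipped.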
############## create rrt path
rrt_theta=[]
len_rrt = 0
my_rrt = None
in_transit = False
ending = False
obstacles_list = [
(19,20,2,40),
(37,0,2,40)
]
show_ani = False
#initializing the rrt
def rrt_create(start, dest):
global my_rrt
#rrt_theta = []
beg_x = float(start[0])
beg_y = float(start[2])
goal_x=float(dest[0])
goal_y=float(dest[2])
#convert radius to RRT coordinates
car_bound = dim[2] * 10 + 2
#print(car_radius)
my_rrt = RRT2.RRT(start=[beg_x, beg_y],goal=[goal_x,goal_y],rand_area=[0,60],\
obstacle_list=obstacles_list,car_radi=car_bound,show = show_ani)
#rrt_path=RRT2.rrt_main(beg_x, beg_y,goal_x,goal_y,car_bound)
rrt_new(start,dest)
#print(rrt_theta)
#ALWAYS! rrt_reset right before rrt_new
#resetting parameters for rrt
def rrt_reset():
global rrt_theta, len_rrt, i, lock
rrt_theta = []
len_rrt = 0
i = 1
lock = 0
#change start and goal and create path
def rrt_new(start, dest):
global rrt_theta,len_rrt, mode,my_rrt
rrt_reset()
my_rrt.set_start_end([float(start[0]),float(start[2])],[float(dest[0]),float(dest[2])])
rrt_path = my_rrt.planning()
for data in rrt_path:
rrt_theta.append(float(data[2]))
rrt_theta.reverse()
len_rrt=len(rrt_theta)
mode = 1
print(rrt_theta)
################################################# for move RRT
def go_straight():
wheels[0].setVelocity(1)
wheels[1].setVelocity(1)
wheels[2].setVelocity(1)
wheels[3].setVelocity(1)
def right_turn():
wheels[0].setVelocity(1)
wheels[1].setVelocity(-1)
wheels[2].setVelocity(1)
wheels[3].setVelocity(-1)
def left_turn():
wheels[0].setVelocity(-1)
wheels[1].setVelocity(1)
wheels[2].setVelocity(-1)
wheels[3].setVelocity(1)
def no_move():
wheels[0].setVelocity(0)
wheels[1].setVelocity(0)
wheels[2].setVelocity(0)
wheels[3].setVelocity(0)
def set_global_angle(angle):
global dum_angle
dum_angle = angle
def move_car_on_rrt():
global mode,lock,i,delta_time,currTime,nextTime,rrt_theta, at_ideal, going_ideal
angleArray = rrt_theta
if mode==0:
#print(lock)
xyz = tuoluoyi.getRollPitchYaw()
for a in xyz:
a = float(a)
curr_angle = xyz[2]/pI*180
#decide which way should rotate
if lock ==0:
if angleArray[i]<=curr_angle and curr_angle >=0 and angleArray[i] >=0: #right turn
lock=1
elif angleArray[i] > curr_angle and curr_angle >=0 and angleArray[i] >=0: #left turn
lock=2
elif angleArray[i]<=curr_angle and curr_angle <=0 and angleArray[i] <=0: #right turn
lock=3
elif angleArray[i]>curr_angle and curr_angle <=0 and angleArray[i] <=0: #left turn
lock=4
elif angleArray[i]<90 and curr_angle <=0 and angleArray[i] >=0: #left turn
lock=5
elif angleArray[i]>=90 and curr_angle <=0 and angleArray[i] >=0: #right turn
lock=6
elif angleArray[i]>-90 and curr_angle >=0 and angleArray[i] <=0: #left turn
lock=7
elif angleArray[i]<=-90 and curr_angle >=0 and angleArray[i] <=0: #right turn
lock=8
else:
print('error')
#print(lock)
if lock==1: #right turn
right_turn()
xyz=tuoluoyi.getRollPitchYaw()
if xyz[2]/pI*180 <=angleArray[i]:
no_move()
mode=1
currTime=robot.getTime()
elif lock==2: #left turn
left_turn()
xyz=tuoluoyi.getRollPitchYaw()
if xyz[2]/pI*180 >=angleArray[i] or (xyz[2]/pI*180 <=-175 and xyz[2]/pI*180>=-180):
no_move()
mode=1
currTime=robot.getTime()
elif lock==3: #right turn
right_turn()
xyz=tuoluoyi.getRollPitchYaw()
if xyz[2]/pI*180 <=angleArray[i]:
no_move()
mode=1
currTime=robot.getTime()
elif lock==4: #left turn
left_turn()
xyz=tuoluoyi.getRollPitchYaw()
if xyz[2]/pI*180 >=angleArray[i]:
no_move()
mode=1
currTime=robot.getTime()
elif lock==5: #left turn
left_turn()
xyz=tuoluoyi.getRollPitchYaw()
if xyz[2]/pI*180 >=angleArray[i]:
no_move()
mode=1
currTime=robot.getTime()
elif lock==6: #right turn
right_turn()
xyz=tuoluoyi.getRollPitchYaw()
if xyz[2]/pI*180 <=angleArray[i] and xyz[2]/pI*180>=0:
no_move()
mode=1
currTime=robot.getTime()
elif lock==7: #left turn
right_turn()
xyz=tuoluoyi.getRollPitchYaw()
if xyz[2]/pI*180 <=angleArray[i] and xyz[2]/pI*180<=0:
no_move()
mode=1
currTime=robot.getTime()
elif lock==8: #right turn
left_turn()
xyz=tuoluoyi.getRollPitchYaw()
if xyz[2]/pI*180 >=angleArray[i] and xyz[2]/pI*180<=0:
no_move()
mode=1
currTime=robot.getTime()
elif mode==1:
if robot.getTime()-currTime <=1.55:#3.978733:
#count+=1
#print(count,robot.getTime())
leftSpeed = pI
rightSpeed = pI
wheels[0].setVelocity(leftSpeed)
wheels[1].setVelocity(rightSpeed)
wheels[2].setVelocity(leftSpeed)
wheels[3].setVelocity(rightSpeed)
else:
#print(lock,angleArray[i])
i+=1
lock=0
no_move()
if going_ideal:
#if at the end of RRT, switch modes
if i==len_rrt-1:
mode=3
#if heading to ideal, set at_ideal to be true
at_ideal = True
going_ideal = False
else:
mode = 0
elif ending:
if i==len_rrt-1:
mode = 2
#mode=5
else:
mode = 0
elif do_job_now:
#print("from do_job_now")
if i==len_rrt-1:
mode = 5
#mode=5
else:
mode = 0
elif i == len_rrt-4:
mode = 3
#rrt_theta = []
else:
mode=0
#moving = 0
return mode #,lock,i,delta_time,currTime,nextTime
#######################
# ideal waiting stuff #
#######################
def check_queue(rec_sock):
    # ask the server for the job queue; the RESP JOBS reply is handled in the main loop
    server_sock.send(bytes('REQ JOBS NONE '+CAR,EN))
    return False
#ideal position does not work well with u shaped obstacles since the car will wait
#on the outside of the wall
def go_ideal():
global gps, ccoords, going_ideal, mode,my_rrt
#test ideal_pos
my_pos = gps.getValues()
ideal = IdealPos.Ideal_Pos(r1=15,r2=20,show=True)
my_pos = conv_coords(my_pos)
#pos is a list of all reported positions (x,z)
#ind is a sorted index by smallest distance
pos, ind = ideal.get_ideal(my_pos[0],my_pos[2],ccoords[0],ccoords[2])
#get coordinates of ideal waiting spot from positions and index
ind_no_obs = 0
#while position has an obstacle, update index to next shortest distance
while my_rrt.collision_check_no_path(pos[ind[ind_no_obs]],my_rrt.obstacleList) == 0:
ind_no_obs = ind_no_obs + 1
print("obs index "+str(ind_no_obs))
print("position: "+str(pos[ind[ind_no_obs]]))
#change to fit coordinate convention
ideal_wait = [pos[ind[ind_no_obs]][0],0,pos[ind[ind_no_obs]][1]]
rrt_new(my_pos,ideal_wait)
#mode = 2
going_ideal = True
return True
#############################
# collision avoidance stuff #
#############################
##TODO!!!!: finish implementing collision avoidance,
# need to use function to add obstacles
# need update communications implementation
def query_coll():
    # report this car's current GPS position to the server for collision checking
    data = gps.getValues()
    out_message = bytes("REQ COLL COORDS "+ str(data[0]) +" "+ str(data[1]) +" "+ str(data[2]) + " "+CAR,EN)
    server_sock.send(out_message)
def resp_coll():
    # respond to a server collision query with this car's current GPS position
    data = gps.getValues()
    out_message = bytes("RESP COLL COORDS "+ str(data[0]) +" "+ str(data[1]) +" "+ str(data[2]) + " "+CAR,EN)
    server_sock.send(out_message)
def handle_collision():
return False
def send_fin_task():
data = "RESP HELP DONE "+CAR
out_message = bytes(data,EN)
server_sock.send(out_message)
def add_obstacle(c,size):
global my_rrt
obs_x = c[0]-0.5*size
obs_z = c[2]-0.5*size
my_rrt.add_obstacle([obs_x,obs_z],size)
def add_obstacles(obs_list):
    for (ox, oz, size) in obs_list:
        add_obstacle([ox, 0, oz], size)
###############################################fire car move
def distance(pos1, pos2):
return math.sqrt((pos1[0]-pos2[0])**2+(pos1[1]-pos2[1])**2)
def move_straight(pos2, index):
data = gps.getValues()
angle = -1000
if index == 1:
if data[0] < pos2[0]:
go_straight()
data = gps.getValues()
else:
no_move()
angle = 180
elif index == 2:
if data[2] > pos2[1]:
go_straight()
data = gps.getValues()
else:
no_move()
angle = -90
elif index == 3:
if data[0] > pos2[0]:
go_straight()
data = gps.getValues()
else:
no_move()
angle = 0
elif index == 0:
if data[2] < pos2[1]:
print(data[2])
print(pos2[1])
go_straight()
data = gps.getValues()
print("here")
else:
no_move()
angle = 90
return angle
def move(pos1, pos2, index):
global move_lock_4_1, move_lock_4_2, move_lock_4_3, move_lock_4_4, move_lock_4_5, move_lock_4_6, move_lock_1, move_lock_2, move_lock_3, move_lock_4
global move_thresh_angle
move_return = -1
if index == 4:
#print("here?")
xyz = tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
# get thresh angle, the angle to stop turning
if pos1[0] < pos2[0]:
thresh_angle = 90
elif pos1[0] > pos2[0]:
thresh_angle = -90
# turn to that angle
if move_lock_4_1 == 0:
left_turn()
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if round(curr_angle) == thresh_angle:
no_move()
move_lock_4_1 = 1
print("here!!!!!!!!!!!")
elif move_lock_4_1 == 1 and move_lock_4_2 == 0:
data = gps.getValues()
if pos1[0] > pos2[0] and data[0] > pos2[0]:
go_straight()
data = gps.getValues()
#print(str(data[0]))
#print(str(pos2[0]))
elif pos1[0] < pos2[0] and data[0] < pos2[0]:
go_straight()
data = gps.getValues()
else:
move_lock_4_2 = 1
no_move()
print("finish moving left or right")
elif move_lock_4_2 == 1:
data = gps.getValues()
if pos2[1] > data[2] and pos2[1] > pos1[1]:
#thresh_angle = 180
if move_lock_4_3==0:
left_turn()
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
print(curr_angle)
if -1 < round(curr_angle) < 1:
print("here")
move_lock_4_3 = 1
no_move()
elif move_lock_4_5 == 0:
go_straight()
data = gps.getValues()
#print(str(data[2]))
#print(str(pos2[1]))
#if data[2] >= pos2[1]:
#print("here!!!!")
#move_lock_4_5 = 1
#move_return = 1
#no_move()
elif pos2[1] < data[2] and pos2[1] < pos1[1]:
#thresh_angle = 0
#print("hello")
if move_lock_4_4==0:
right_turn()
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if abs(curr_angle) > 179:
move_lock_4_4 = 1
no_move()
elif data[2] > pos2[1] and move_lock_4_6 == 1:
go_straight()
data = gps.getValues()
if data[2] < pos2[1]:
move_return = 1
no_move()
else:
print("I am fking here!")
no_move()
move_return = 1
else:
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if move_lock_1 == 0:
print("at lock 1")
ds_l = ds[0].getValue()
ds_r = ds[1].getValue()
if ds_l < 100 or ds_r < 100:
no_move()
#move_thresh_angle = t_angle[index]
else:
move_thresh_angle = move_straight(pos2, index)
if move_thresh_angle != -1000:
print(move_thresh_angle)
move_lock_1 = 1
elif curr_angle < (move_thresh_angle-0.7) or curr_angle > (move_thresh_angle +0.7) and move_lock_2 == 0:
print("at lock 2")
left_turn()
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if curr_angle > (move_thresh_angle-0.7) and curr_angle < (move_thresh_angle +0.7):
move_lock_2 = 1
no_move()
elif move_lock_3 == 0:
print("at lock 3")
index = (index+1)%4
move_lock_3 = 1
elif move_lock_4 == 0:
print("at lock 4")
print(index)
print(pos2)
index_temp = (index+1)%4
temp = move_straight(pos2, index_temp)
if temp != -1000:
move_lock_4 = 1
move_return = 1
else:
move_return = 1
if move_return == 1:
no_move()
move_lock_4_1 = 0
move_lock_4_2 = 0
move_lock_4_3 = 0
move_lock_4_4 = 0
move_lock_1 = 0
move_lock_2 = 0
move_lock_3 = 0
move_lock_4 = 0
move_thresh_angle = -361
return 1
return move_return
def scan(index):
global scan_lock, scan_lock_1, scan_lock_2, scan_lock_3, scan_lock_4
ret = -1
xyz = tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if index == 1 or index == 3: #thresh angle for scanning right and left side
thresh_angle_1 = -180+dum_angle
if thresh_angle_1 == -180:
thresh_angle_1 += 1
if thresh_angle_1 < -180:
thresh_angle_1 = 360 + thresh_angle_1
thresh_angle_2 = 0 + dum_angle
if thresh_angle_2 == 0:
thresh_angle_2 += 1
elif index == 0 or index == 2: #thresh angle for scanning top and bot side
thresh_angle_1 = 90+dum_angle
thresh_angle_2 = -90+dum_angle
if index == 1 and scan_lock == 0: #scan right
if scan_lock_1 == 0:
left_turn()
lt= (ls_top.getValue() + ls_bot.getValue())/2
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if round(curr_angle) == int(thresh_angle_2):
scan_lock_1 = 1
no_move()
if lt > dum_light[index]:
dum_light[index] = lt
dum_imu[index] = curr_angle
if scan_lock_1 == 1:
right_turn()
lt= (ls_top.getValue() + ls_bot.getValue())/2
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if round(curr_angle) == int(thresh_angle_1):
print("finished rotating!")
scan_lock_1 = 0
scan_lock = 1
no_move()
if lt > dum_light[index]:
dum_light[index] = lt
dum_imu[index] = curr_angle
elif index == 2 and scan_lock == 0: #scan bot (which is shown top)
if scan_lock_2 == 0:
left_turn()
lt= (ls_top.getValue() + ls_bot.getValue())/2
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if round(curr_angle) == int(thresh_angle_1):
scan_lock_2 = 1
no_move()
if lt > dum_light[index]:
dum_light[index] = lt
dum_imu[index] = curr_angle
if scan_lock_2 == 1:
right_turn()
lt= (ls_top.getValue() + ls_bot.getValue())/2
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if round(curr_angle) == int(thresh_angle_2):
print("finished rotating!")
scan_lock_2 = 0
scan_lock = 1
no_move()
if lt > dum_light[index]:
dum_light[index] = lt
dum_imu[index] = curr_angle
elif index == 3 and scan_lock == 0: # scan left
if scan_lock_3 == 0:
left_turn()
lt= (ls_top.getValue() + ls_bot.getValue())/2
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if round(curr_angle) == int(thresh_angle_1):
scan_lock_3 = 1
no_move()
if lt > dum_light[index]:
dum_light[index] = lt
dum_imu[index] = curr_angle
if scan_lock_3 == 1:
right_turn()
lt= (ls_top.getValue() + ls_bot.getValue())/2
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if round(curr_angle) == int(thresh_angle_2):
print("finished rotating!")
scan_lock_3 = 0
scan_lock = 1
no_move()
if lt > dum_light[index]:
dum_light[index] = lt
dum_imu[index] = curr_angle
elif index == 0 and scan_lock == 0: #scan top (which is shown bot)
if scan_lock_4 == 0:
left_turn()
lt= (ls_top.getValue() + ls_bot.getValue())/2
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if round(curr_angle) == int(thresh_angle_2):
scan_lock_4 = 1
no_move()
if lt > dum_light[index]:
dum_light[index] = lt
dum_imu[index] = curr_angle
elif scan_lock_4 == 1:
right_turn()
lt= (ls_top.getValue() + ls_bot.getValue())/2
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if round(curr_angle) == int(thresh_angle_1):
print("finished rotating!")
scan_lock_4 = 0
scan_lock = 1
no_move()
if lt > dum_light[index]:
dum_light[index] = lt
dum_imu[index] = curr_angle
if scan_lock == 1:
print("scan_lock is set to 1")
scan_lock = 0
return 1
return ret
def get_to_operation(angle):
left_turn()
xyz=tuoluoyi.getRollPitchYaw()
curr_angle = xyz[2]/pI*180
if round(curr_angle) == int(angle):
no_move()
return 1
else:
return -1
def move_to_fire():
global fire_lock_0, fire_lock_1, fire_lock_2, fire_lock_3, fire_lock_4, fire_lock_5, fire_lock_6, fire_lock_7, fire_lock_8, fire_lock_9
global arm_loc, stop_index, index, min_dist_index, max_light_index
global first_time
ret = -1
#data = gps.getValues()
if fire_lock_0 == 0:
data = gps.getValues()
arm_loc = (data[0], data[2])
#finding closest point of interest
dist = []
#print("here?")
dist.append(distance(arm_loc, dum_loc[0])) #top
dist.append(distance(arm_loc, dum_loc[1])) #right
dist.append(distance(arm_loc, dum_loc[2])) #bot
dist.append(distance(arm_loc, dum_loc[3])) #left
min_dist_index = dist.index(min(dist[0], dist[1], dist[2], dist[3]))
print("The min dist index is: " + str(min_dist_index))
fire_lock_0 = 1
#move to point of interest
elif fire_lock_1 == 0:
#print("Now the min dist index is: " + str(min_dist_index))
temp = move(arm_loc, dum_loc[min_dist_index], 4)
#print(str(dum_loc[min_dist_index]))
#print(str(arm_loc))
if temp == 1:
fire_lock_1 = 1
print("moved to point of interest")
#scan the area
elif fire_lock_1 == 1 and fire_lock_2 == 0:
temp = scan(min_dist_index)
if temp == 1:
print("finished scanning")
fire_lock_2 = 1
index = (min_dist_index+1)%4
#move to the other sides
elif fire_lock_2 == 1 and fire_lock_3 == 0:
if index != min_dist_index:
#print("try to move to other side")
data = gps.getValues()
arm_loc = (data[0], data[2])
#if dum_loc[index] != (-100,-100):
if fire_lock_4 == 0:
print("At 4")
temp1 = move(arm_loc, dum_loc[index], index)
if temp1 == 1:
fire_lock_4 = 1
elif fire_lock_4 == 1 and fire_lock_5 ==0:
print("At 5")
temp2 = scan(index)
if temp2 == 1:
fire_lock_5 = 1
#index = (index+1)%4
elif fire_lock_5 == 1:
print("At 6")
index = (index+1)%4
print("index is:" + str(index))
print("min_dist_index" + str(min_dist_index))
fire_lock_4 = 0
fire_lock_5 = 0
if index == min_dist_index:
fire_lock_3 = 1
max_light_index = dum_light.index(max(dum_light[0],dum_light[1],dum_light[2],dum_light[3]))
stop_index = min_dist_index-1
if stop_index < 0:
stop_index = 3
#find which side to go
elif fire_lock_3 == 1 and fire_lock_6 == 0:
print("max_light_index is:" + str(max_light_index))
print("current index is: " + str(stop_index))
if stop_index != (max_light_index+1)%4 or first_time == 0:
if first_time == 0:
first_time = 1
if fire_lock_7 == 0 and fire_lock_8 == 0:
stop_index = (stop_index+1)%4
data = gps.getValues()
arm_loc = (data[0], data[2])
fire_lock_7 = 1
elif fire_lock_7 == 1:
temp = move(arm_loc, dum_loc[stop_index], stop_index)
#stop_index = (stop_index-1)%4
if temp == 1:
fire_lock_7 = 0
else:
fire_lock_8 = 1
fire_lock_6 = 1
#rotate to the final angle
elif fire_lock_8 == 1:
print("here!!!!")
temp = get_to_operation(dum_imu[max_light_index])
if temp == 1:
fire_lock_8 = 0
fire_lock_9 = 1
elif fire_lock_9 == 1:
fire_lock_1 = 0
fire_lock_2 = 0
fire_lock_3 = 0
fire_lock_4 = 0
fire_lock_5 = 0
fire_lock_6 = 0
fire_lock_7 = 0
fire_lock_8 = 0
fire_lock_9 = 0
ret = 1
return ret
###############################################motor_move
motor1_direction=0
motor1_stop=0
motor2_stop=0
motor3_stop=0
motor4_stop=0
max_fire1 = 0
temp_fire1 = 0
max_fire2 = 0
max_fire3 = 0
temp_fire2 = 0
temp_fire3 = 0
theta_1=0
distance_fire=0
theta_0=0
max_2motors=0
arm_mode = False
def robot_arm_moving():
global motor1_stop, motor2_stop, motor3_stop, motor4_stop,temp_fire1,\
max_fire1,temp_fire2,max_fire2,theta_1,distance_fire,theta_0,\
temp_fire3,max_fire3,motor1_direction,max_2motors,arm_mode,task_done
#movement for motor 1
if(motor1_stop==0):
#print('1')
if motor1_direction == 0:
if(light_sensor_lu_left.getValue()>light_sensor_lu_right.getValue()):
motor1_direction=1
else:
motor1_direction=2
elif(motor1_direction==1): #turn left
temp_fire1 = light_sensor_1.getValue()
print("fire readings: " + str(temp_fire1))
if(temp_fire1>=max_fire1):
wheels[4].setVelocity(0.5)
max_fire1=temp_fire1
else:
wheels[4].setVelocity(0)
motor1_stop=1
elif(motor1_direction==2): #turn right
temp_fire1 = light_sensor_1.getValue()
print("fire readings: " + str(temp_fire1))
if(temp_fire1>=max_fire1):
wheels[4].setVelocity(-0.5)
max_fire1=temp_fire1
else:
wheels[4].setVelocity(0)
motor1_stop=1
#movement for motor 3 to test fire location
elif(motor3_stop==0):
#print('2')
temp_fire2 = light_sensor_2.getValue()
if(temp_fire2>=max_fire2):
wheels[6].setVelocity(0.5)
max_fire2=temp_fire2
else:
wheels[6].setVelocity(0)
motor3_stop=1
theta_1 = motor4_imu.getRollPitchYaw()
distance_fire=0.18/math.cos(theta_1[1])#+0.065*math.tan(theta_1[1])
if((distance_fire-0.27)/0.18>=0.72):
theta_0=0.785
else:
theta_0=math.acos((distance_fire-0.27)/0.18)
#get the theta1 angle
elif(motor3_stop==1):
#print(motor3_stop)
temp_theta_0=motor2_imu.getRollPitchYaw()
if((pI/2-math.fabs(temp_theta_0[1]))>theta_0):
wheels[5].setVelocity(0.5)
else:
motor3_stop=2
#print(motor3_stop)
wheels[5].setVelocity(0)
motor2_stop=1
    #move motor3 to the correct position depending on angle theta0
elif(motor2_stop==1):
#print('4')
#print((motor4_imu.getRollPitchYaw())[1]/pI*180)
if((motor4_imu.getRollPitchYaw())[1]>0):
wheels[6].setVelocity(0.5)
else:
wheels[6].setVelocity(0)
motor2_stop=2
#
elif(motor2_stop==2):
temp_fire3 = light_sensor_3.getValue()
if(temp_fire3>=max_fire3):
wheels[7].setVelocity(0.5)
max_fire3=temp_fire3
else:
wheels[7].setVelocity(0)
motor2_stop=3
#
elif(motor2_stop==3):
if(light_sensor_3.getValue() < 500):
temp_fire_s=light_sensor_3.getValue()
if(temp_fire_s>=max_2motors):
wheels[7].setVelocity(-0.5)
wheels[6].setVelocity(0.3)
max_2motors=temp_fire_s
else:
wheels[6].setVelocity(0)
wheels[7].setVelocity(0)
motor2_stop=4
else:
motor2_stop=4
wheels[6].setVelocity(0)
wheels[7].setVelocity(0)
task_done = True
arm_mode = False
######################################################
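# Mode summary for the main control loop below (inferred from its handlers):
#   0/1 : follow the RRT path (turn phase / drive-straight phase)
#   2   : idle, path finished
#   3   : RRT done; poll the server job queue (check_queue)
#   4   : not our turn; drive to an ideal waiting position (go_ideal)
#   5   : at the target; locate the fire (move_to_fire)
#   6/7 : collision-avoidance query / wait for server
#   8   : operate the arm (robot_arm_moving)
#   10  : test mode (go_ideal)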
while robot.step(TIME_STEP) != -1:
# xyz = []
#xyz=tuoluoyi.getRollPitchYaw()
# print('x=', xyz[0]/3.14*180,' ','y=', xyz[1]/3.14*180,' ','z=', xyz[2]/3.14*180)
#print(jieshouqi.getDataSize())
leftSpeed = 10.0
rightSpeed = 10.0
v1 = ds[0].getValue()
v2 = ds[1].getValue()
#print("ds_l is: " + str(v1))
#print("ds_r is: " + str(v2))
    #if there is a message to be received
if jieshouqi.getQueueLength() > 0:
if jieshouqi.getDataSize() > 0:
message=jieshouqi.getData()
jieshouqi.nextPacket()
#parse message
r_message = message.decode(EN)
#statement to go to a position via RRT
if "GO" in r_message:
#rrt_reset()
print(r_message)
coords,dinfo = my_parser.coords(r_message)
start = gps.getValues()
                ccoords = [float(a) for a in coords]  # rebuilt each time so repeated GO messages do not grow the list
                start = conv_coords(start)
                ccoords = conv_coords(ccoords)
mode = 0
#TODO: set up actual task
                angle = math.radians(float(dinfo[0]))  # IMU heading of the dummy car
                width = float(dinfo[1])                # dummy-car width
                length = float(dinfo[2])               # dummy-car length
set_global_angle(float(dinfo[0]))
#print(dum_angle)
#set the four location to traverse.
dum_loc[0] = (float(coords[0]) + (length+dim[2]/2)*math.sin(angle),float(coords[2])+(length+dim[2]/2)*math.cos(angle)) #top
dum_loc[1] = (float(coords[0])+ (width+dim[2])*math.cos(angle),float(coords[2])-(width+dim[2])*math.sin(angle)) #right
dum_loc[2] = (float(coords[0]) - (length+dim[2]/2)*math.sin(angle),float(coords[2])-(length+dim[2]/2)*math.cos(angle)) #bot
dum_loc[3] = (float(coords[0]) - (width+dim[2])*math.cos(angle),float(coords[2])+(width+dim[2])*math.sin(angle)) #left
print(dum_loc[0])
print(dum_loc[1])
print(dum_loc[2])
print(dum_loc[3])
if my_rrt is None:
rrt_create(start,ccoords)
else:
rrt_new(start,ccoords)
in_transit = True
elif "RESP" in r_message:
#statement to check message whose turn it is for going to ideal position
if "JOBS" in r_message:
#its FIRE car's turn to go, go into specific task mode
if (CAR+"T") in r_message:
print("my turn!")
my_turn = True
mode = 5
else:
print(r_message)
print("not my turn")
#go to ideal position mode
mode = 4
#statement to check if its their turn to go for collision protocol
elif "COLL" in r_message:
print(r_message)
#TODO: implement add obstacles,implement go to node function
#add obstacles to rrt planner list
#find new node to traverse to
#rewire rrt to go to that node
#send message back to server for confirmation
#continue on rrt
            #condition to respond to a collision-protocol step for the server's query
elif "REQ" in r_message:
if "COLL" in r_message:
resp_coll()
#RRT is still processing
elif 0 <= mode and mode < 2:
print(mode)
#check if car senses another car
#if it does, go into collision avoidance mode
#else, move car on rrt
#if in_transit:
# for sensor in ds:
# if sensor.getValue() < 300:
# mode = 6
if 0 <= mode and mode < 2:
move_car_on_rrt()
#RRT is completed and check to go to ideal
elif mode == 3:
in_transit = False
#rrt_reset()
#if not at ideal position, then check queue
check_queue(jieshouqi)
print(mode)
#if needs to go to ideal position
elif mode == 4:
in_transit = False
print(mode)
#not car's turn and not at ideal, then go to ideal
if not my_turn and not at_ideal:
print("GOING IDEAL")
go_ideal()
#at_ideal = True
#if not car's turn and is at ideal, then stay still and check queue
elif not my_turn and not going_ideal:
no_move()
mode = 3
#it is this car's turn, go back in front of car, then mode 5 which does its job
else:
mode = 0
my_pos = gps.getValues()
my_pos = conv_coords(my_pos)
do_job_now = True
rrt_new(my_pos,ccoords)
at_ideal = False
#do specific task
elif mode == 5:
print("finding fire...")
#should prepare to operate on car
#then do its specific task
#from there, send fin
task_done = False
temp = move_to_fire()
do_job_now = False
if temp == 1:
#start_find = 0
no_move()
arm_mode = True
print("done finding fire!")
#go to arm mode
mode = 8
#collision avoidance protocol
elif mode == 6:
print(mode)
no_move()
#request server for obstacles
query_coll()
#mode = 7
elif mode == 7:
print(mode)
#wait until further notice to run new RRT
#mode = 6
elif mode == 8:
robot_arm_moving()
if task_done == True:
send_fin_task()
my_pos = gps.getValues()
my_pos = conv_coords(my_pos)
            add_obstacle(ccoords, float(dinfo[2]) * 10 - 1)
end_pos = conv_coords([2.5,0.04,2.0])
rrt_new(my_pos,end_pos)
ending = True
mode = 0
print('done with arm')
#test mode
elif mode == 10:
go_ideal()
print(mode)
##franks code i think, to go around car
#define goal location.
# goal_x = float(coord[1])*10+30
# goal_y = 60-(float(coord[5])*10+30)
# angle = math.radians(float(coord[7])) # imu of dum car
# length = float(coord[9]) # length of dum car
# width = float(coord[11]) # width of dum car
#dum_len = length
#dum_wid = width
#dum_gps = (float(coord[1]),float(coord[5]))
# set_global_angle(float(coord[7])) # set global variable dum_angle for future use
#set the four location to traverse.
# dum_loc[0] = (float(coord[1]) + (length+dim[2]/2)*math.sin(angle),float(coord[5])+(length+dim[2]/2)*math.cos(angle)) #top
# dum_loc[1] = (float(coord[1])+ (width+dim[2])*math.cos(angle),float(coord[5])-(width+dim[2])*math.sin(angle)) #right
# dum_loc[2] = (float(coord[1]) - (length+dim[2]/2)*math.sin(angle),float(coord[5])-(length+dim[2]/2)*math.cos(angle)) #bot
# dum_loc[3] = (float(coord[1]) - (width+dim[2])*math.cos(angle),float(coord[5])+(width+dim[2])*math.sin(angle)) #left
# print(str(dum_loc[0]))
# print(str(dum_loc[1]))
# print(str(dum_loc[2]))
# print(str(dum_loc[3]))
##############specific task, i think
# if start_find == 1:
# temp = move_to_fire()
# if temp == 1:
# start_find = 0
# print("done!!!!")
#############doing rrt then after go to specific task <- lu's code i think
# if start_rrt == 1:
# temp = move_car_on_rrt()
#print(temp[0])
# print(temp[0])
# if(temp[0] == 2):
# start_rrt = 0
# start_find = 1
|
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
from subscripts import bls_api_query_app as BLS
import pdb, json
Base = declarative_base()
BLS_API_KEY = 'c4aceae070ec4aa88bd85a9323947770'
# A year has many consumer demographics
# A consumer demographic has many expenditures
# An expenditure belongs to a consumer demographic
# A consumer demographic belongs to a year
def get_bls_row(query_code):
df = BLS.get_series([query_code], 2002, 2012, BLS_API_KEY)
print('\n')
years = [year.year for year in df.axes[0]]
values = [year[0] for year in df.values]
return(list(zip(years,values)))
#############################################################
def get_expenditure_by_demo_code(item_code, demo_code, decile, year):
query_code = 'CXU' + item_code + demo_code + decile +'M'
toops = get_bls_row(query_code)
newList = []
try:
newList.append([toop[1] for toop in toops if int(toop[0]) == year][0])
except IndexError:
pass
return newList
categories = ['INCAFTTX', 'HOUSING', 'FOODHOME', 'FOODAWAY', 'HEALTH', 'ALCBEVG', 'APPAREL', 'TRANS', 'ENTRTAIN']
# takes a demographic code, decile, and year and returns a dict of all expenses
def get_all_expenses_for_demo_and_year(demo_code, decile, year):
expenses = {}
for category in categories:
expenses[category] = get_expenditure_by_demo_code(category, demo_code, decile, year)
return expenses
####################
def get_all_expenses_all_levels(year):
# demo_code = 'LB13'
# levels = ['03', '04', '06', '08', '09'] # edu_levels
# ed_level_labels = ['sub_hs', 'high_school', 'AD', 'BD', 'MA+']
# levels = ['02','03','04','05','06'] # income_quants
# demo_code = 'LB01'
levels = ['03','04','05'] # housing_levels
demo_code = 'LB08'
all_levels = {}
for level in levels:
expenses = get_all_expenses_for_demo_and_year(demo_code,level, year)
all_levels[level] = expenses
return all_levels
####################
def get_all_data_all_years(start, end):
alldata_allyears = {}
for year in range(start,end):
alldata_allyears[year] = get_all_expenses_all_levels(year)
return alldata_allyears
data = get_all_data_all_years(2002,2012)
with open('BLS_data_housing.json', 'w') as BLS_data:
json.dump(data, BLS_data)
#### This script makes so many gosh darn requests, we filed for many API keys with many fake email addys
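# Example of the series ID construction used above (hypothetical values):
# item_code='HOUSING', demo_code='LB08', decile='03' yields the BLS series
# 'CXUHOUSINGLB0803M', which get_bls_row() then queries for 2002-2012.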
|
#!/usr/bin/env /data/mta/Script/Python3.8/envs/ska3-shiny/bin/python
#################################################################################
# #
# extract_goes.py: extract GOES-R data and plot the results #
# #
# Note: this script works only after: 2020:077 #
# #
# author: t. isobe (tisobe@cfa.harvard.edu) #
# #
# last update: Mar 09, 2021 #
# #
# P1 1.0 - 3.0 MeV protons (Counts/cm2 sec sr MeV) Uncorrected #
# P2 3.4 - 11.0 MeV protons (Counts/cm2 sec sr MeV) Uncorrected #
# P5 40.0 - 98.0 MeV protons (Counts/cm2 sec sr MeV) Uncorrected #
# HRC Proxy = 6000 * (11.64-38.1MeV) + 270000 * (40.3-73.4MeV) #
# + 100000 * (83.7-242.0MeV) #
# #
#################################################################################
import math
import re
import sys
import os
import string
import time
import Chandra.Time
#
#--- reading directory list
#
path = '/data/mta/Script/Interrupt/Scripts/house_keeping/dir_list'
with open(path, 'r') as f:
data = [line.strip() for line in f.readlines()]
for ent in data:
atemp = re.split(':', ent)
var = atemp[1].strip()
line = atemp[0].strip()
exec("%s = %s" %(var, line))
#
#--- append path
#
sys.path.append(bin_dir)
sys.path.append(mta_dir)
import mta_common_functions as mcf
#
#--- original data location
#
goes_r = '/data/mta4/Space_Weather/GOES/Data/goes_data_r.txt'
#
#--- a day in seconds
#
aday = 86400.0
#-------------------------------------------------------------------------------
#-- extract_goes_data: read goes r data and extract the portion for this period -
#-------------------------------------------------------------------------------
def extract_goes_data(event, start, stop):
"""
    read goes r data and extract the portion for this period
input: event --- the name of the event
start --- starting time in <yyyy>:<mm>:<dd>:<hh>:<mm>
stop --- stopping time in <yyyy>:<mm>:<dd>:<hh>:<mm>
output: <web_dir>/GOES_data/<event>_goes.txt
"""
#
#--- year of starting time
#
atemp = re.split(':', start)
syear = float(atemp[0])
#
#--- convert time in Chandra Time
#
start = time.strftime('%Y:%j:%H:%M:00', time.strptime(start, '%Y:%m:%d:%H:%M'))
stop = time.strftime('%Y:%j:%H:%M:00', time.strptime(stop, '%Y:%m:%d:%H:%M'))
start = int(Chandra.Time.DateTime(start).secs)
stop = int(Chandra.Time.DateTime(stop).secs)
#
#--- this script works only after 2020:077
#
if start < 700790394:
print('Starting time is before the valid date (2020:077). Terminating the process.')
exit(1)
#
#--- set to data collecting period
#
pstart = start - 2 * aday
period = int((stop - start) / (5 * aday)) + 1
pstop = start + 5 * period * aday
#
#--- original data has the following columns
#--- Time P1 P2A P2B P3 P4 P5 P6 P7 P8A P8B P8C P9 P10 HRC Proxy
#
data = mcf.read_data_file(goes_r)
hline = 'Science Run Interruption: ' + event + '\n'
hline = hline + 'dofy p1 p2 p5 hrc prox\n'
hline = hline + '-' * 65 + '\n'
line = ''
for ent in data:
if ent[0] == '#':
continue
atemp = re.split('\s+', ent)
stime = int(Chandra.Time.DateTime(atemp[0]).secs)
if stime < pstart:
continue
elif stime > pstop:
break
#
#--- time in ydate
#
ctime = chandra_time_to_yday(stime, syear)
#
#--- p1: 1020 - 1860 keV
#--- p2a: 1900 - 2300 keV
#--- p2b: 2310 - 3340 keV
#
        p1 = float(atemp[1])
        p2a = float(atemp[2])
        p2b = float(atemp[3])
#
#--- combine the channels, weighted roughly by their band widths in MeV
#
        p1n = (0.83 * p1 + 0.4 * p2a + 1.0 * p2b) / 2.3
#
#--- p3: 3400 - 6480 keV
#--- p4: 5840 - 11000 keV
#
p3 = float(atemp[4])
p4 = float(atemp[5])
p2n = (3.08 * p3 + 5.16 * p4) / 7.6
#
#--- p8b: 99900 - 118000 keV
#--- p8c: 115000 - 143000 keV
#
p8b = float(atemp[10])
p8c = float(atemp[11])
p5n = (18.1 * p8b + 28.0 * p8c) / 43.1
hprox = atemp[14]
line = line + str(stime) + '\t'
line = line + '%3.3e\t' % p1n
line = line + '%3.3e\t' % p2n
line = line + '%3.3e\t' % p5n
line = line + hprox + '\n'
hline = hline + '%3.4f\t' % ctime
hline = hline + '%3.3e\t' % p1n
hline = hline + '%3.3e\t' % p2n
hline = hline + '%3.3e\t' % p5n
hline = hline + hprox + '\n'
#
#--- print out the data
#
ofile = web_dir + 'GOES_data/' + event + '_goes.txt'
with open(ofile, 'w') as fo:
fo.write(line)
ofile = wdata_dir + event + '_goes.txt'
with open(ofile, 'w') as fo:
fo.write(hline)
#--------------------------------------------------------------------
#-- chandra_time_to_yday: convert chandra time to ydate --
#--------------------------------------------------------------------
def chandra_time_to_yday(stime, syear):
"""
convert chandra time to ydate
input: stime --- time in seconds from 1998.1.1
syear --- year at the beginning of the data period
output: ydate --- ydate
"""
atime = Chandra.Time.DateTime(stime).date
btemp = re.split(':', atime)
year = float(btemp[0])
ydate = float(btemp[1])
hour = float(btemp[2])
mins = float(btemp[3])
sec = float(btemp[4])
ydate = ydate + (hour/24.0 + mins/1440.0 + sec/86400.0)
#
#--- if the date is over two years, keep counting from the first year
#
if year > syear:
if mcf.is_leapyear(syear):
base = 366
else:
base = 365
ydate += base
return ydate
#--------------------------------------------------------------------
#--- compute_goes_stat: computing GOES statistics                  ---
#--------------------------------------------------------------------
def compute_goes_stat(event, start):
"""
read data from goes data, and compute statistics
input: event --- event name
start --- interruption start time in <yyyy>:<mm>:<dd>:<hh>:<mm>
output: <stat_dir>/<event>_goes_stat
"""
#
#--- check the interruption period so that we can choose which data format to use
#
atemp = re.split(':', start)
syear = float(atemp[0])
nind = 0
if syear >= 2020:
nind = 1
#
#--- convert to ydate
#
start = time.strftime('%Y:%j:%H:%M:00', time.strptime(start, '%Y:%m:%d:%H:%M'))
atemp = re.split(':', start)
rstart = float(atemp[1]) + float(atemp[2]) / 24.0 + float(atemp[3]) / 1440.0
#
#--- read the data file
#
ifile = wdata_dir + event + '_goes.txt'
data = mcf.read_data_file(ifile)
#
#--- initialize
#
p1_list = [0.0, 0.0, 0.0, 1.0e10, 0.0, 0.0, 0.0]
p2_list = [0.0, 0.0, 0.0, 1.0e10, 0.0, 0.0, 0.0]
p5_list = [0.0, 0.0, 0.0, 1.0e10, 0.0, 0.0, 0.0]
hp_list = [0.0, 0.0, 0.0, 1.0e10, 0.0, 0.0, 0.0]
p1_int_val = 0.0
p2_int_val = 0.0
p5_int_val = 0.0
hp_int_val = 0.0
    ind = 0   #---- indicator whether the loop passed the interruption time
for ent in data:
atemp = re.split('\s+|\t+', ent)
btemp = re.split('\.', atemp[0])
if ent and btemp[0].isdigit():
atemp = re.split('\s+|\t+', ent)
if len(atemp) < 4:
continue
val0 = float(atemp[0]) #--- time
val1 = float(atemp[1]) #--- p1
val2 = float(atemp[2]) #--- p2
val3 = float(atemp[3]) #--- p5
p1_list = update_data_set(val0, val1, p1_list)
p2_list = update_data_set(val0, val2, p2_list)
p5_list = update_data_set(val0, val3, p5_list)
if nind > 0:
val4 = float(atemp[4]) #--- hrc prox
                hp_list = update_data_set(val0, val4, hp_list)
#
#--- finding the value at the interruption
#
if rstart <= val0 and ind == 0:
p1_int_val = val1
p2_int_val = val2
p5_int_val = val3
if nind > 0:
hp_int_val = val4
ind = 1
#
#--- compute avg/std and create output
#
line = '\t\t\tavg\t\t\tmax\t\tTime\t\tmin\t\tTime\t\tValue at Interruption Started\n'
line = line + '-'*95 + '\n'
line = line + create_stat_line(p1_list, 'p1\t', p1_int_val)
line = line + create_stat_line(p2_list, 'p2\t', p2_int_val)
line = line + create_stat_line(p5_list, 'p5\t', p5_int_val)
if nind > 0:
line = line + create_stat_line(hp_list, 'hrc prox', hp_int_val)
ofile = stat_dir + event + '_goes_stat'
with open(ofile, 'w') as fo:
fo.write(line)
#----------------------------------------------------------------------------
#-- update_data_set: update min, max, sum, and sum of squares in the data list
#----------------------------------------------------------------------------
def update_data_set(ytime, val, dlist):
"""
    update min, max, sum, and sum of squares in the data list
input: ytime --- time in ydate
val --- value
dlist --- a list of data:
<sum of value>
<sum of value**2>
<max>
<min>
<time of max>
<time of min>
<# of data>
output: dlist --- updated data list
"""
if val > 0:
dlist[0] += val
dlist[1] += val * val
if val > dlist[2]:
dlist[2] = val
dlist[4] = ytime
if val < dlist[3]:
dlist[3] = val
dlist[5] = ytime
dlist[6] += 1
return dlist
#----------------------------------------------------------------------------
#-- create_stat_line: create a stat result line for a given data list -
#----------------------------------------------------------------------------
def create_stat_line(d_list, title, int_val):
"""
create a stat result line for a given data list
input: d_list --- a list of data
title --- a line head
int_val --- the value at the interruption
output: line --- a resulted line to be printed
"""
[davg, dstd] = compute_stat(d_list)
line = title + '\t%2.3e +/- %2.3e\t\t%2.3e\t%4.3f\t\t%2.3e\t%4.3f\t\t%2.3e\n'\
% (davg, dstd, d_list[2], d_list[4], d_list[3], d_list[5], int_val)
return line
#----------------------------------------------------------------------------
#-- compute_stat: compute avg and std --
#----------------------------------------------------------------------------
def compute_stat(d_list):
"""
compute avg and std
input: d_list --- a list of data
output [avg, std]
"""
if d_list[-1] > 0:
davg = d_list[0] / d_list[-1]
        try:
            dstd = math.sqrt((d_list[1] / d_list[-1]) - (davg * davg))
        except ValueError:
            #--- negative variance from floating-point rounding; use the sentinel
            dstd = -999
else:
davg = -999
dstd = -999
return [davg, dstd]
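#
#--- note: compute_stat uses the one-pass identity var = E[x^2] - mean^2;
#--- e.g. for values [1, 2, 3]: sum=6, sum of squares=14, n=3, mean=2,
#--- std = sqrt(14/3 - 4) = sqrt(2/3) ~ 0.816 (population std)
#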
#----------------------------------------------------------------------------
if __name__ == '__main__':
if len(sys.argv) > 2:
event = sys.argv[1]
start = sys.argv[2]
stop = sys.argv[3]
extract_goes_data(event, start, stop)
compute_goes_stat(event, start)
|
from filecache import filecache
import feedparser
from unidecode import unidecode
import urllib
import re
from nab.files import Searcher, Torrent
@filecache(60 * 60)
def _get_feed(url):
feed = feedparser.parse(url)
if feed['entries']:
return feed['entries']
else:
raise IOError("No results found")
def get_seeds(f):
if "description" in f:
match = re.search(r"(\d+) seed", f["description"])
if match:
return int(match.group(1))
match = re.search(r"seed(?:er(?:\(s?\))?)?:? (\d+)", f["description"])
if match:
return int(match.group(1))
if "torrent_seeds" in f:
return int(f["torrent_seeds"])
return None
def get_torrent_url(f):
for link in f.get('links', []):
        if link['type'] == 'application/x-bittorrent':
            # strip any query string and return
            return link['href'].split('?')[0]
# no link found
return None
class Feed(Searcher):
def __init__(self, url, name=None,
search_by=None, match_by=None, num_pages=1):
Searcher.__init__(self, search_by, match_by)
self.url = url
self.name = name or url
self.num_pages = num_pages
self.multipage = "{p}" in self.url
def _get_feed(self, url):
Feed.log.debug("Parsing feed at %s" % url)
        # retry up to three times
        feed = []
        for retry in range(3):
try:
feed = _get_feed(url)
except IOError:
continue
else:
break
if feed:
Feed.log.debug("Feed parsed")
else:
Feed.log.debug("No results found")
return feed
def search(self, term):
files = []
if isinstance(term, unicode):
term = unidecode(term)
term = urllib.quote(term)
# only search first few pages for files
for page in range(1, self.num_pages + 1):
results = self._get_feed(self.url.format(s=term, p=page))
# remember page 1 links so we can tell if the
# site is giving us the same page again
links = set([f["link"] for f in results])
if page == 1:
p1_links = set(links)
# break when no results or results are the same as page 1
if not results or (page != 1 and p1_links == links):
break
for f in results:
url = get_torrent_url(f)
magnet = f.get("torrent_magneturi")
files.append(Torrent(f["title"], url, magnet, get_seeds(f)))
if not self.multipage:
break
return files
def __str__(self):
return "%s: %s" % (Searcher.__str__(self), self.name)
Feed.register("feed")
|
from django.apps import AppConfig
class ZipcodesConfig(AppConfig):
name = 'zipcodes'
|
#%%
import pandas as pd
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
from sklearn.cluster import KMeans
import hvplot.pandas
# %%
file_path = "/Users/oshadi/Desktop/Analysis Projects/Cryptocurrencies/module examples/new_iris_data.csv"
df_iris = pd.read_csv(file_path)
df_iris.head(10)
# %%
#standardize
iris_scaled = StandardScaler().fit_transform(df_iris)
print(iris_scaled[0:5])
# %%
# Initialize PCA model
pca = PCA(n_components=2)
# %%
# Get two principal components for the iris data.
iris_pca = pca.fit_transform(iris_scaled)
# %%
df_iris_pca = pd.DataFrame(
data= iris_pca, columns=['principal component 1', 'principal component 2']
)
df_iris_pca.head()
# %%
# fraction of the dataset's variance captured by each principal component
pca.explained_variance_ratio_
# %%
# Find the best value for K
inertia = []
k = list(range(1, 11))
# Calculate the inertia for the range of K values
for i in k:
km = KMeans(n_clusters=i, random_state=0)
km.fit(df_iris_pca)
inertia.append(km.inertia_)
# Create the elbow curve
elbow_data = {"k": k, "inertia": inertia}
df_elbow = pd.DataFrame(elbow_data)
df_elbow.hvplot.line(x="k", y="inertia", xticks=k, title="Elbow Curve")
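# %%
# A hedged sketch for picking the elbow programmatically: the elbow sits
# roughly where the curve bends most, i.e. at the largest second difference
# of the inertia values (a heuristic, not part of the original analysis).
import numpy as np
second_diff = np.diff(inertia, n=2)          # curvature of the elbow curve
best_k = k[int(np.argmax(second_diff)) + 1]  # k where the curve bends most
print("Suggested k:", best_k)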
# %%
# Initialize the K-means model
model = KMeans(n_clusters=3, random_state=0)
# Fit the model
model.fit(df_iris_pca)
# Predict clusters
predictions = model.predict(df_iris_pca)
# Add the predicted class column (labels_ matches predict() on the training data)
df_iris_pca["class"] = model.labels_
df_iris_pca.head()
# %%
df_iris_pca.hvplot.scatter(
x="principal component 1",
y="principal component 2",
hover_cols=["class"],
by="class",
)
# %%
|
# Print the even numbers from 2 to 50
for cont in range(2, 51, 2):
    print(cont)
|
from playsound import playsound
print('voice playing')
playsound('happyvoice.wav')
print('voice stopped')
|
###########################################
# Assessing the number of reads that
# were filtered out from the host-filtering
# step from bowtie2
###########################################
#imports
import os
from Bio import SeqIO
import gzip
from collections import defaultdict
#Path to raw reads
path_raw = '/Volumes/UUI/reads/'
#Path to filtered reads
path_filtered = '/Volumes/UUI/host_filtered/'
#Functions
def getSeqNum(path):
'''Read in the sequence files and return
a dictionary:
- keys = file name
- values = number of sequences '''
counts = defaultdict(int)
for file in os.listdir(path):
if file != 'hide':
            file_path = os.path.join(path, file)
sequences = 0
for record in SeqIO.parse(gzip.open(file_path, 'rt', encoding='utf-8'),"fastq"):
sequences += 1
counts[file] = sequences
return counts
#Read in the raw sequences
raw_counts = getSeqNum(path_raw)
#Read in the filtered sequences
filtered_counts = getSeqNum(path_filtered)
#Output the filtering statistics (tab-separated, despite the .csv name)
with open('filtering_stats.csv', 'w') as fn:
fn.write("Raw Sequences\tcounts\n")
for key, value in raw_counts.items():
fn.write(key + '\t' + str(value) + '\n')
fn.write("\t\tFiltered Sequences\tcounts\n")
for key, value in filtered_counts.items():
fn.write("\t\t" + key + '\t' + str(value) + '\n')
|
import numpy as np
from numba import cuda, float32
from pyqtgraph.Qt import QtCore, QtGui
import pyqtgraph as pg
import pyqtgraph.opengl as gl
from time import perf_counter as clock  # time.clock was removed in Python 3.8
from math import sqrt
'''------------------------------------------------ Disclaimer ------------------------------------------------'''
# # Only tested on a gtx 960m with compute capability 5.0
# # DWORD 'TdrDelay' and 'TdrLevel' at 8 and 0 respectively must be added to allow for larger amount of bodies in
# # HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\GraphicsDrivers
# #
'''------------------------------------------------ GPU function ------------------------------------------------'''
@cuda.jit(device=True, inline=True)
def interaction(xi, yi, zi, axi, ayi, azi, xj, yj, zj, mj, eps, min_dist):
"""
Models the gravitational interaction between two bodies
:param xi: x-coordinate of body i
:param yi: y-coordinate of body i
    :param zi: z-coordinate of body i
:param axi: Acceleration of body i along the x-axis
:param ayi: Acceleration of body i along the y-axis
:param azi: Acceleration of body i along the z-axis
:param xj: x-coordinate of body j
:param yj: y-coordinate of body j
:param zj: z-coordinate of body j
:param mj: Mass of body j
:param eps: Softening parameter
:param min_dist: Minimal distance for acceleration calculation
:return: Updated acceleration of body i along the three axes
"""
rx = xj-xi # Distance along the x-axis
ry = yj-yi # Distance along the y-axis
rz = zj-zi # Distance along the z-axis
    distance = sqrt(rx*rx+ry*ry+rz*rz)   # Distance between the bodies
if distance > min_dist: # If the bodies are far enough apart (at least not the same)
scale = mj/(distance**3+eps) # Scale for the distance vector (rx, ry, rz) for gravitation
elif distance < 1.1*eps: # If too close together (or the same particle)
scale = 0 # Zero acceleration
else: # If the bodies are very close together
scale = mj/(distance*min_dist**2+eps) # Constant acceleration once scaled
axi += rx*scale # Add acceleration of body j to that of body i along the x-axis
ayi += ry*scale # Add acceleration of body j to that of body i along the y-axis
azi += rz*scale # Add acceleration of body j to that of body i along the z-axis
return axi, ayi, azi
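# A hedged CPU reference of the same softened interaction, useful for
# spot-checking the kernel on a handful of bodies (a sketch, not part of the
# GPU path; pos_* and acc_i are (x, y, z) tuples):
def interaction_cpu(pos_i, acc_i, pos_j, mj, eps, min_dist):
    rx = pos_j[0] - pos_i[0]                      # Distance along the x-axis
    ry = pos_j[1] - pos_i[1]                      # Distance along the y-axis
    rz = pos_j[2] - pos_i[2]                      # Distance along the z-axis
    distance = sqrt(rx*rx + ry*ry + rz*rz)        # Distance between the bodies
    if distance > min_dist:
        scale = mj/(distance**3 + eps)            # Softened inverse-square law
    elif distance < 1.1*eps:
        scale = 0.0                               # Same particle: no force
    else:
        scale = mj/(distance*min_dist**2 + eps)   # Capped close-range force
    return (acc_i[0] + rx*scale,
            acc_i[1] + ry*scale,
            acc_i[2] + rz*scale)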
@cuda.jit(device=True, inline=True)
def block_acceleration(xi, yi, zi, axi, ayi, azi, bodies, eps, min_dist):
"""
Executes interaction for each block in the grid
:param xi: x-coordinate of body i
:param yi: y-coordinate of body i
:param zi: z-coordinate of body i
:param axi: Acceleration of body i along the x-axis
:param ayi: Acceleration of body i along the y-axis
:param azi: Acceleration of body i along the z-axis
:param bodies: Array of body vectors
:param eps: Softening parameter
:param min_dist: Minimal distance for acceleration calculation
:return: Updated acceleration of body i along the three axes
"""
for j in range(cuda.blockDim.x): # For each body in this block
xj = bodies[j, 0] # x-coordinate of body j
yj = bodies[j, 1] # y-coordinate of body j
zj = bodies[j, 2] # z-coordinate of body j
mj = bodies[j, 3] # Mass of body j
axi, ayi, azi = interaction(xi, yi, zi, axi, ayi, azi, xj, yj, zj, mj, eps, min_dist) # Update acceleration
return axi, ayi, azi
@cuda.jit('void(float32[:, :], int32, float32, float32)')
def total_acceleration(d_bodies, N, eps, min_dist):
"""
Calculates the acceleration for each body
:param d_bodies: Array of all the body vectors
    :param N: Amount of bodies
:param eps: Softening parameter
:param min_dist: Minimal distance for acceleration calculation
"""
    block_dim = 32                                        # Manually set block size (cannot be a parameter because of the shared array)
sh_bodies = cuda.shared.array((block_dim, 4), float32) # Create shared array for block_acceleration
i = cuda.grid(1) # Create grid
xi = d_bodies[i, 0] # x-coordinate of body i
yi = d_bodies[i, 1] # y-coordinate of body i
zi = d_bodies[i, 2] # z-coordinate of body i
axi = 0.0 # Initialize acceleration of body i along the x-axis
ayi = 0.0 # Initialize acceleration of body i along the y-axis
azi = 0.0 # Initialize acceleration of body i along the z-axis
for j in range(0, N, block_dim): # For each block
index = (j//block_dim)*cuda.blockDim.x+cuda.threadIdx.x # Calculate index
sh_index = cuda.threadIdx.x # Identify the thread (body)
sh_bodies[sh_index, 0] = d_bodies[index, 0] # Add x-coordinate of body to shared array
sh_bodies[sh_index, 1] = d_bodies[index, 1] # Add y-coordinate of body to shared array
sh_bodies[sh_index, 2] = d_bodies[index, 2] # Add z-coordinate of body to shared array
sh_bodies[sh_index, 3] = d_bodies[index, 9] # Add mass of body to shared array
cuda.syncthreads() # Wait for the entire shared array to finish
axi, ayi, azi = block_acceleration(xi, yi, zi, axi, ayi, azi, sh_bodies, eps, min_dist) # Update acceleration
cuda.syncthreads() # Wait for all the accelerations to be updated
d_bodies[i, 6] = axi # Assign acceleration along the x-axis to body i
d_bodies[i, 7] = ayi # Assign acceleration along the y-axis to body i
d_bodies[i, 8] = azi # Assign acceleration along the z-axis to body i
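# Note on the tiling scheme above: each block stages 32 bodies at a time into
# shared memory, every thread accumulates the partial acceleration from that
# tile, and the loop strides on to the next tile. This replaces N global
# reads per thread with one coalesced global load per tile plus fast shared
# reads, which is the standard shared-memory N-body optimization.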
@cuda.jit('void(float32[:, :], float32, float32)')
def leapfrog(d_bodies, delta_t, inv_step_size):
"""
Executes the leapfrog integration method
:param d_bodies: Array of all the body vectors
:param delta_t: Time step duration
:param inv_step_size: Full step (1) or half step (2)
"""
i = cuda.grid(1) # Create grid
d_bodies[i, 3] += delta_t*d_bodies[i, 6]/inv_step_size # Update velocity of body i along the x-axis
d_bodies[i, 4] += delta_t*d_bodies[i, 7]/inv_step_size # Update velocity of body i along the y-axis
d_bodies[i, 5] += delta_t*d_bodies[i, 8]/inv_step_size # Update velocity of body i along the z-axis
d_bodies[i, 0] += delta_t*d_bodies[i, 3] # Update x-coordinate of body i
d_bodies[i, 1] += delta_t*d_bodies[i, 4] # Update y-coordinate of body i
d_bodies[i, 2] += delta_t*d_bodies[i, 5] # Update z-coordinate of body i
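# Note on inv_step_size: leapfrog staggers velocity and position by half a
# step. The first call passes inv_step_size=2 to kick the velocity half a
# step; all later calls pass 1 for full steps, keeping the scheme
# time-symmetric (see the frame == 1 branch in update() below).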
'''------------------------------------------------- Constants -------------------------------------------------'''
G = 6.67*10**-11 # Gravitational constant
kpc = 3.08567758*10**19 # 1 kiloparsec in meters
min_dist = 1 # Minimum distance between two bodies for calculating acceleration
eps = np.float32(1e-3) # Softening parameter
'''---------------------------------------------------- Data ----------------------------------------------------'''
'''
galaxy1 = np.zeros((1600, 10), dtype=np.float32) # Allocate galaxy array
theta = np.repeat(np.arange(41)*2*np.pi/41, 39) # Multiple sets of an array of angles
r = np.tile(np.arange(5, 200, 5), 41) # Multiple sets of an array of radii
galaxy1[1:, 0] = r*np.cos(theta) # Set x-coordinate from (r, theta)
galaxy1[1:, 1] = r*np.sin(theta) # Set y-coordinate from (r, theta)
galaxy1[1:, 3] = -np.sqrt(G*1000000/r)*np.sin(theta)*1.2 # Perpendicular velocity .*sqrt(GM/r) in the x-direction
galaxy1[1:, 4] = np.sqrt(G*1000000/r)*np.cos(theta)*1.2 # Perpendicular velocity .*sqrt(GM/r) in the y-direction
galaxy1[1:, 9] = G # Set masses of all 'planets' scaled by G
galaxy1[0, 9] = 1000000*G # Set mass of the 'star' scaled by G
follow = 90 # State which particle to create a trail of
bodies = galaxy1 # Set galaxy1 as the body array
# Set colors of the bodies for the scatter plot
N = bodies.shape[0] # Amount of bodies
color = np.zeros((N, 4), dtype=np.float32) # Allocate color array
color[:, 3] = 0.9 # Set transparency to 90%
color[1:, 1] = 1 # Add color green to all but the center
color[0, 2] = 1 # Add color blue to center
'''
data = np.loadtxt("dubinskitab.txt") # Load Milky Way & Andromeda data (m, x, y, z, vx, vy, vz)
# 16384 Gal. disk, 16384 And. disk, 8192 Gal. bulge, 8192 And. bulge, 16384 Gal. halo, 16384 And. halo
mod = 16 # Keep 81920/mod of the bodies
bodies = np.zeros((int(81920/mod), 10), dtype=np.float32) # Initialize body array
bodies[:, :6] = data[0::mod, 1:] # Copy positions and velocities of selected data
bodies[:, 9] = mod*data[0::mod, 0] # Copy masses of selected data, scaled by mod to conserve the total mass
follow = 90 # State which particle to create a trail of
# Set color of the plot based on the Milky Way & Andromeda data
N = bodies.shape[0] # Amount of bodies
color = np.zeros((N, 4), dtype=np.float32) # Initialize color array for the plot
color[:, 3] = 0.3 # Set transparency to 30%
color[:int(16384/mod), 2] = 1 # Set Milky Way core to blue
color[int(16384/mod):int(32768/mod), 0] = 1 # Set Andromeda core to red
color[int(32768/mod):int(40960/mod), 2] = 0.8 # Set Milky Way bulge to lighter blue
color[int(40960/mod):int(49152/mod), 0] = 0.8 # Set Andromeda bulge to lighter red
color[int(49152/mod):int(65536/mod), 2] = 0.6 # Set Milky way halo to even lighter blue
color[int(65536/mod):, 0] = 0.6 # Set Andromeda halo to even lighter red
d_bodies = cuda.to_device(bodies) # Copy the array of body vectors to the gpu
'''------------------------------------------------- Parameters -------------------------------------------------'''
# Set block and grid dimensions for the GPU
blockdim = 32 # Amount of threads per block
griddim = int(np.ceil(N/blockdim)) # Amount of blocks
# Initialize GPU functions with the given grid and block dimensions
leapfrog = leapfrog[griddim, blockdim] # Configure leapfrog kernel launch
total_acceleration = total_acceleration[griddim, blockdim] # Configure total_acceleration kernel launch
'''------------------------------------------------ 3D Animation ------------------------------------------------'''
app = QtGui.QApplication([]) # Initialize application
# Initialize window for the scatter plots
w = gl.GLViewWidget() # Initialize opengl widget
w.opts['distance'] = 12500 # Set viewing distance to the figure
w.show() # Show the figure
w.setWindowTitle('N-body simulation') # Set title of the window
w.setGeometry(960, 35, 960, 995) # Set window to envelop right side of the screen
# Scatter plot of all the bodies
sp = gl.GLScatterPlotItem(pos=bodies[:, :3], color=color, size=7) # Set initial frame
sp.scale(20, 20, 20) # Scale the plot to match the grids
sp.translate(-10, -10, -10) # Translate the plot to match the grids
sp.rotate(80, 0, 0, 1) # Rotate the plot 80 degrees about the z-axis
w.addItem(sp) # Add plot to figure
# Initialize arrays for the scatter plot trail
bodies2 = np.zeros((1000000, 3)) # Allocate memory for the body position over time
bodies2[0, :] = bodies[follow, :3] # Set the location at the first frame
color2 = np.zeros((1000000, 4)) # Allocate memory for the color array
color2[:, 0] = 1 # Set color array to color of followed particle
color2[:, 3] = 0.9 # Set color array to color of followed particle
# Scatter plot of the trail of a single body
sp2 = gl.GLScatterPlotItem(pos=bodies2[0, :], color=color2[0, :], size=5, glOptions='additive') # Set initial frame
sp2.scale(20, 20, 20) # Scale the plot to match the grids
sp2.translate(-10, -10, -10) # Translate the plot to match the grids
w.addItem(sp2) # Add plot to figure
# Initialize window for the scrolling plots
win = pg.GraphicsWindow() # Initialize window for plotting
win.setGeometry(0, 35, 960, 995) # Set window to envelop left side of the screen
p1 = win.addPlot(row=1, col=0) # Add top plot
p2 = win.addPlot(row=2, col=0) # Add bottom plot
# Scrolling plots of the time steps
dts = np.zeros(1000000, dtype=np.float32) # Allocate array for dt's
curve = p1.plot(dts[:500]) # Set initial frame top plot
curve2 = p2.plot(dts[:2]) # Set initial frame bottom plot
frame = 0 # Frame index
def update():
"""
Updates the plot
"""
global sp, d_bodies, N, frame, ts, color, dts, bodies2, color2, dt
frame += 1 # Update frame index
# Update the positions
    for amount in range(10): # Advance the simulation ten steps per rendered frame
total_acceleration(d_bodies, N, eps, min_dist) # Calculate the accelerations
d_bodies.copy_to_host(bodies) # Copy the updated array of body vectors back to RAM (auto syncs threads)
        dt = np.clip(1/(np.max(np.abs(bodies[:, 6:9]))+0.001), 0, 200) # Adaptive time step: inverse of the largest acceleration, clipped to [0, 200]
if frame == 1: # Half step of the velocity
ts = clock() # Starting time
#dts[499] = dt # Replace initial value needed for initializing the plot
leapfrog(d_bodies, dt, 2) # Apply the leapfrog integration method
else: # Full step of the velocity
leapfrog(d_bodies, dt, 1) # Apply the leapfrog integration method
# Top plot
dts[499+frame] = dt # Save value of current frame
curve.setData(dts[frame:500+frame]) # Set data as last 500 frames
curve.setPos(frame-499, 0) # Scroll x-axis
# Bottom plot
curve2.setData(dts[499:500+frame]) # Set data as all elapsed frames
# Plot bodies
d_bodies.copy_to_host(bodies) # Copy the updated array of body vectors back to RAM (auto syncs threads)
sp.setData(pos=bodies[:, :3]) # Update the plot data and colors
# Plot Trail
bodies2[frame, :] = bodies[follow, :3] # Save value of current frame
sp2.setData(pos=bodies2[:frame+1, :], color=color2[:frame+1, :]) # Update the plot data and colors
# Print FPS
if frame % 10 == 0: # Display fps every 10 frames
print(10/(clock()-ts)) # Display fps
ts = clock() # Reset starting time
# Update the figure
timer = QtCore.QTimer() # Initialize timer
timer.timeout.connect(update) # Connect the timer to the update function
timer.start(0) # Start timer
QtGui.QApplication.instance().exec_() # Run the figure
|