| repo_name (string, 5-92 chars) | path (string, 4-221 chars) | copies (string, 19 classes) | size (string, 4-6 chars) | content (string, 766-896k chars) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 32-997) | alpha_frac (float64, 0.25-0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5-13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| Ecam-Eurobot-2017/main | code/raspberrypi/range_sensors.py | 1 | 2358 |
from i2c import I2C
from enum import IntEnum
class Command(IntEnum):
MeasureOne = 1
MeasureAll = 2
Count = 3
class RangeSensor(I2C):
"""
This class is an abstraction around the I2C communication with
the range-sensor module.
Details of the "protocol" used:
The Raspberry Pi sends a byte to the module containing a command
and, optionally, a sensor number. Each piece of information is coded on
4 bits, making 8 bits in total. The null byte, 0x00, is used to indicate
errors. This means that we have 15 possible commands and 15 possible sensors.
We only use 3 different commands:
1. MeasureOne (get_range): 0001 xxxx
This command requests the last measurement of sensor number xxxx.
Sensor indices begin at 1. If the sensor does not exist, the module
will return a null byte. If the sensor does exist, two bytes will be
returned, together making up the 16-bit value.
2. MeasureAll (get_ranges): 0010 0000
This command requests the last measurements of all the available sensors.
The response to this request is a sequence of 2*n bytes, where n is the
number of available sensors.
3. Count (get_number_of_sensors): 0011 0000
This command requests the number of available sensors.
The response is a single byte, as there are only 15 possible sensors.
"""
def __init__(self, address):
"""Constructor takes the adress of the I2C module"""
super(RangeSensor, self).__init__(address)
self.n = self.get_number_of_sensors()
def get_range(self, sensor):
"""Requests the last measurement of a specific sensor"""
cmd = I2C.pack8(Command.MeasureOne, sensor)
self.send(cmd)
r = self.receive(2)
return I2C.pack16(r[1], r[0])
def get_ranges(self):
"""Requests the last measurements of all sensors"""
cmd = I2C.pack8(Command.MeasureAll, 0)
self.send(cmd)
data = self.receive(2 * self.n)
ranges = list()
for i in range(self.n):
j = i * 2
ranges.append(I2C.pack16(data[j + 1], data[j]))
return ranges
def get_number_of_sensors(self):
"""Requests the number of available sensors"""
cmd = I2C.pack8(Command.Count, 0)
self.send(cmd)
return self.receive()
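# Minimal usage sketch (the 0x04 bus address is hypothetical; use the
# address your module is actually configured for):
#   sensors = RangeSensor(0x04)
#   print sensors.get_ranges()   # one reading per available sensor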
| mit | -9,218,176,433,495,981,000 | 32.685714 | 79 | 0.651824 | false | 3.956376 | false | false | false |
| PedroMDuarte/thesis-hubbard-lda_evap | lda.py | 1 | 78053 |
import logging
# create logger
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import rc
rc('font',**{'family':'serif'})
rc('text', usetex=True)
from vec3 import vec3, cross
import scipy.constants as C
"""
This file provides a way of calculating trap profiles in the local density
approximation. It needs to have a way of calculating:
* local band structure
* local tunneling rate, t
* local onsite interactions, U
From these three quantities it can go ahead and use the solutions to the
homogeneous Fermi-Hubbard (FH) model to calculate the LDA.
In the homogeneous FH problem the chemical potential and the zero of
energy are always specified with respect to some point in the local band
structure. This point depends on how the Hamiltonian is written down:
A. Traditional hamiltonian.
i, j : lattice sites
<i,j> : nearest neighbors
s : spin
su : spin-up
sd : spin-down
Kinetic energy = -t \sum_{s} \sum_{<i,j>} a_{i,s}^{\dagger} a_{j,s}
Onsite energy = U \sum_{i} n_{i,su} n_{i,sd}
Using the traditional hamiltonian, half filling occurs at a chemical
potential mu = U/2.
The zero of energy in the traditional hamiltonian is exactly midway through
the lowest band of the U=0 hamiltonian.
B. Half-filling hamiltonian
Kinetic energy = -t \sum_{s} \sum_{<i,j>} a_{i,s}^{\dagger} a_{j,s}
Onsite energy = U \sum_{i} ( n_{i,su} - 1/2 )( n_{i,sd} - 1/2 )
Using the half-filling hamiltonian, half filling occurs at a chemical
potential mu = 0, a convenient value.
The zero of energy in the half-filling hamiltonian is shifted by U/2
with respect to the zero in the traditional hamiltonian.
....
Considerations for LDA
....
When doing the local density approximation (LDA) we will essentially have a
homogeneous FH model that is shifted in energy by the enveloping potential of
the trap and by the local band structure. In the LDA the zero of energy is
defined as the energy of an atom at a point where there are no external
potentials. A global chemical potential will be defined with respect to the
LDA zero of energy.
To calculate the local thermodynamic quantities, such as density, entropy,
double occupancy, etc. we will use theoretical results for a homogeneous FH
model. The local chemical potential will be determined based on the local
value of the enveloping potential and the local band structure (which can be
obtained from the local lattice depth).
"""
import udipole
import scubic
from mpl_toolkits.mplot3d import axes3d
from scipy import integrate
from scipy import optimize
from scipy.interpolate import interp1d
# Load up the HTSE solutions
from htse import htse_dens, htse_doub, htse_entr, htse_cmpr
from nlce import nlce_dens, nlce_entr, nlce_spi, nlce_cmpr
import qmc, qmc_spi
def get_dens( T, t, mu, U, select='htse', ignoreLowT=False, verbose=True):
""" This function packages all three methods for obtaining
the thermodynamic quantities: htse, nlce, qmc"""
if select == 'htse':
return htse_dens( T, t, mu, U, ignoreLowT=ignoreLowT, verbose=verbose)
elif select == 'nlce':
return nlce_dens( T, t, mu, U, ignoreLowT=ignoreLowT, verbose=verbose)
def get_entr( T, t, mu, U, select='htse', ignoreLowT=False, verbose=True):
""" This function packages all three methods for obtaining
the thermodynamic quantities: htse, nlce, qmc"""
if select == 'htse':
return htse_entr( T, t, mu, U, ignoreLowT=ignoreLowT, verbose=verbose)
elif select == 'nlce':
return nlce_entr( T, t, mu, U, ignoreLowT=ignoreLowT, verbose=verbose)
def get_spi( T, t, mu, U, select='htse', ignoreLowT=False, verbose=True):
""" This function packages all three methods for obtaining
the thermodynamic quantities: htse, nlce, qmc"""
if select == 'htse':
return np.ones_like( t )
elif select == 'nlce':
return nlce_spi( T, t, mu, U, ignoreLowT=ignoreLowT, verbose=verbose)
def get_doub( T, t, mu, U, select='htse', ignoreLowT=False, verbose=True):
""" This function packages all three methods for obtaining
the thermodynamic quantities: htse, nlce, qmc"""
if select == 'htse':
return htse_doub( T, t, mu, U, ignoreLowT=ignoreLowT, verbose=verbose)
else:
raise ValueError("doublons not defined for select='%s'" % select)
def get_cmpr( T, t, mu, U, select='htse', ignoreLowT=False, verbose=True):
""" This function packages all three methods for obtaining
the thermodynamic quantities: htse, nlce, qmc"""
if select == 'htse':
return htse_cmpr( T, t, mu, U, ignoreLowT=ignoreLowT, verbose=verbose)
elif select == 'nlce':
return nlce_cmpr( T, t, mu, U, ignoreLowT=ignoreLowT, verbose=verbose)
#...............
# LDA CLASS
#...............
class lda:
"""
This class provides the machinery to do the lda. It provides a way to
determine the global chemical potential for a given number or for a half
filled sample.
"""
def __init__( self, **kwargs ):
self.verbose = kwargs.get('verbose', False)
# Flag to ignore errors related to the slope of the density profile
# or the slope of the band bottom
self.ignoreSlopeErrors = kwargs.get( 'ignoreSlopeErrors',False)
# Flag to ignore errors related to the global chemical potential
# spilling into the beams
self.ignoreMuThreshold = kwargs.get('ignoreMuThreshold', False )
# Flag to ignore errors related to low temperatures beyond the reach
# of the htse
self.ignoreLowT = kwargs.get('ignoreLowT',False)
# Flag to ignore errors related to a non-vanishing density
# distribution within the extents
self.ignoreExtents = kwargs.get('ignoreExtents',False)
# The potential needs to offer a way of calculating the local band
# band structure via provided functions. The following functions
# and variables must exist:
#
# To calculate lda:
# - pot.l
# - pot.bandStructure( X,Y,Z )
#
# To make plots
# - pot.unitlabel
# - pot.Bottom( X,Y,Z )
# - pot.LatticeMod( X,Y,Z )
# - pot.Info()
# - pot.EffAlpha()
# - pot.firstExcited( X,Y,Z )
# - pot.S0( X,Y,Z )
self.pot = kwargs.pop( 'potential', None)
if self.pot is None:
raise ValueError(\
'A potential needs to be defined to carry out the LDA')
# The potential also contains the lattice wavelength, which defines
# the lattice spacing
self.a = self.pot.l / 2.
# Initialize temperature. Temperature is specified in units of
# Er. For a 7 Er lattice t = 0.04 Er
self.T = kwargs.get('Temperature', 0.40 )
# Initialize interactions.
self.a_s = kwargs.get('a_s',300.)
# Initialize extents
self.extents = kwargs.pop('extents', 40.)
# Initialize the type of Hubbard solution
# type can be: 'htse', 'nlce', 'qmc'
self.select = kwargs.get('select','htse')
# Make a cut line along 111 to calculate integrals of the
# thermodynamic quantities
# set the number of points to use in the cut
if self.select == 'htse':
NPOINTS = 320
else:
NPOINTS = 80
OVERRIDE_NPOINTS = kwargs.pop('override_npoints', None)
if OVERRIDE_NPOINTS is not None:
NPOINTS = OVERRIDE_NPOINTS
direc111 = (np.arctan(np.sqrt(2)), np.pi/4)
unit = vec3(); th = direc111[0]; ph = direc111[1]
unit.set_spherical( 1., th, ph);
t111, self.X111, self.Y111, self.Z111, lims = \
udipole.linecut_points( direc=direc111, extents=self.extents,\
npoints=NPOINTS)
# Below we get the signed distance from the origin
self.r111 = self.X111*unit[0] + self.Y111*unit[1] + self.Z111*unit[2]
# Obtain band structure and interactions along the 111 direction
bandbot_111, bandtop_111, \
self.Ezero_111, self.tunneling_111, self.onsite_t_111 = \
self.pot.bandStructure( self.X111, self.Y111, self.Z111)
# The onsite interactions are scaled up by the scattering length
self.onsite_t_111 = self.a_s * self.onsite_t_111
self.onsite_111 = self.onsite_t_111 * self.tunneling_111
# Lowest value of E0 is obtained
self.LowestE0 = np.amin( bandbot_111 )
self.Ezero0_111 = self.Ezero_111.min()
#---------------------
# CHECK FOR NO BUMP IN BAND BOTTOM
#---------------------
# Calculate the first derivative of the band bottom at small radii, to
# assess whether or not the potential is a valid potential
# (no bump in the center due to compensation)
positive_r = np.logical_and( self.r111 > 0. , self.r111 < 10. )
# absolute energy of the lowest band, elb
elb = bandbot_111[ positive_r ]
elb_slope = np.diff( elb ) < -1e-4
n_elb_slope = np.sum( elb_slope )
if n_elb_slope > 0:
msg = "ERROR: Bottom of the band has a negative slope"
if self.verbose:
print msg
print elb
print np.diff(elb)
print elb_slope
if not self.ignoreSlopeErrors:
raise ValueError(msg)
else:
if self.verbose:
print "OK: Bottom of the band has positive slope up to "\
+ "r111 = 10 um"
#------------------------------
# SET GLOBAL CHEMICAL POTENTIAL
#------------------------------
# Initialize global chemical potential and atom number
# globalMu can be given directly or can be specified via the
# number of atoms. If the Natoms is specified we calculate
# the required gMu using this function:
muHalfMott = self.onsite_111.max()/2.
if 'globalMu' in kwargs.keys():
# globalMu is given in Er, and is measured from the value
# of Ezero at the center of the potential
# When using it in the phase diagram it has to be changed to
# units of the tunneling
self.globalMu = kwargs.get('globalMu', 0.15)
if self.globalMu == 'halfMott':
self.globalMu = muHalfMott \
+ kwargs.get('halfMottPlus',0.)
else :
self.Number = kwargs.get('Natoms', 3e5)
fN = lambda x : self.getNumber( muHalfMott + x,self.T, \
verbose=False)- self.Number
if self.verbose :
print "Searching for globalMu => N=%.0f, "% self.Number,
muBrent = kwargs.get('muBrent', (-0.2, 0.3)) # Maybe the default
# muBrent range should
# be U dependent
muBrentShift = kwargs.get('muBrentShift', 0. )
muBrent = ( muBrent[0] + muBrentShift * muHalfMott, \
muBrent[1] + muBrentShift * muHalfMott )
try:
muBrentOpt, brentResults = \
optimize.brentq(fN, muBrent[0], muBrent[1], \
xtol=2e-3, rtol=1e-2, full_output=True)
#print "fN(muBrentOpt) = ", fN(muBrentOpt)
self.globalMu = muHalfMott + muBrentOpt
except Exception as e:
errstr = 'f(a) and f(b) must have different signs'
if errstr in e.message:
print "Natoms = {:.4g}".format(self.Number)
print "mu0 = %.2f --> Nlda = %.2g" % \
(muBrent[0], fN(muBrent[0]) + self.Number )
print "mu1 = %.2f --> Nlda = %.2g" % \
(muBrent[1], fN(muBrent[1]) + self.Number )
raise
if self.verbose:
print "gMu=%.3f, " % brentResults.root,
print "n_iter=%d, " % brentResults.iterations,
print "n eval=%d, " % brentResults.function_calls,
print "converge?=", brentResults.converged
#---------------------
# EVAPORATION ENERGIES
#---------------------
# Calculate energies to estimate eta parameter for evaporation
self.globalMuZ0 = self.Ezero0_111 + self.globalMu
# Make a cut line along 100 to calculate the threshold for evaporation
direc100 = (np.pi/2, 0.)
t100, self.X100, self.Y100, self.Z100, lims = \
udipole.linecut_points( direc=direc100, extents = 1200.)
# Obtain band structure along the 100 direction
bandbot_100, bandtop_100, self.Ezero_100, self.tunneling_100 = \
self.pot.bandStructure( self.X100, self.Y100, self.Z100, \
getonsite=False)
self.Ezero0_100 = self.Ezero_100.min()
# evapTH0_100 accounts for situations in which there is a local barrier
# as you move along 100 to the edge
self.evapTH0_100 = bandbot_100.max()
# Once past the local barrier we calculate the bandbot energy along
# a beam
self.beamBOT_100 = bandbot_100[-1]
if self.verbose:
#This obtains the value of g0, careful when using anisotropic params
scubic.get_max_comp( self.pot, 650., self.T, verbose=False)
#------------------------------------------------
# CONTROL THE CHEMICAL POTENTIAL SO THAT IT STAYS
# BELOW THE THRESHOLD FOR EVAPORATION
#------------------------------------------------
# For a valid scenario we need
# self.globalMuZ0 < self.beamBOT_100
# self.globalMuZ0 < self.evapTH0_100
# Otherwise the density distribution will spill out into the beams
# and the assumption of spherical symmetry won't be valid.
if self.globalMuZ0 + self.T*1.2 > self.evapTH0_100:
msg = "ERROR: Chemical potential exceeds the evaporation threshold "
if self.verbose:
print msg
print " mu0 = %.3f" % self.globalMuZ0
print " T = %.3f" % (self.T*1.2)
print " Eth = %.3f" % self.evapTH0_100
if not self.ignoreMuThreshold :
raise ValueError(msg)
elif self.verbose:
print "OK: Chemical potential is below evaporation threshold."
if self.globalMuZ0 + self.T*1.2 > self.beamBOT_100:
msg = "ERROR: Chemical potential exceeds the bottom of the band " +\
"along 100"
if self.verbose:
print msg
print " mu0 = %.3f" % self.globalMuZ0
print " T = %.3f" % (self.T*1.2)
print "E100 = %.3f" % self.beamBOT_100
if not self.ignoreMuThreshold :
raise ValueError(msg)
elif self.verbose:
print "OK: Chemical potential is below the bottom of the band " +\
"along 100"
#-----------------------
# ESTIMATION OF ETA EVAP
#-----------------------
mu = self.globalMuZ0 - self.LowestE0
th = self.evapTH0_100 - self.LowestE0
self.EtaEvap = th/mu
self.DeltaEvap = th - mu
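# eta_F compares the evaporation threshold to the chemical potential,
# both measured from the lowest point of the band bottom; a larger
# eta_F means the cloud sits further below the evaporation edge.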
if False:
print "mu global = %.3g" % self.globalMuZ0
print "evap th = %.3g" % self.evapTH0_100
print "lowest E = %.3g" % self.LowestE0
print "mu = %.3g" % mu
print "th = %.3g" % th
print "eta = %.3g" % (th/mu)
print "th-mu = %.3g" % (th-mu)
# After the chemical potential is established the local chemical
# potential along 111 can be defined. It is used to calculate other
# thermodynamic quantities
gMuZero = self.Ezero0_111 + self.globalMu
self.localMu_t_111= (gMuZero - self.Ezero_111) / self.tunneling_111
self.localMu_111= (gMuZero - self.Ezero_111)
localMu = gMuZero - self.Ezero_111
# If the global chemical potential is fixed then the lda
# class can have an easier time calculating the necessary
# temperature to obtain a certain entropy per particle.
# This option is provided here:
if ( 'globalMu' in kwargs.keys() and 'SN' in kwargs.keys() ) \
or kwargs.get('forceSN',False):
self.SN = kwargs.get('SN', 2.0)
# Shut down density extent errors during the search
igExt = self.ignoreExtents
self.ignoreExtents = True
fSN = lambda x : self.getEntropy(x) / \
self.getNumber(self.globalMu, x ) \
- self.SN
if self.verbose:
print "Searching for T => S/N=%.2f, "% self.SN
TBrent = kwargs.get('TBrent',(0.14,1.8))
try:
Tres, TbrentResults = \
optimize.brentq(fSN, TBrent[0], TBrent[1], \
xtol=2e-3, rtol=2e-3, full_output=True)
if self.verbose:
print "Brent T result = %.2f Er" % Tres
self.T = Tres
except Exception as e:
errstr = 'f(a) and f(b) must have different signs'
if errstr in e.message:
print "T0 = %.3f --> fSN = %.3f" % \
(TBrent[0], fSN(TBrent[0]) )
print "T1 = %.3f --> fSN = %.3f" % \
(TBrent[1], fSN(TBrent[1]) )
raise
print "Search for S/N=%.2f did not converge"%self.SN
print "Temperature will be set at T = %.2f Er" % self.T
print "ERROR:"
print e.message
print self.pot.Info()
print
self.ignoreExtents = igExt
# Once the temperature is established we can calculate the ratio
# of temperature to chemical potential, with the chem. potential
# measured from the lowest energy state
self.Tmu = self.T / mu
# We define an etaF_star which allows us to control for atoms
# spilling along the beams in situations with non-zero temperature
# such as what we can access with HTSE
self.etaF_star = self.EtaEvap - self.Tmu*1.4
# Obtain trap integrated values of the thermodynamic quantities
self.Number = self.getNumber( self.globalMu, self.T )
self.Entropy = self.getEntropy( self.T)
def Info( self ):
"""
Returns a latex string with the information pertinent to the
hubbard parameters
"""
# Tunneling label
tmin = self.tunneling_111.min()
tmin_kHz = tmin * 29.2
tlabel = '$t=%.2f\,\mathrm{kHz}$'%(tmin_kHz)
# Scattering length
aslabel = '$a_{s}=%.0fa_{0}$' % self.a_s
# U/t label
Utlabel = '$U/t=%.1f$' % self.onsite_t_111.max()
# Temperature label
Tlabel = '$T/t=%.1f$' % (self.T/self.tunneling_111).max()
LDAlabel = '\n'.join( [ aslabel, Utlabel, Tlabel, tlabel ] )
return LDAlabel
def ThermoInfo( self ):
"""
Returns a latex string with the information pertinent to the
calculated thermodynamic quantities.
"""
wLs = self.pot.w
waists = sum( wLs, ())
wL = np.mean(waists)
self.NumberD = self.getNumberD( self.T )
rlabel = r'$\mathrm{HWHM} = %.2f\,w_{L}$' % ( self.getRadius()/wL )
Nlabel = r'$N=%.2f\times 10^{5}$' % (self.Number/1e5)
Dlabel = r'$D=%.3f$' % ( self.NumberD / self.Number )
Slabel = r'$S/N=%.2fk_{\mathrm{B}}$' % ( self.Entropy / self.Number )
return '\n'.join([rlabel, Nlabel, Dlabel, Slabel])
def getRadius( self ):
"""
This function calculates the HWHM (half-width at half max) of the
density distribution.
"""
gMu = self.globalMu
T = self.T
gMuZero = self.Ezero0_111 + gMu
localMu = gMuZero - self.Ezero_111
density = get_dens( T, self.tunneling_111, localMu, \
self.onsite_111, select=self.select,\
ignoreLowT=self.ignoreLowT, \
verbose=self.verbose)
posradii = self.r111 >= 0.
r111pos = self.r111[ posradii]
posdens = density[ posradii ]
try:
hwhm = r111pos[ posdens - posdens[0]/2. < 0.][0]
return hwhm
except:
print r111pos
print posdens
raise
def get_localMu_t( self, gMu):
gMuZero = self.Ezero0_111 + gMu
localMu = gMuZero - self.Ezero_111
localMu_t = localMu / self.tunneling_111
return localMu_t
def getDensity( self, gMu, T ):
"""
This function calculates and returns the density along
the 111 direction
Parameters
----------
gMu : global chemical potential
"""
gMuZero = self.Ezero0_111 + gMu
localMu = gMuZero - self.Ezero_111
localMu_t = localMu / self.tunneling_111
density = get_dens( T, self.tunneling_111, localMu, \
self.onsite_111, select=self.select,\
ignoreLowT=self.ignoreLowT, \
verbose=self.verbose)
return self.r111 , density
def getEntropy111( self, gMu, T ):
"""
This function calculates and returns the entropy along
the 111 direction
Parameters
----------
gMu : global chemical potential
"""
gMuZero = self.Ezero0_111 + gMu
localMu = gMuZero - self.Ezero_111
localMu_t = localMu / self.tunneling_111
entropy = get_entr( T, self.tunneling_111, localMu, \
self.onsite_111, select=self.select,\
ignoreLowT=self.ignoreLowT, \
verbose=self.verbose)
return self.r111 , entropy
def getSpi111( self, gMu, T ):
"""
This function calculates and returns the structure factor along
the 111 direction
Parameters
----------
gMu : global chemical potential
"""
gMuZero = self.Ezero0_111 + gMu
localMu = gMuZero - self.Ezero_111
localMu_t = localMu / self.tunneling_111
spi = get_spi( T, self.tunneling_111, localMu, \
self.onsite_111, select=self.select,\
ignoreLowT=self.ignoreLowT, \
verbose=self.verbose)
return self.r111 , spi
def getBulkSpi( self, **kwargs ):
r111, n111 = self.getDensity( self.globalMu, self.T )
t0 = self.tunneling_111.min()
Tspi = kwargs.get( 'Tspi', self.T / t0 )
logger.info( "Tspi in units of t0 = " + str(Tspi) )
Tspi = Tspi * t0
logger.info( "Tspi in units of Er = " + str(Tspi) )
logger.info( " t0 in units of Er = " + str( t0 ) )
gMuZero = self.Ezero0_111 + self.globalMu
localMu = gMuZero - self.Ezero_111
localMu_t = localMu / self.tunneling_111
# Get the bulk Spi and the Spi profile
# ALSO
# Get the overall S/N and the s profiles, both s per lattice site
# and s per particle
spibulk, spi, overall_entropy, entropy, lda_number, density = \
qmc_spi.spi_bulk( r111, n111, localMu_t, Tspi, \
self.tunneling_111, self.onsite_111, **kwargs )
do_k111 = kwargs.get('do_k111', False)
if do_k111:
# Get the compressibility
k111 = get_cmpr( self.T, self.tunneling_111, localMu, \
self.onsite_111, select=self.select,\
ignoreLowT=self.ignoreLowT, \
verbose=self.verbose)
k111htse_list = []
for Thtse in [ 1.8, 2.3, 2.8]:
k111htse = get_cmpr( Thtse*t0, self.tunneling_111, localMu, \
self.onsite_111, select='htse',\
ignoreLowT=self.ignoreLowT, \
verbose=self.verbose)
k111htse_list.append( [Thtse, k111htse] )
else:
k111 = None
k111htse_list = []
U111 = self.onsite_111 / self.tunneling_111
return spibulk, spi, r111, n111, U111, self.tunneling_111, \
overall_entropy, entropy, lda_number, density, k111, \
k111htse_list
def getSpiFineGrid( self, **kwargs):
direc111 = (np.arctan(np.sqrt(2)), np.pi/4)
unit = vec3(); th = direc111[0]; ph = direc111[1]
unit.set_spherical( 1., th, ph);
numpoints = kwargs.pop('numpoints', 80 )
t111, X111_, Y111_, Z111_, lims_ = \
udipole.linecut_points( direc=direc111, extents=self.extents,\
npoints=numpoints)
# Below we get the signed distance from the origin
r111_ = X111_*unit[0] + Y111_*unit[1] + Z111_*unit[2]
# Obtain band structure and interactions along the 111 direction
bandbot_111_, bandtop_111_, \
Ezero_111_, tunneling_111_, onsite_t_111_ = \
self.pot.bandStructure( X111_, Y111_, Z111_)
# The onsite interactions are scaled up by the scattering length
onsite_t_111_ = self.a_s * onsite_t_111_
onsite_111_ = onsite_t_111_ * tunneling_111_
# Lowest value of E0 is obtained
LowestE0_ = np.amin( bandbot_111_ )
Ezero0_111_ = Ezero_111_.min()
t0 = tunneling_111_.min()
Tspi = kwargs.get( 'Tspi', self.T / t0 )
Tspi = Tspi * t0
localMu_ = self.globalMu + Ezero0_111_ - Ezero_111_
localMu_t_ = localMu_ / tunneling_111_
# Get the density
density_ = get_dens( self.T, tunneling_111_, localMu_, \
onsite_111_, select=self.select,\
ignoreLowT=self.ignoreLowT, \
verbose=self.verbose)
# Get the bulk Spi and the Spi profile
# ALSO
# Get the overall S/N and the s profiles, both s per lattice site
# and s per particle
kwargs['do_kappa']=True
spibulk, spi, overall_entropy, entropy, \
lda_number, density, compr = \
qmc_spi.spi_bulk( r111_, density_, localMu_t_, Tspi, \
tunneling_111_, onsite_111_, **kwargs )
U111 = onsite_111_ / tunneling_111_
#return spibulk, spi, r111, n111, U111, self.tunneling_111, \
# overall_entropy, entropy, lda_number, density
return r111_, spi, density_, compr, localMu_t_ * tunneling_111_
def getNumber( self, gMu, T, **kwargs):
"""
This function calculates and returns the total number of atoms.
It integrates along 111 assuming a spherically symmetric sample.
Parameters
----------
gMu : global chemical potential
"""
kwverbose = kwargs.get('verbose', None)
if kwverbose is not None:
NVerbose = kwverbose
else:
NVerbose = self.verbose
gMuZero = self.Ezero0_111 + gMu
localMu = gMuZero - self.Ezero_111
localMu_t = localMu / self.tunneling_111
density = get_dens( T, self.tunneling_111, localMu, \
self.onsite_111, select=self.select,\
ignoreLowT=self.ignoreLowT, \
verbose=self.verbose)
# Under some circumstances the green compensation can
# cause dips in the density profile. This can happen only
# if the green beam waist is smaller than the IR beam waist.
# Experimentally we have seen that we do not handle these very
# well, so we want to avoid them at all costs.
# The occurrence of this is flagged by a change in the derivative
# of the radial density. This derivative should always be negative.
# If the green beam waist is larger than the IR beam waist, then
# the problem with the non-monotonic density can also be found
# when trying to push the compensation such that muGlobal gets
# close to the evaporation threshold.
# This can be pathological because it leads to an accumulation of atoms
# that are not trapped, but this lda code integrates over them and
# counts them anyway.
# To avoid either of the two situations described above we force the
# density to decrease monotonically over the extent of our calculation.
# If the density slope is positive then we report it as an error.
#
# find the point at which the density changes derivative
radius_check = 1e-3
posradii = self.r111 > radius_check
posdens = density[ posradii ]
neg_slope = np.diff( posdens ) > 1e-4
n_neg_slope = np.sum( neg_slope )
if n_neg_slope > 0:
msg = "ERROR: Radial density profile along 111 " + \
"has a positive slope"
if NVerbose:
print msg
print "\n\nradius check start = ", radius_check
print posdens
print np.diff( posdens ) > 1e-4
if not self.ignoreSlopeErrors:
raise ValueError(msg)
elif NVerbose:
print "OK: Radial density profile along 111 decreases " + \
"monotonically."
if False:
print " posdens len = ",len(posdens)
print " n_neg_slope = ",n_neg_slope
# Checks that the density goes to zero within the current extents
if kwverbose is not None and kwverbose is False:
edgecuttof = 10.
else:
edgecuttof = 2e-2
if posdens[-1]/posdens[0] > edgecuttof:
msg = "ERROR: Density does not vanish within current " + \
"extents"
if not self.ignoreExtents:
print msg
print posdens[0]
print posdens[-1]
print posdens
print self.pot.g0
#print "etaF = ", self.EtaEvap
#print "etaFstar = ", self.etaF_star
#print "extents = ", self.extents
raise ValueError(msg)
if NVerbose:
print msg
print posdens[0]
print posdens[-1]
print self.pot.g0
dens = density[~np.isnan(density)]
r = self.r111[~np.isnan(density)]
self.PeakD = dens.max()
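# Note: r111 spans negative and positive radii (it is a signed distance),
# so the r**2-weighted integral below counts both halves of the sphere;
# the 2*pi prefactor is therefore equivalent to 4*pi over positive radii.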
return np.power(self.a,-3)*2*np.pi*integrate.simps(dens*(r**2),r)
def getNumberD( self, T):
"""
This function calculates and returns the total number of doublons.
It integrates along 111 assuming a spherically symmetric sample.
"""
doublons = get_doub( T, self.tunneling_111, self.localMu_111,\
self.onsite_111, select=self.select,\
ignoreLowT=self.ignoreLowT,\
verbose=self.verbose)
doub = doublons[~np.isnan(doublons)]
r = self.r111[~np.isnan(doublons)]
return 2.*np.power(self.a,-3)*2*np.pi*integrate.simps(doub*(r**2),r)
def getEntropy( self, T):
"""
This function calculates and returns the total entropy.
It integrates along 111 assuming a spherically symmetric sample.
"""
entropy = get_entr( T, self.tunneling_111, self.localMu_111,\
self.onsite_111, select=self.select,\
ignoreLowT=self.ignoreLowT,\
verbose=self.verbose)
entr = entropy[~np.isnan(entropy)]
r = self.r111[~np.isnan(entropy)]
return np.power(self.a,-3)*2*np.pi*integrate.simps(entr*(r**2),r)
def column_density( self ):
"""
This function calculates and returns the column density of the
cloud
"""
return None
def plotLine( lda0, **kwargs):
# Flag to ignore errors related to low temperatures beyond the reach
# of the htse
ignoreLowT = kwargs.get('ignoreLowT',False)
scale = 0.9
figGS = plt.figure(figsize=(6.0*scale,4.2*scale))
gs3Line = matplotlib.gridspec.GridSpec(2,2,\
width_ratios=[1.6, 1.], height_ratios=[2.0,1.4],\
wspace=0.25,
left=0.13, right=0.90,
bottom=0.15, top=0.78)
tightrect = [0.,0.00, 0.95, 0.84]
Ax1 = [];
Ymin =[]; Ymax=[]
line_direction = kwargs.pop('line_direction', '111')
direcs = { \
'100':(np.pi/2, 0.), \
'010':(np.pi/2, np.pi/2), \
'001':(0., np.pi), \
'111':(np.arctan(np.sqrt(2)), np.pi/4) }
labels = { \
'100':'$(\mathbf{100})$', \
'010':'$(\mathbf{010})$', \
'001':'$(\mathbf{001})$', \
'111':'$(\mathbf{111})$' }
cutkwargs = kwargs.pop( 'cutkwargs', { } )
cutkwargs['direc'] = direcs[ line_direction ]
cutkwargs['ax0label']= labels[ line_direction ]
cutkwargs['extents']= kwargs.pop('extents', 40.)
t, X,Y,Z, lims = udipole.linecut_points( **cutkwargs )
potkwargs = kwargs.pop( 'potkwargs', { } )
potkwargs['direc'] = direcs[ line_direction ]
potkwargs['ax0label']= labels[ line_direction ]
potkwargs['extents']= kwargs.pop('x1lims', (lims[0],lims[1]))[1]
tp, Xp,Yp,Zp, lims = udipole.linecut_points( **potkwargs )
kwargs['suptitleY'] = 0.96
kwargs['foottextY'] = 0.84
x1lims = kwargs.get('x1lims', (lims[0],lims[1]))
ax1 = figGS.add_subplot( gs3Line[0:3,0] )
ax1.set_xlim( *x1lims )
ax1.grid()
ax1.grid(which='minor')
ax1.set_xlabel('$\mu\mathrm{m}$ '+cutkwargs['ax0label'], fontsize=13.)
ax1.set_ylabel( lda0.pot.unitlabel, rotation=0, fontsize=13., labelpad=15 )
ax1.xaxis.set_major_locator( matplotlib.ticker.MultipleLocator(20) )
ax1.xaxis.set_minor_locator( matplotlib.ticker.MultipleLocator(10) )
ax1.yaxis.set_major_locator( matplotlib.ticker.MaxNLocator(7) )
ax1.yaxis.set_minor_locator( matplotlib.ticker.AutoMinorLocator() )
ax2 = figGS.add_subplot( gs3Line[0,1] )
ax3 = None
#ax2.grid()
ax2.set_xlabel('$\mu\mathrm{m}$', fontsize=12, labelpad=0)
#ax2.set_ylabel('$n$', rotation=0, fontsize=14, labelpad=11 )
ax2.xaxis.set_major_locator( matplotlib.ticker.MultipleLocator(20) )
ax2.xaxis.set_minor_locator( matplotlib.ticker.MultipleLocator(10) )
#----------------------------------
# CALCULATE ALL RELEVANT QUANTITIES
#----------------------------------
# All the relevant lines are first calculated here
bandbot_XYZ, bandtop_XYZ, \
Ezero_XYZ, tunneling_XYZ, onsite_t_XYZ = \
lda0.pot.bandStructure( X, Y, Z )
# The onsite interactions are scaled up by the scattering length
onsite_t_XYZ = lda0.a_s * onsite_t_XYZ
onsite_XYZ = onsite_t_XYZ * tunneling_XYZ
Ezero0_XYZ = Ezero_XYZ.min()
bottom = lda0.pot.Bottom( X, Y, Z )
lattmod = lda0.pot.LatticeMod( X, Y, Z )
excbot_XYZ, exctop_XYZ = lda0.pot.firstExcited( X, Y, Z )
# Offset the chemical potential for use in the phase diagram
localMu_XYZ = ( lda0.globalMu + lda0.Ezero0_111 - Ezero_XYZ )
# Obtain the thermodynamic quantities
density_XYZ = get_dens( lda0.T, tunneling_XYZ, localMu_XYZ, \
onsite_XYZ, select=lda0.select, ignoreLowT=ignoreLowT )
doublon_XYZ = get_doub( lda0.T, tunneling_XYZ, localMu_XYZ, \
onsite_XYZ, select=lda0.select, ignoreLowT=ignoreLowT )
entropy_XYZ = get_entr( lda0.T, tunneling_XYZ, localMu_XYZ, \
onsite_XYZ, select=lda0.select, ignoreLowT=ignoreLowT )
# All the potential lines are recalculated to match the potential
# xlims
bandbot_XYZp, bandtop_XYZp, \
Ezero_XYZp, tunneling_XYZp, onsite_t_XYZp = \
lda0.pot.bandStructure( Xp, Yp, Zp )
# The onsite interactions are scaled up by the scattering length
onsite_t_XYZp = lda0.a_s * onsite_t_XYZp
onsite_XYZp = onsite_t_XYZp * tunneling_XYZp
Ezero0_XYZp = Ezero_XYZp.min()
bottomp = lda0.pot.Bottom( Xp, Yp, Zp )
lattmodp = lda0.pot.LatticeMod( Xp, Yp, Zp )
excbot_XYZp, exctop_XYZp = lda0.pot.firstExcited( Xp, Yp, Zp )
# Offset the chemical potential for use in the phase diagram
localMu_XYZp = ( lda0.globalMu + lda0.Ezero0_111 - Ezero_XYZp )
#--------------------------
# SETUP LINES TO BE PLOTTED
#--------------------------
# A list of lines to plot is generated
# Higher zorder puts stuff in front
toplot = [
{'x':tp,\
'y':(bandbot_XYZp, Ezero_XYZp ), 'color':'blue', 'lw':2., \
'fill':True, 'fillcolor':'blue', 'fillalpha':0.75,\
'zorder':10, 'label':'$\mathrm{band\ lower\ half}$'},
{'x':tp,\
'y':(Ezero_XYZp + onsite_XYZp, bandtop_XYZp + onsite_XYZp), \
'color':'purple', 'lw':2., \
'fill':True, 'fillcolor':'plum', 'fillalpha':0.75,\
'zorder':10, 'label':'$\mathrm{band\ upper\ half}+U$'},
{'x':tp,\
'y':(excbot_XYZp, exctop_XYZp ), 'color':'red', 'lw':2., \
'fill':True, 'fillcolor':'pink', 'fillalpha':0.75,\
'zorder':2, 'label':'$\mathrm{first\ excited\ band}$'},
{'x':tp,\
'y':np.ones_like(Xp)*lda0.globalMuZ0, 'color':'limegreen',\
'lw':2,'zorder':1.9, 'label':'$\mu_{0}$'},
{'x':tp,\
'y':np.ones_like(Xp)*lda0.evapTH0_100, 'color':'#FF6F00', \
'lw':2,'zorder':1.9, 'label':'$\mathrm{evap\ threshold}$'},
{'x':tp,\
'y':bottomp,'color':'gray', 'lw':0.5,'alpha':0.5},
{'x':tp,\
'y':lattmodp,'color':'gray', 'lw':1.5,'alpha':0.5,\
'label':r'$\mathrm{lattice\ potential\ \ }\lambda\times10$'} \
]
toplot = toplot + [
{'y':density_XYZ, 'color':'blue', 'lw':1.75, \
'axis':2, 'label':'$n$'},
{'y':doublon_XYZ, 'color':'red', 'lw':1.75, \
'axis':2, 'label':'$d$'},
{'y':entropy_XYZ, 'color':'black', 'lw':1.75, \
'axis':2, 'label':'$s_{L}$'},
#{'y':density-2*doublons, 'color':'green', 'lw':1.75, \
# 'axis':2, 'label':'$n-2d$'},
#{'y':self.localMu_t, 'color':'cyan', 'lw':1.75, \
# 'axis':2, 'label':r'$\mu$'},
]
toplot = toplot + [
{'y':entropy_XYZ/density_XYZ, 'color':'gray', 'lw':1.75, \
'axis':3, 'label':'$s_{N}$'} ]
lattlabel = '\n'.join( list( lda0.pot.Info() ) + \
[lda0.pot.TrapFreqsInfo() + r',\ ' \
+ lda0.pot.EffAlpha(), \
'$\eta_{F}=%.2f$'%lda0.EtaEvap + '$,$ ' \
'$\Delta_{F}=%.2fE_{R}$'%lda0.DeltaEvap, \
] )
toplot = toplot + [ {'text':True, 'x': -0.1, 'y':1.02, 'tstring':lattlabel,
'ha':'left', 'va':'bottom', 'linespacing':1.4} ]
toplot = toplot + [ {'text':True, 'x': 1.0, 'y':1.02, 'tstring':lda0.Info(),
'ha':'right', 'va':'bottom', 'linespacing':1.4} ]
toplot = toplot + [ {'text':True, 'x': 0., 'y':1.02, \
'tstring':lda0.ThermoInfo(), \
'ha':'left', 'va':'bottom', 'axis':2, \
'linespacing':1.4} ]
#--------------------------
# ITERATE AND PLOT
#--------------------------
Emin =[]; Emax=[]
for p in toplot:
if not isinstance(p,dict):
ax1.plot(t,p); Emin.append(p.min()); Emax.append(p.max())
else:
if 'text' in p.keys():
whichax = p.get('axis',1)
axp = ax2 if whichax ==2 else ax1
tx = p.get('x', 0.)
ty = p.get('y', 1.)
ha = p.get('ha', 'left')
va = p.get('va', 'center')
ls = p.get('linespacing', 1.)
tstring = p.get('tstring', 'empty')
axp.text( tx,ty, tstring, ha=ha, va=va, linespacing=ls,\
transform=axp.transAxes)
elif 'figprop' in p.keys():
figsuptitle = p.get('figsuptitle', None)
figGS.suptitle(figsuptitle, y=kwargs.get('suptitleY',1.0),\
fontsize=14)
figGS.text(0.5,kwargs.get('foottextY',1.0),\
p.get('foottext',None),fontsize=14,\
ha='center')
elif 'y' in p.keys():
if 'x' in p.keys():
x = p['x']
else:
x = t
labelstr = p.get('label',None)
porder = p.get('zorder',2)
fill = p.get('fill', False)
ydat = p.get('y',None)
whichax = p.get('axis',1)
if whichax == 3:
if ax3 is None:
ax3 = ax2.twinx()
axp = ax3
else:
axp = ax2 if whichax ==2 else ax1
if ydat is None: continue
if fill:
axp.plot(x,ydat[0],
lw=p.get('lw',2.),\
color=p.get('color','black'),\
alpha=p.get('fillalpha',0.5),\
zorder=porder,\
label=labelstr
)
axp.fill_between( x, ydat[0], ydat[1],\
lw=p.get('lw',2.),\
color=p.get('color','black'),\
facecolor=p.get('fillcolor','gray'),\
alpha=p.get('fillalpha',0.5),\
zorder=porder
)
if whichax == 1:
Emin.append( min( ydat[0].min(), ydat[1].min() ))
Emax.append( max( ydat[0].max(), ydat[1].max() ))
else:
axp.plot( x, ydat,\
lw=p.get('lw',2.),\
color=p.get('color','black'),\
alpha=p.get('alpha',1.0),\
zorder=porder,\
label=labelstr
)
if whichax == 1:
Emin.append( ydat.min() )
Emax.append( ydat.max() )
if whichax == 3:
ax3.tick_params(axis='y', colors=p.get('color','black'))
#print labelstr
#print Emin
#print Emax
if ax3 is not None:
ax3.yaxis.set_major_locator( \
matplotlib.ticker.MaxNLocator(6, prune='upper') )
handles2, labels2 = ax2.get_legend_handles_labels()
handles3, labels3 = ax3.get_legend_handles_labels()
handles = handles2 + handles3
labels = labels2 + labels3
ax2.legend( handles, labels, bbox_to_anchor=(1.25,1.0), \
loc='lower right', numpoints=1, labelspacing=0.2, \
prop={'size':10}, handlelength=1.1, handletextpad=0.5 )
Emin = min(Emin); Emax=max(Emax)
dE = Emax-Emin
# Finalize figure
x2lims = kwargs.get('x2lims', (lims[0],lims[1]))
ax2.set_xlim( *x2lims )
y0,y1 = ax2.get_ylim()
if y1 == 1. :
ax2.set_ylim( y0 , y1 + (y1-y0)*0.05)
y2lims = kwargs.get('y2lims', None)
if y2lims is not None:
ax2.set_ylim( *y2lims)
y3lims = kwargs.get('y3lims', None)
if y3lims is not None:
ax3.set_ylim( *y3lims)
ymin, ymax = Emin-0.05*dE, Emax+0.05*dE
Ymin.append(ymin); Ymax.append(ymax); Ax1.append(ax1)
Ymin = min(Ymin); Ymax = max(Ymax)
#print Ymin, Ymax
for ax in Ax1:
ax.set_ylim( Ymin, Ymax)
if 'ax1ylim' in kwargs.keys():
ax1.set_ylim( *kwargs['ax1ylim'] )
Ax1[0].legend( bbox_to_anchor=(1.1,-0.15), \
loc='lower left', numpoints=1, labelspacing=0.2,\
prop={'size':9.5}, handlelength=1.1, handletextpad=0.5 )
#gs3Line.tight_layout(figGS, rect=tightrect)
return figGS
def plotMathy( lda0, **kwargs):
# Flag to ignore errors related to low temperatures beyond the reach
# of the htse
ignoreLowT = kwargs.get('ignoreLowT',False)
scale = 0.9
figGS = plt.figure(figsize=(6.0*scale,4.2*scale))
#figGS = plt.figure(figsize=(5.6,4.2))
gs3Line = matplotlib.gridspec.GridSpec(3,2,\
width_ratios=[1.6, 1.], height_ratios=[2.2,0.8,1.2],\
wspace=0.2, hspace=0.24,
left = 0.15, right=0.95, bottom=0.14, top=0.78)
#tightrect = [0.,0.00, 0.95, 0.88]
Ax1 = [];
Ymin =[]; Ymax=[]
line_direction = kwargs.pop('line_direction', '111')
direcs = { \
'100':(np.pi/2, 0.), \
'010':(np.pi/2, np.pi/2), \
'001':(0., np.pi), \
'111':(np.arctan(np.sqrt(2)), np.pi/4) }
labels = { \
'100':'$(\mathbf{100})$', \
'010':'$(\mathbf{010})$', \
'001':'$(\mathbf{001})$', \
'111':'$(\mathbf{111})$' }
cutkwargs = kwargs.pop( 'cutkwargs', {} )
cutkwargs['direc'] = direcs[ line_direction ]
cutkwargs['ax0label']= labels[ line_direction ]
cutkwargs['extents']= kwargs.pop('extents', 40.)
t, X,Y,Z, lims = udipole.linecut_points( **cutkwargs )
ax1 = figGS.add_subplot( gs3Line[0:2,0] )
ax1.grid()
ax1.grid(which='minor')
ax1.set_ylabel( lda0.pot.unitlabel, rotation=0, fontsize=16, labelpad=15 )
ax1.xaxis.set_major_locator( matplotlib.ticker.MaxNLocator(7) )
#ax1.xaxis.set_minor_locator( matplotlib.ticker.MultipleLocator(20) )
ax1.yaxis.set_major_locator( matplotlib.ticker.MaxNLocator(7) )
#ax1.yaxis.set_minor_locator( matplotlib.ticker.MultipleLocator(1.) )
ax2 = figGS.add_subplot( gs3Line[0,1] )
ax2.grid()
#ax2.set_ylabel('$n$', rotation=0, fontsize=14, labelpad=11 )
ax2.xaxis.set_major_locator( matplotlib.ticker.MaxNLocator(6) )
#ax2.xaxis.set_minor_locator( matplotlib.ticker.MultipleLocator(10) )
ax3 = figGS.add_subplot( gs3Line[2,0] )
ax3.grid()
ax3.yaxis.set_major_locator( matplotlib.ticker.MaxNLocator(3) )
ax3.xaxis.set_major_locator( matplotlib.ticker.MaxNLocator(7) )
#----------------------------------
# CALCULATE ALL RELEVANT QUANTITIES
#----------------------------------
# All the relevant lines are first calculated here
# In the Mathy plot the x-axis is the local lattice depth
s0_XYZ = lda0.pot.S0( X, Y, Z)[0]
ax1.set_xlim( s0_XYZ.min(), s0_XYZ.max() )
ax3.set_xlim( s0_XYZ.min(), s0_XYZ.max() )
x2lims = kwargs.get('x2lims', None)
if x2lims is not None:
ax2.set_xlim( *x2lims)
else:
ax2.set_xlim( s0_XYZ.min(), s0_XYZ.max() )
ax3.set_xlabel('$s_{0}\,(E_{R}) $', fontsize=13)
ax2.set_xlabel('$s_{0}\,(E_{R}) $', fontsize=12, labelpad=0)
bandbot_XYZ, bandtop_XYZ, \
Ezero_XYZ, tunneling_XYZ, onsite_t_XYZ = \
lda0.pot.bandStructure( X, Y, Z )
# The onsite interactions are scaled up by the scattering length
onsite_t_XYZ = lda0.a_s * onsite_t_XYZ
onsite_XYZ = onsite_t_XYZ * tunneling_XYZ
Ezero0_XYZ = Ezero_XYZ.min()
bottom = lda0.pot.Bottom( X, Y, Z )
lattmod = lda0.pot.LatticeMod( X, Y, Z )
Mod = np.amin( lda0.pot.S0( X, Y, Z), axis=0 )
deltas0 = ( s0_XYZ.max()-s0_XYZ.min() )
lattmod = lda0.pot.Bottom( X, Y, Z ) + \
Mod*np.power( np.cos( 2.*np.pi* s0_XYZ *10./deltas0 ), 2)
excbot_XYZ, exctop_XYZ = lda0.pot.firstExcited( X, Y, Z )
# Offset the chemical potential for use in the phase diagram
localMu_XYZ = ( lda0.globalMu + lda0.Ezero0_111 - Ezero_XYZ )
# Obtain the thermodynamic quantities
density_XYZ = get_dens( lda0.T, tunneling_XYZ, localMu_XYZ, \
onsite_XYZ, select=lda0.select, ignoreLowT=ignoreLowT )
doublon_XYZ = get_doub( lda0.T, tunneling_XYZ, localMu_XYZ, \
onsite_XYZ, select=lda0.select, ignoreLowT=ignoreLowT )
entropy_XYZ = get_entr( lda0.T, tunneling_XYZ, localMu_XYZ, \
onsite_XYZ, select=lda0.select, ignoreLowT=ignoreLowT )
#--------------------------
# SETUP LINES TO BE PLOTTED
#--------------------------
# A list of lines to plot is generated
# Higher zorder puts stuff in front
toplot = [
{'y':(bandbot_XYZ, Ezero_XYZ ), 'color':'blue', 'lw':2., \
'fill':True, 'fillcolor':'blue', 'fillalpha':0.5,\
'zorder':10, 'label':'$\mathrm{band\ lower\ half}$'},
{'y':(Ezero_XYZ + onsite_XYZ, bandtop_XYZ + onsite_XYZ), \
'color':'purple', 'lw':2., \
'fill':True, 'fillcolor':'plum', 'fillalpha':0.5,\
'zorder':10, 'label':'$\mathrm{band\ upper\ half}+U$'},
{'y':(Ezero_XYZ, Ezero_XYZ + onsite_XYZ), \
'color':'black', 'lw':2., \
'fill':True, 'fillcolor':'gray', 'fillalpha':0.85,\
'zorder':10, 'label':'$\mathrm{mott\ gap}$'},
#{'y':(excbot_XYZ, exctop_XYZ ), 'color':'red', 'lw':2., \
# 'fill':True, 'fillcolor':'pink', 'fillalpha':0.75,\
# 'zorder':2, 'label':'$\mathrm{first\ excited\ band}$'},
{'y':np.ones_like(X)*lda0.globalMuZ0, 'color':'limegreen',\
'lw':2,'zorder':1.9, 'label':'$\mu_{0}$'},
{'y':np.ones_like(X)*lda0.evapTH0_100, 'color':'#FF6F00', \
'lw':2,'zorder':1.9, 'label':'$\mathrm{evap\ threshold}$'},
#{'y':bottom,'color':'gray', 'lw':0.5,'alpha':0.5, 'axis':3},
{'y':lattmod,'color':'gray', 'lw':1.5,'alpha':0.5, \
'axis':3,\
'label':r'$\mathrm{lattice\ potential\ \ }\lambda\times10$'} \
]
entropy_per_particle = kwargs.pop('entropy_per_particle', False)
if entropy_per_particle:
toplot = toplot + [
{'y':entropy_XYZ/density_XYZ, 'color':'black', 'lw':1.75, \
'axis':2, 'label':'$s_{N}$'} ]
else:
toplot = toplot + [
{'y':density_XYZ, 'color':'blue', 'lw':1.75, \
'axis':2, 'label':'$n$'},
{'y':doublon_XYZ, 'color':'red', 'lw':1.75, \
'axis':2, 'label':'$d$'},
{'y':entropy_XYZ, 'color':'black', 'lw':1.75, \
'axis':2, 'label':'$s_{L}$'},
#{'y':density-2*doublons, 'color':'green', 'lw':1.75, \
# 'axis':2, 'label':'$n-2d$'},
#{'y':self.localMu_t, 'color':'cyan', 'lw':1.75, \
# 'axis':2, 'label':r'$\mu$'},
]
lattlabel = '\n'.join( list( lda0.pot.Info() ) + \
[lda0.pot.TrapFreqsInfo() + r',\ ' \
+ lda0.pot.EffAlpha(), \
'$\eta_{F}=%.2f$'%lda0.EtaEvap + '$,$ ' \
'$\Delta_{F}=%.2fE_{R}$'%lda0.DeltaEvap, \
] )
toplot = toplot + [ {'text':True, 'x': 0., 'y':1.02, 'tstring':lattlabel,
'ha':'left', 'va':'bottom', 'linespacing':1.4} ]
toplot = toplot + [ {'text':True, 'x': 1.0, 'y':1.02, 'tstring':lda0.Info(),
'ha':'right', 'va':'bottom', 'linespacing':1.4} ]
toplot = toplot + [ {'text':True, 'x': 0., 'y':1.02, \
'tstring':lda0.ThermoInfo(), \
'ha':'left', 'va':'bottom', 'axis':2, \
'linespacing':1.4} ]
#--------------------------
# ITERATE AND PLOT
#--------------------------
kwargs['suptitleY'] = 0.96
kwargs['foottextY'] = 0.84
# For every plotted quantity I use only the positive radii
Emin =[]; Emax=[]
positive = t > 0.
xarray = s0_XYZ[ positive ]
for p in toplot:
if not isinstance(p,dict):
p = p[positive]
ax1.plot(xarray,p); Emin.append(p.min()); Emax.append(p.max())
else:
if 'text' in p.keys():
whichax = p.get('axis',1)
axp = ax2 if whichax ==2 else ax1
tx = p.get('x', 0.)
ty = p.get('y', 1.)
ha = p.get('ha', 'left')
va = p.get('va', 'center')
ls = p.get('linespacing', 1.)
tstring = p.get('tstring', 'empty')
axp.text( tx,ty, tstring, ha=ha, va=va, linespacing=ls,\
transform=axp.transAxes)
elif 'figprop' in p.keys():
figsuptitle = p.get('figsuptitle', None)
figGS.suptitle(figsuptitle, y=kwargs.get('suptitleY',1.0),\
fontsize=14)
figGS.text(0.5,kwargs.get('foottextY',1.0),\
p.get('foottext',None),fontsize=14,\
ha='center')
elif 'y' in p.keys():
whichax = p.get('axis',1)
#if whichax == 2 : continue
axp = ax2 if whichax ==2 else ax3 if whichax == 3 else ax1
labelstr = p.get('label',None)
porder = p.get('zorder',2)
fill = p.get('fill', False)
ydat = p.get('y',None)
if ydat is None: continue
if fill:
ydat = ( ydat[0][positive], ydat[1][positive] )
axp.plot(xarray,ydat[0],
lw=p.get('lw',2.),\
color=p.get('color','black'),\
alpha=p.get('fillalpha',0.5),\
zorder=porder,\
label=labelstr
)
axp.fill_between( xarray, ydat[0], ydat[1],\
lw=p.get('lw',2.),\
color=p.get('color','black'),\
facecolor=p.get('fillcolor','gray'),\
alpha=p.get('fillalpha',0.5),\
zorder=porder
)
if whichax == 1:
Emin.append( min( ydat[0].min(), ydat[1].min() ))
Emax.append( max( ydat[0].max(), ydat[1].max() ))
else:
ydat = ydat[ positive ]
axp.plot( xarray, ydat,\
lw=p.get('lw',2.),\
color=p.get('color','black'),\
alpha=p.get('alpha',1.0),\
zorder=porder,\
label=labelstr
)
if whichax == 1:
Emin.append( ydat.min() )
Emax.append( ydat.max() )
ax2.legend( bbox_to_anchor=(0.03,1.02), \
loc='upper left', numpoints=1, labelspacing=0.2, \
prop={'size':10}, handlelength=1.1, handletextpad=0.5 )
Emin = min(Emin); Emax=max(Emax)
dE = Emax-Emin
# Finalize figure
y0,y1 = ax2.get_ylim()
ax2.set_ylim( y0 , y1 + (y1-y0)*0.1)
ymin, ymax = Emin-0.05*dE, Emax+0.05*dE
Ymin.append(ymin); Ymax.append(ymax); Ax1.append(ax1)
Ymin = min(Ymin); Ymax = max(Ymax)
for ax in Ax1:
ax.set_ylim( Ymin, Ymax)
if 'ax1ylim' in kwargs.keys():
ax1.set_ylim( *kwargs['ax1ylim'] )
Ax1[0].legend( bbox_to_anchor=(1.1,0.1), \
loc='upper left', numpoints=1, labelspacing=0.2,\
prop={'size':11}, handlelength=1.1, handletextpad=0.5 )
#gs3Line.tight_layout(figGS, rect=tightrect)
return figGS
def CheckInhomog( lda0, **kwargs ):
"""This function will make a plot along 111 of the model parameters:
U, t, U/t, v0.
It is useful to assess the degree of inhomogeneity in our system"""
# Prepare the figure
fig = plt.figure(figsize=(9.,4.2))
lattlabel = '\n'.join( list( lda0.pot.Info() ) )
lattlabel = '\n'.join( [ i.split( r'$\mathrm{,}\ $' )[0].replace('s','v') \
for i in lda0.pot.Info() ] )
Nlabel = r'$N=%.2f\times 10^{5}$' % (lda0.Number/1e5)
Slabel = r'$S/N=%.2fk_{\mathrm{B}}$' % ( lda0.Entropy / lda0.Number )
thermolabel = '\n'.join([Nlabel, Slabel])
ldainfoA = '\n'.join(lda0.Info().split('\n')[:2])
ldainfoB = '\n'.join(lda0.Info().split('\n')[-2:])
fig.text( 0.05, 0.98, lattlabel, ha='left', va='top', linespacing=1.2)
fig.text( 0.48, 0.98, ldainfoA, ha='right', va='top', linespacing=1.2)
fig.text( 0.52, 0.98, ldainfoB, ha='left', va='top', linespacing=1.2)
fig.text( 0.95, 0.98, thermolabel, ha='right', va='top', linespacing=1.2)
#fig.text( 0.05, 0.86, "Sample is divided in 5 bins, all containing" +\
# " the same number of atoms (see panel 2).\n" + \
# "Average Fermi-Hubbard parameters $n$, $U$, $t$, " +\
# "and $U/t$ are calculated in each bin (see panels 1, 3, 4, 5 )" )
gs = matplotlib.gridspec.GridSpec( 2,4, wspace=0.18,\
left=0.1, right=0.9, bottom=0.05, top=0.98)
# Setup axes
axn = fig.add_subplot(gs[0,0])
axnInt = fig.add_subplot(gs[0,3])
axU = fig.add_subplot(gs[1,0])
axt = fig.add_subplot(gs[1,1])
axUt = fig.add_subplot(gs[1,2])
axv0 = fig.add_subplot(gs[1,3])
axEntr = fig.add_subplot( gs[0,1] )
axSpi = fig.add_subplot( gs[0,2] )
# Set xlim
x0 = -40.; x1 = 40.
axn.set_xlim( x0, x1)
axEntr.set_xlim( x0, x1)
axEntr.set_ylim( 0., 1.0)
axSpi.set_xlim( x0, x1)
axSpi.set_ylim( 0., 3.0)
axnInt.set_xlim( 0., x1 )
axU.set_xlim( x0, x1 )
axU.set_ylim( 0., np.amax( lda0.onsite_t_111 * lda0.tunneling_111 *1.05 ) )
axt.set_xlim( x0, x1 )
axt.set_ylim( 0., 0.12)
axUt.set_xlim( x0, x1 )
axUt.set_ylim( 0., np.amax( lda0.onsite_t_111 * 1.05 ))
axv0.set_xlim( x0, x1 )
lw0 = 2.5
# Plot relevant quantities
r111_, density_111 = lda0.getDensity( lda0.globalMu, lda0.T )
r111_Entr, entropy_111 = lda0.getEntropy111( lda0.globalMu, lda0.T)
r111_Spi, spi_111 = lda0.getSpi111( lda0.globalMu, lda0.T)
V0_111 = lda0.pot.S0( lda0.X111, lda0.Y111, lda0.Z111 )
# density, entropy and spi
axn.plot( lda0.r111, density_111, lw=lw0 , color='black')
axEntr.plot( lda0.r111, entropy_111, lw=lw0 , color='black')
axSpi.plot( lda0.r111, spi_111, lw=lw0 , color='black')
# U
axU.plot( lda0.r111, lda0.onsite_t_111 * lda0.tunneling_111 , \
lw=lw0, label='$U$', color='black')
# t
axt.plot( lda0.r111, lda0.tunneling_111,lw=lw0, label='$t$', \
color='black')
# U/t
axUt.plot( lda0.r111, lda0.onsite_t_111, lw=lw0, color='black')
# Lattice depth
#print "shape of V0 = ", V0_111.shape
axv0.plot( lda0.r111, V0_111[0], lw=lw0, color='black', \
label='$\mathrm{Lattice\ depth}$')
# Band gap
bandgap_111 = scubic.bands3dvec( V0_111, NBand=1 )[0] \
- scubic.bands3dvec( V0_111, NBand=0 )[1]
axv0.plot( lda0.r111, bandgap_111, lw=lw0, linestyle=':', color='black', \
label='$\mathrm{Band\ gap}$')
axv0.legend( bbox_to_anchor=(0.03,0.02), \
loc='lower left', numpoints=3, labelspacing=0.2,\
prop={'size':6}, handlelength=1.5, handletextpad=0.5 )
# Define function to calculate the cumulative atom number
def NRadius( Radius ):
"""
This function calculates the fraction of the atom number
up to a certain Radius
"""
valid = np.logical_and( np.abs(lda0.r111) < Radius, \
~np.isnan(density_111) )
r = lda0.r111[ valid ]
dens = density_111[ valid ]
return np.power( lda0.pot.l/2, -3) * \
2 * np.pi*integrate.simps( dens*(r**2), r) / lda0.Number
# Plot the cumulative atom number
radii = lda0.r111[ lda0.r111 > 4. ]
NInt = []
for radius in radii:
NInt.append( NRadius( radius ) )
NInt = np.array( NInt )
axnInt.plot( radii, NInt, lw=lw0, color='black')
# Define function to numerically solve for y in a pair of x,y arrays
def x_solve( x_array, y_array, yval ):
"""
This function solves for x0 in the equation y0=y(x0)
where the function y(x) is defined with data arrays.
"""
# Convert the array to a function and then solve for y==yval
yf = interp1d( x_array, y_array-yval, kind='cubic')
return optimize.brentq( yf, x_array.min(), x_array.max() )
def y_solve( x_array, y_array, xval ):
yf = interp1d( x_array, y_array, kind='cubic')
return yf(xval)
radius1e = x_solve( lda0.r111[ lda0.r111 > 0 ] , \
density_111[ lda0.r111 > 0 ] , \
density_111.max()/np.exp(1.) )
pos_r111 = lda0.r111[ lda0.r111 > 0 ]
pos_dens111 = density_111[ lda0.r111 > 0 ]
#slice_type = 'defined_bins'
slice_type = 'percentage'
if slice_type == 'defined_bins':
print pos_dens111.max()
cutoffs = [ 1.20, 1.05, 0.95, 0.75, 0.50, 0.25, 0.00 ]
if pos_dens111.max() < 1.20 :
cutoffs = cutoffs[1:]
if pos_dens111.max() < 1.05 :
cutoffs = cutoffs[1:]
nrange0 = [ pos_dens111.max() ] + cutoffs[:-1]
nrange1 = cutoffs
print nrange0
print nrange1
rbins = []
for i in range(len(nrange1)-1):
if np.any( pos_dens111 > nrange1[i] ):
rbins.append(( (nrange1[i] + nrange0[i])/2., \
x_solve( pos_r111, pos_dens111, nrange1[i] ) ))
print rbins
rcut = [ b[1] for b in rbins ]
print " Bins cut radii = ", rcut
elif slice_type == 'percentage':
# Find the various radii that split the cloud into slots of 20% atom number
rcut = []
nrange0 = [ pos_dens111[0] ]
nrange1 = []
for Ncut in [0.2, 0.4, 0.6, 0.8 ]:
sol = x_solve( radii, NInt, Ncut )
rcut.append( sol )
denssol = y_solve( pos_r111, pos_dens111, sol )
nrange0.append( denssol )
nrange1.append( denssol )
nrange1.append(0.)
# get the number of atoms in each bin
binedges = rcut + [rcut[-1]+20.]
Nbin = []
for b in range(len(rcut) + 1 ):
if b == 0:
Nbin.append( NRadius( binedges[b] ) )
else:
Nbin.append( NRadius(binedges[b]) - NRadius(binedges[b-1]) )
Nbin = np.array( Nbin )
Nbinsum = Nbin.sum()
if np.abs( Nbinsum - 1.0 ) > 0.01:
print "Total natoms from adding bins = ", Nbinsum
raise ValueError("Normalization issue with density distribution.")
# Define functions to average over the shells
def y_average( y_array, x0, x1):
# Average y_array over the radii x0 to x1, weighted by density
valid = np.logical_and( np.abs(lda0.r111) < 70., ~np.isnan(density_111) )
r = lda0.r111[ valid ]
dens = density_111[ valid ]
y = y_array[ valid ]
shell = np.logical_and( r >= x0, r<x1 )
r = r[shell]
dens = dens[shell]
y = y[shell]
num = integrate.simps( y* dens*(r**2), r)
den = integrate.simps( dens*(r**2), r)
return num/den
# Define a function here that makes a piecewise function with the average
# values of a quantity so that it can be plotted
def binned( x, yqty ):
x = np.abs(x)
yavg = []
cond = []
for x0,x1 in zip( [0.]+rcut, rcut+[rcut[-1]+20.]):
cond.append(np.logical_and( x >= x0 , x<x1 ) )
yavg.append( y_average( yqty, x0, x1) )
return np.piecewise( x, cond, yavg ), yavg
# Calculate and plot the binned quantities
dens_binned = binned( lda0.r111, density_111 )
entr_binned = binned( lda0.r111, entropy_111 )
spi_binned = binned( lda0.r111, spi_111 )
Ut_binned = binned( lda0.r111, lda0.onsite_t_111 )
U_binned = binned( lda0.r111, lda0.onsite_t_111 * lda0.tunneling_111 )
t_binned = binned( lda0.r111, lda0.tunneling_111 )
peak_dens = np.amax( density_111 )
peak_t = np.amin( lda0.tunneling_111 )
axn.fill_between( lda0.r111, dens_binned[0], 0., \
lw=2, color='red', facecolor='red', \
zorder=2, alpha=0.8)
axEntr.fill_between( lda0.r111, entr_binned[0], 0., \
lw=2, color='red', facecolor='red', \
zorder=2, alpha=0.8)
axSpi.fill_between( lda0.r111, spi_binned[0], 0., \
lw=2, color='red', facecolor='red', \
zorder=2, alpha=0.8)
axUt.fill_between( lda0.r111, Ut_binned[0], 0., \
lw=2, color='red', facecolor='red', \
zorder=2, alpha=0.8 )
axU.fill_between( lda0.r111, U_binned[0], 0., \
lw=2, color='red', facecolor='red',label='$U$', \
zorder=2, alpha=0.8)
axt.fill_between( lda0.r111, t_binned[0], 0., \
lw=2, color='red', facecolor='red',linestyle=':',\
label='$t$', zorder=2, alpha=0.8)
# Set y labels
axn.set_ylabel(r'$n$')
axEntr.set_ylabel(r'$s$')
axSpi.set_ylabel(r'$S_{\pi}$')
axnInt.set_ylabel(r'$N_{<R}$')
axU.set_ylabel(r'$U\,(E_{R})$')
axt.set_ylabel(r'$t\,(E_{R})$')
axUt.set_ylabel(r'$U/t$')
axv0.set_ylabel(r'$E_{R}$')
# Set y lims
n_ylim = kwargs.get('n_ylim',None)
if n_ylim is not None: axn.set_ylim( *n_ylim)
letters = [\
r'\textbf{a}',\
r'\textbf{b}',\
r'\textbf{c}',\
r'\textbf{d}',\
r'\textbf{e}',\
r'\textbf{f}',\
r'\textbf{g}',\
r'\textbf{h}',\
]
for i,ax in enumerate([axn, axEntr, axSpi, axnInt, axU, axt, axUt, axv0]):
ax.text( 0.08,0.86, letters[i] , transform=ax.transAxes, fontsize=14)
ax.yaxis.grid()
ax.set_xlabel(r'$\mu\mathrm{m}$')
for n,r in enumerate(rcut):
if n % 2 == 0:
if n == len(rcut) - 1:
r2 = 60.
else:
r2 = rcut[n+1 ]
ax.axvspan( r, r2, facecolor='lightgray')
if i != 3:
ax.axvspan(-r2, -r, facecolor='lightgray')
ax.axvline( r, lw=1.0, color='gray', zorder=1 )
if i != 3:
ax.axvline(-r, lw=1.0, color='gray', zorder=1 )
ax.xaxis.set_major_locator( matplotlib.ticker.MultipleLocator(20) )
ax.xaxis.set_minor_locator( matplotlib.ticker.MultipleLocator(10) )
#labels = [item.get_text() for item in ax.get_xticklabels()]
#print labels
#labels = ['' if float(l) % 40 != 0 else l for l in labels ]
#ax.set_xticklabels(labels)
axnInt.xaxis.set_major_locator( matplotlib.ticker.MultipleLocator(10) )
axnInt.xaxis.set_minor_locator( matplotlib.ticker.MultipleLocator(5) )
# Finalize figure
gs.tight_layout(fig, rect=[0.,0.0,1.0,0.94])
if kwargs.get('closefig', False):
plt.close()
#dens_set = np.array( [ b[0] for b in rbins ] + [dens_binned[1][-1]] )
binresult = np.column_stack((
np.round( Nbin, decimals=3),\
np.round( nrange1, decimals=3),\
np.round( nrange0, decimals=3),\
np.round( dens_binned[1], decimals=2),\
np.round( t_binned[1], decimals=3),\
np.round( U_binned[1], decimals=3),\
np.round( Ut_binned[1], decimals=3) ))
from tabulate import tabulate
output = tabulate(binresult, headers=[\
"Atoms in bin", \
"n min", \
"n max", \
"Mean n", \
"Mean t", \
"Mean U", \
"Mean U/t", ]\
, tablefmt="orgtbl", floatfmt='.3f')
#, tablefmt="latex", floatfmt='.3f')
#print
#print output
if kwargs.get('return_profile', False):
return fig, binresult,\
peak_dens, radius1e, peak_t, output, r111_, density_111
else:
return fig, binresult,\
peak_dens, radius1e, peak_t, output
def CheckInhomogSimple( lda0, **kwargs ):
"""This function will make a plot along 111 of the density, U/t
and T/t
It is useful to assess the degree of inhomogeneity in our system"""
# Prepare the figure
fig = plt.figure(figsize=(9.,4.2))
lattlabel = '\n'.join( list( lda0.pot.Info() ) )
lattlabel = '\n'.join( [ i.split( r'$\mathrm{,}\ $' )[0].replace('s','v') \
for i in lda0.pot.Info() ] )
Nlabel = r'$N=%.2f\times 10^{5}$' % (lda0.Number/1e5)
Slabel = r'$S/N=%.2fk_{\mathrm{B}}$' % ( lda0.Entropy / lda0.Number )
thermolabel = '\n'.join([Nlabel, Slabel])
ldainfoA = '\n'.join(lda0.Info().split('\n')[:2])
ldainfoB = '\n'.join(lda0.Info().split('\n')[-2:])
fig.text( 0.05, 0.98, lattlabel, ha='left', va='top', linespacing=1.2)
fig.text( 0.48, 0.98, ldainfoA, ha='right', va='top', linespacing=1.2)
fig.text( 0.52, 0.98, ldainfoB, ha='left', va='top', linespacing=1.2)
fig.text( 0.95, 0.98, thermolabel, ha='right', va='top', linespacing=1.2)
gs = matplotlib.gridspec.GridSpec( 1,3, wspace=0.18,\
left=0.1, right=0.9, bottom=0.05, top=0.98)
# Setup axes
axn = fig.add_subplot(gs[0,0])
axU = fig.add_subplot(gs[0,1])
axT = fig.add_subplot(gs[0,2])
# Set xlim
x0 = -40.; x1 = 40.
axn.set_xlim( x0, x1)
axU.set_xlim( x0, x1 )
    axU.set_ylim( 0., np.amax( lda0.onsite_t_111 ) * 1.05 )
axT.set_xlim( x0, x1 )
axT.set_ylim( 0., 1.0)
lw0 = 2.5
# Plot relevant quantities
r111_, density_111 = lda0.getDensity( lda0.globalMu, lda0.T )
# density,
axn.plot( lda0.r111, density_111, lw=lw0 , color='black')
    # U/t
    Ut_111 = lda0.onsite_t_111
    axU.plot( lda0.r111, Ut_111 , \
              lw=lw0, label='$U/t$', color='black')
# T
Tt_111 = lda0.T / lda0.tunneling_111
    axT.plot( lda0.r111, Tt_111, lw=lw0, label='$T/t$', \
              color='black')
peak_dens = np.amax( density_111 )
peak_t = np.amin( lda0.tunneling_111 )
# Set y labels
axn.set_ylabel(r'$n$')
axU.set_ylabel(r'$U/t$')
axT.set_ylabel(r'$T/t$')
# Set y lims
n_ylim = kwargs.get('n_ylim',None)
if n_ylim is not None: axn.set_ylim( *n_ylim)
letters = [\
r'\textbf{a}',\
r'\textbf{b}',\
r'\textbf{c}',\
]
for i,ax in enumerate([axn, axU, axT]):
ax.text( 0.08,0.86, letters[i] , transform=ax.transAxes, fontsize=14)
ax.yaxis.grid()
ax.set_xlabel(r'$\mu\mathrm{m}$')
ax.xaxis.set_major_locator( matplotlib.ticker.MultipleLocator(20) )
ax.xaxis.set_minor_locator( matplotlib.ticker.MultipleLocator(10) )
#labels = [item.get_text() for item in ax.get_xticklabels()]
#print labels
#labels = ['' if float(l) % 40 != 0 else l for l in labels ]
#ax.set_xticklabels(labels)
# Finalize figure
gs.tight_layout(fig, rect=[0.,0.0,1.0,0.94])
if kwargs.get('closefig', False):
plt.close()
if kwargs.get('return_profile', False):
return fig, peak_dens, peak_t, r111_, density_111, Ut_111 ,Tt_111
else:
return fig, peak_dens, peak_t
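# A minimal usage sketch (the `lda0` object and keyword names are the ones
# handled above; the filename is illustrative):
#   fig, peak_dens, peak_t = CheckInhomogSimple(lda0, n_ylim=(0., 2.))
#   fig.savefig('inhomog_simple.png', dpi=300)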
def CheckInhomogTrap( lda0, **kwargs ):
"""This function will make a plot along 111 of U, t, U/t, v0, W, and W/U
(where W is the band gap)
It is useful to assess the degree of inhomogeneity in our system"""
# Prepare the figure
fig = plt.figure(figsize=(8.,4.2))
    lattlabel = '\n'.join( [ i.split( r'$\mathrm{,}\ $' )[0].replace('s','v') \
                   for i in lda0.pot.Info() ] )
ldainfoA = '\n'.join(lda0.Info().split('\n')[:2])
ldainfoB = '\n'.join(lda0.Info().split('\n')[-2:])
fig.text( 0.05, 0.98, lattlabel, ha='left', va='top', linespacing=1.2)
fig.text( 0.48, 0.98, ldainfoA, ha='right', va='top', linespacing=1.2)
fig.text( 0.52, 0.98, ldainfoB, ha='left', va='top', linespacing=1.2)
gs = matplotlib.gridspec.GridSpec( 2,4, wspace=0.18,\
left=0.1, right=0.9, bottom=0.05, top=0.98)
# Setup axes
axU = fig.add_subplot(gs[0,0])
axt = fig.add_subplot(gs[0,1])
ax12t = fig.add_subplot(gs[0,2])
axUt = fig.add_subplot(gs[0,3])
axv0 = fig.add_subplot(gs[1,0])
axW = fig.add_subplot(gs[1,1])
axWU = fig.add_subplot(gs[1,2])
axW12t = fig.add_subplot(gs[1,3])
axs = [axU, axt, ax12t, axUt, axv0, axW, axWU, axW12t]
# Set xlim
x0 = 0.; x1 = 40.
for ax in axs:
ax.set_xlim( x0, x1)
# Set y labels
axU.set_ylabel(r'$U\,(E_{R})$')
axt.set_ylabel(r'$t\,(\mathrm{kHz})$')
ax12t.set_ylabel(r'$12t\,(E_{R})$')
axUt.set_ylabel(r'$U/t$')
axv0.set_ylabel(r'$v_{0}\,(E_{R})$')
axW.set_ylabel(r'$W\,(E_{R})$')
axWU.set_ylabel(r'$W/U$')
axW12t.set_ylabel(r'$W/(12t)$')
#axU.set_ylim( 0., np.amax( lda0.onsite_t_111 * lda0.tunneling_111 *1.05 ) )
lw0 = 2.5
# U
U_111 = lda0.onsite_t_111 * lda0.tunneling_111
axU.plot( lda0.r111, U_111 , \
              lw=lw0, label='$U$', color='black')
# t
t_111 = lda0.tunneling_111
axt.plot( lda0.r111, t_111*29., \
lw=lw0, label='$t$', color='black')
# 12t
t_111 = lda0.tunneling_111
ax12t.plot( lda0.r111, 12.*t_111 , \
              lw=lw0, label='$12t$', color='black')
# U/t
Ut_111 = lda0.onsite_t_111
axUt.plot( lda0.r111, Ut_111 , \
              lw=lw0, label='$U/t$', color='black')
# v0
V0_111 = lda0.pot.S0( lda0.X111, lda0.Y111, lda0.Z111 )
axv0.plot( lda0.r111, V0_111[0], lw=lw0, color='black', \
label='$\mathrm{Lattice\ depth}$')
# Band gap
    bandgap_111 = scubic.bands3dvec( V0_111, NBand=1 )[0] \
                  - scubic.bands3dvec( V0_111, NBand=0 )[1]
axW.plot( lda0.r111, bandgap_111, lw=lw0, color='black', \
label='$\mathrm{Band\ gap},\,W$')
# Band gap / U
axWU.plot( lda0.r111, bandgap_111 / U_111, lw=lw0, color='black', \
label='$W/U$')
# Band gap / 12t
axW12t.plot( lda0.r111, bandgap_111 / (12.*t_111), lw=lw0, color='black', \
label='$W/(12t)$')
    letters = [\
               r'\textbf{a}',\
               r'\textbf{b}',\
               r'\textbf{c}',\
               r'\textbf{d}',\
               r'\textbf{e}',\
               r'\textbf{f}',\
               r'\textbf{g}',\
               r'\textbf{h}',\
              ]
for i,ax in enumerate(axs):
#ax.text( 0.08,0.86, letters[i] , transform=ax.transAxes, fontsize=14)
ax.yaxis.grid()
ax.set_xlabel(r'$\mu\mathrm{m}$')
ax.xaxis.set_major_locator( matplotlib.ticker.MultipleLocator(10) )
ax.xaxis.set_minor_locator( matplotlib.ticker.MultipleLocator(5) )
#labels = [item.get_text() for item in ax.get_xticklabels()]
#print labels
#labels = ['' if float(l) % 40 != 0 else l for l in labels ]
#ax.set_xticklabels(labels)
# Finalize figure
gs.tight_layout(fig, rect=[0.,0.0,1.0,0.94])
if kwargs.get('closefig', False):
plt.close()
return fig
|
mit
| 8,712,553,290,903,506,000
| 36.097433
| 83
| 0.517302
| false
| 3.23241
| false
| false
| false
|
cheery/language
|
parser/__init__.py
|
1
|
15728
|
from lookahead import CharacterLookAhead, LookAhead
from structures import Constant, Struct
specials = {
',': 'comma',
'(': 'leftparen', ')': 'rightparen', '[': 'leftbracket', ']': 'rightbracket',
}
operators = set([
'or', 'and', 'not',
'!', ':', '=', '-', '+', '*', '/', '<>', '==', '!=', '->',
'<', '<=', '>', '>=', '|', '^', '&', '<<', '>>', '//', '%', '~',
'.', '.;', '.:', ':.', ';', '@', '::', '..', ':=',
])
infix_operators = {
'or': 20,
'and': 30,
'<': 40, '<=': 40, '>': 40, '>=': 40,
'<>': 40, '!=': 40, '==': 40,
'|': 50,
'^': 60,
'&': 70,
'<<': 80, '>>': 80,
'+': 90, '-': 90,
'*': 100, '/': 100, '//': 100, '%': 100,
}
prefix_operators = {
'not': 30,
'+': 110, '-': 110, '~': 110,
}
right_binding = set(['or', 'and', '<', '<=', '>', '>=', '<>', '!=', '=='])
def main():
source = 'tokens'
fd = open(source, 'r')
for structure in parse(fd.read().decode('utf-8'), source, debug=True):
print structure
def parse_file(source, debug=False):
with open(source, 'r') as fd:
return parse(fd.read().decode('utf-8'), source, debug)
def parse(source, filename=None, debug=False):
cla = CharacterLookAhead(source)
if debug:
tla = LookAhead(tokenlogger(tokenize(cla), filename))
else:
tla = LookAhead(tokenize(cla))
if ahead(tla, 'newline'):
expect(tla, 'newline')
if not tla.empty:
return parse_block(tla)
else:
return ()
def parse_block(tla):
yield parse_sentence(tla)
while not tla.empty and not ahead(tla, 'dedent'):
expect(tla, 'newline')
yield parse_sentence(tla)
def parse_sentence(tla, required=True):
location = tla.location
head = parse_word(tla, required, 10)
if head is None:
return
if ahead_string(tla, '=', ':', ':='):
operator = Constant(tla.location, 'operator', expect(tla, 'operator').string)
blocks = find_placeholders(head)
if len(blocks) > 0:
raise Exception("%s: not allowed on toplevel lhs side of '=' or ':'." % linecol(blocks[0].location))
return Struct(location, 'infix', operator, head, parse_sentence(tla))
sentence = Struct(location, 'sentence', head)
for word in repeated(parse_word, tla, False, 0):
sentence.append(word)
blocks = find_placeholders(sentence)
if ahead(tla, 'indent'):
expect(tla, 'indent')
if len(blocks) > 1:
raise Exception("%s: cannot fill this placeholder" % linecol(blocks[0].location))
elif len(blocks) > 0:
block = blocks[0]
for item in parse_block(tla):
block.append(item)
else:
sentence.append(Struct(location, 'block', *parse_block(tla)))
expect(tla, 'dedent')
elif len(blocks) > 0:
raise Exception("%s: cannot fill this placeholder" % linecol(blocks[0].location))
return sentence
def find_placeholders(node, out=None):
out = [] if out is None else out
if node.group == 'block':
out.append(node)
elif isinstance(node, Struct):
for item in node:
if item.group == 'sentence' or item.group == 'function':
continue
find_placeholders(item, out)
return out
def parse_word(tla, required, precedence):
location = tla.location
expr = parse_slot(tla, required, precedence)
if expr is None:
return
if ahead(tla, 'comma'):
expr = Struct(location, 'tuple+', expr)
while ahead(tla, 'comma'):
expect(tla, 'comma')
expr.append(parse_slot(tla, True, precedence))
return expr
def parse_arglist(tla, location, *head):
arglist = Struct(location, 'tuple', *head)
slot = parse_slot(tla, False, 0)
if slot is None:
return arglist
arglist.append(slot)
while ahead(tla, 'comma'):
expect(tla, 'comma')
arglist.append(parse_slot(tla, True, 0))
return arglist
def parse_slot(tla, required, precedence):
if precedence >= 10:
return parse_slice(tla, required, precedence)
location = tla.location
slic = parse_slice(tla, required, precedence)
if ahead_string(tla, '=', ':'):
operator = Constant(tla.location, 'operator', expect(tla, 'operator').string)
return Struct(location, 'infix', operator, slic, parse_slot(tla, required, precedence))
return slic
def parse_slice(tla, required, precedence):
location = tla.location
expr = parse_expr(tla, False, precedence)
if expr is None:
condition = lambda: tla.value.near == tla.value.balanced
else:
condition = lambda: tla.value.balanced
if ahead_string(tla, '.:', ':.') and condition():
mode = ('incr' if tla.step().string == '.:' else 'decr')
start = expr
stop = parse_expr(tla, False, precedence)
if start is None:
start = Constant(tla.location, 'symbol', 'null')
if stop is None:
stop = Constant(tla.location, 'symbol', 'null')
stride = Constant(tla.location, 'symbol', 'null')
step = Constant(tla.location, 'symbol', 'null')
if ahead_string(tla, '::') and tla.value.balanced:
expect(tla, 'operator')
stride = parse_expr(tla, False, precedence)
if ahead_string(tla, '..') and tla.value.balanced:
expect(tla, 'operator')
step = parse_expr(tla, False, precedence)
return Struct(location, mode, start, stop, stride, step)
if expr is None:
return parse_expr(tla, required, precedence)
return expr
def parse_expr(tla, required, precedence):
location = tla.location
if ahead(tla, 'operator') and tla.value.string in prefix_operators:
if tla.value.near <> tla.value.balanced and tla.value.string <> 'not':
raise Exception("%s: This is not C" % linecol(tla.location))
operator = Constant(tla.location, 'operator', expect(tla, 'operator').string)
expr = Struct(location, 'prefix', operator, parse_expr(tla, True, prefix_operators[operator.value]))
else:
expr = parse_fullterm(tla, required)
while ahead(tla, 'operator') and tla.value.string in infix_operators:
prec = infix_operators[tla.value.string]
if prec <= precedence or not tla.value.balanced:
break
prex = prec - (tla.value.string in right_binding)
operator = Constant(tla.location, 'operator', expect(tla, 'operator').string)
expr = Struct(location, 'infix', operator, expr, parse_expr(tla, True, prex))
return expr
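# Associativity sketch for parse_expr above: `prex = prec - (string in
# right_binding)` lowers the binding power passed to the right-hand
# recursion by one for the operators in right_binding, so those chain to
# the right while the arithmetic operators chain to the left, e.g.
#   1 - 2 - 3    ->  infix(-, infix(-, 1, 2), 3)
#   a or b or c  ->  infix(or, a, infix(or, b, c))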
def parse_fullterm(tla, required):
term = parse_term(tla, required)
while not tla.empty and tla.value.near:
location = tla.location
if ahead(tla, 'attribute'):
string = expect(tla, 'attribute').string
term = Struct(location, 'attribute', term, Constant(location, 'attribute', string[1:]))
elif ahead(tla, 'leftparen'):
expect(tla, 'leftparen')
term = parse_arglist(tla, location, term)
term.group = 'call'
expect(tla, 'rightparen')
elif ahead(tla, 'leftbracket'):
expect(tla, 'leftbracket')
term = parse_arglist(tla, location, term)
term.group = 'index'
expect(tla, 'rightbracket')
elif ahead_string(tla, ';'):
expect(tla, 'operator')
term = Struct(location, 'call', term, Struct(location, "block"))
elif ahead_string(tla, '.;'):
expect(tla, 'operator')
term = Struct(location, 'attribute', term, Struct(location, "block"))
else:
break
return term
def parse_term(tla, required):
location = tla.location
if ahead(tla, 'symbol'):
return Constant(location, 'symbol', expect(tla, 'symbol').string)
elif ahead_string(tla, ';'):
expect(tla, 'operator')
return Struct(location, 'block')
elif ahead(tla, 'string'):
string = expect(tla, 'string').string
return Constant(location, 'string', string[1:-1])
elif ahead(tla, 'number'):
string = expect(tla, 'number').string
if ahead(tla, 'flot'):
if not tla.value.near:
raise Exception("%s: decimal expression supposed to be typed with no spacing" % (linecol(tla.location)))
string += expect(tla, 'flot').string
return Constant(location, 'float', string)
return Constant(location, 'number', string)
elif ahead(tla, 'leftparen'):
expect(tla, 'leftparen')
if ahead(tla, 'operator'):
operator = Constant(tla.location, 'operator', expect(tla, 'operator').string)
expect(tla, 'rightparen')
return operator
else:
term = parse_arglist(tla, location)
expect(tla, 'rightparen')
if ahead_string(tla, '->'):
expect(tla, 'operator')
blocks = find_placeholders(term)
if len(blocks) > 0:
raise Exception("%s: not allowed inside function argument list" % linecol(blocks[0].location))
return parse_function(tla, location, term)
elif len(term) == 1 and term[0].group != 'block':
return term[0]
else:
term.group = 'tuple'
return term
elif ahead(tla, 'leftbracket'):
expect(tla, 'leftbracket')
arglist = parse_arglist(tla, location)
arglist.group = 'list'
expect(tla, 'rightbracket')
return arglist
elif ahead_string(tla, '->'):
expect(tla, 'operator')
return parse_function(tla, location, Struct(location, 'arglist'))
elif ahead_string(tla, '@'):
expect(tla, 'operator')
term = Constant(location, 'self', None)
        if ahead(tla, 'attribute'):  # e.g. '@.x'; dotted names tokenize as 'attribute'
            raise Exception("%s: you're serious?" % (linecol(tla.location)))
if ahead(tla, 'symbol') and tla.value.near:
term = Struct(location, 'attribute', term, Constant(tla.location, 'attribute', expect(tla, 'symbol').string))
return term
elif required:
raise Exception("%s: a term is missing after '%s'" % (linecol(tla.previous_location), tla.previous_value.string))
def parse_function(tla, location, func):
func.group = 'function'
sentence = parse_sentence(tla, False)
if sentence is not None:
func.append(sentence)
elif ahead(tla, 'indent'):
expect(tla, 'indent')
func.append(Struct(location, 'block', *parse_block(tla)))
expect(tla, 'dedent')
return func
def repeated(fn, *args):
node = fn(*args)
while node is not None:
yield node
node = fn(*args)
def ahead_string(tla, *strings):
return not tla.empty and tla.value.string in strings
def ahead(tla, *groups):
return not tla.empty and tla.value.group in groups
def expect(tla, group, string=None):
if tla.empty:
raise Exception(u"%s: expected %s, but stream is empty" % (linecol(tla.location), repr_expect(group, string)))
value = tla.value
    valid = (value.group == group) and (string is None or value.string == string)
if not valid:
raise Exception(u"%s: expected %s, got %r(%s)" % (linecol(tla.location), repr_expect(group, string), value.string, value.group))
return tla.step()
def repr_expect(group, string):
if string is None:
return "(%s)" % group
else:
return "%r(%s)" % (string, group)
def tokenlogger(tokens, source):
for token in tokens:
print "%s:%s: %r (%s) near=%r balanced=%r" % (
source, linecol(token.location),
token.string, token.group, token.near, token.balanced
)
yield token
class tokenize(object):
def __init__(self, cla):
self.cla = cla
self.indent = 0
self.layers = [-1]
def __iter__(self):
return self
def next(self):
cla = self.cla
if cla.empty and len(self.layers) > 1:
self.indent = self.layers.pop(-1)
return Token(cla.location, '', 'dedent')
if cla.empty:
raise StopIteration
if self.indent < self.layers[-1]:
indent = self.layers.pop(-1)
if self.indent != self.layers[-1]:
return Token(cla.location, '', 'badline')
return Token(cla.location, '', 'dedent')
if self.indent == self.layers[-1]:
indent = self.layers.pop(-1)
return Token(cla.location, '', 'newline')
while cla.value == ' ':
cla.step()
if cla.value == '#':
while cla.value != '\n':
cla.step()
if cla.value == '\n':
cla.step()
indent = 0
while cla.value == ' ':
indent += 1
cla.step()
if cla.value == '\n' or cla.value == '#':
return self.next()
if cla.empty:
return self.next()
if indent > self.indent:
self.layers.append(self.indent)
self.indent = indent
return Token(cla.location, '', 'indent')
elif indent == self.indent:
return Token(cla.location, '', 'newline')
else:
self.indent = indent
return Token(cla.location, '', 'dedent')
location = cla.location
near = (cla.previous_value != ' ')
string = ""
if issym(cla.value):
while issym(cla.value):
string += cla.step()
balanced = near <> (cla.value == ' ')
if string in operators:
return Token(location, string, 'operator', near, balanced)
if string[:1].isdigit():
return Token(location, string, 'number', near, balanced)
return Token(location, string, 'symbol', near, balanced)
if cla.value in "\"'":
terminator = string = cla.step()
while cla.value != terminator:
string += cla.step()
if cla.value == '\\':
string += cla.step()
string += cla.step()
balanced = near <> (cla.value == ' ')
return Token(location, string, 'string', near, balanced)
string = cla.step()
if string == '.':
while issym(cla.value):
string += cla.step()
if string != '.':
balanced = near <> (cla.value == ' ')
if isnum(string[1]):
return Token(location, string, 'flot', near, balanced)
return Token(location, string, 'attribute', near, balanced)
if string in operators:
while not cla.empty and string + cla.value in operators:
string += cla.step()
balanced = near <> (cla.value == ' ')
return Token(location, string, 'operator', near, balanced)
balanced = near <> (cla.value == ' ')
if string in specials:
return Token(location, string, specials[string], near, balanced)
return Token(location, string, 'unknown', near, balanced)
def issym(ch):
return ch.isalnum() or ch == '_'
def isnum(ch):
return ch.isdigit()
def linecol(location):
return "line %i, col %i" % (location >> 8, location & 255)
class Token(object):
def __init__(self, location, string, group, near=False, balanced=False):
self.location = location
self.string = string
self.group = group
self.near = near
self.balanced = balanced
def __repr__(self):
return '<Token %r>' % self.string
if __name__=='__main__':
main()
|
gpl-3.0
| -1,161,089,621,615,312,600
| 36.182033
| 136
| 0.557541
| false
| 3.727898
| false
| false
| false
|
TumblrCommunity/PowerPortfolio
|
test.py
|
1
|
4318
|
# To execute this test run python test.py on the Terminal
from portfolio.application.base import application
from portfolio.models import needs_db
import os
import json
import unittest
import tempfile
class PortfolioTestCase(unittest.TestCase):
def setUp(self):
self.tester = application.test_client()
def login(self):
passwd = "somepassword"
self.tester.post('/admin/api/login',
data=json.dumps(dict(password=passwd)),
content_type='application/json')
def test_login(self):
passwd = "somepassword"
response = self.tester.post('/admin/api/login',
data=json.dumps(dict(password=passwd)),
content_type='application/json')
self.assertEqual(json.loads(response.data.decode('utf-8')), {'auth':True})
passwd = "notsomepassword"
response = self.tester.post('/admin/api/login',
data=json.dumps(dict(password=passwd)),
content_type='application/json')
self.assertEqual(json.loads(response.data.decode('utf-8')), {'auth':False})
def test_logged_in(self):
response = self.tester.get('/admin/api/logged_in')
self.assertEqual(json.loads(response.data.decode('utf-8')), {'auth':False})
self.login()
response = self.tester.get('/admin/api/logged_in')
self.assertEqual(json.loads(response.data.decode('utf-8')), {'auth':True})
def test_logout(self):
response = self.tester.get('/admin/api/logout')
self.assertEqual(json.loads(response.data.decode('utf-8')), {'error':"Not logged in"})
self.login()
response = self.tester.get('/admin/api/logout')
self.assertEqual(response.status_code, 204)
response = self.tester.get('/admin/api/logout')
self.assertEqual(json.loads(response.data.decode('utf-8')), {'error':"Not logged in"})
def test_home_status_code(self):
response = self.tester.get('/')
self.assertEqual(response.status_code, 200)
def test_count(self):
response = self.tester.get('/api/projects/count', content_type='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.data.decode('utf-8')), {'count':0})
def test_project_new(self):
self.login()
response = self.tester.post('/admin/api/projects/new',
data=json.dumps(dict(name='foo', url="http://", show=True, description="bar")),
content_type='application/json')
self.assertEqual(response.status_code, 204)
        # add a second project - ideally these would be preset in the test database
response = self.tester.post('/admin/api/projects/new',
data=json.dumps(dict(name='foo', url="http://", show=True, description="bar")),
content_type='application/json')
self.assertEqual(response.status_code, 204)
def test_project_read(self):
response = self.tester.get('/api/projects/1', content_type='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.data.decode('utf-8')), {'key':1, 'name':'foo','url':"http://", 'show':True, 'description':"bar" })
def test_project_write(self):
self.login()
#test valid update
response = self.tester.post('/admin/api/projects/2',
data=json.dumps(dict(name='foop', description='barp', show = False, url="https://")),
content_type='application/json')
self.assertEqual(response.status_code, 204)
response = self.tester.get('/api/projects/2', content_type='application/json')
self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response.data.decode('utf-8')), {'key':2, 'name':'foop','url':"https://", 'show':False, 'description':"barp" })
        #test invalid update
response = self.tester.post('/admin/api/projects/2',
data=json.dumps(None),
content_type='application/json')
self.assertEqual(response.status_code, 400)
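# Note: these tests share database state across methods (projects 1 and 2
# are created in test_project_new and read back in test_project_read /
# test_project_write), which relies on unittest's default alphabetical
# ordering of test methods within the class.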
if __name__ == '__main__':
unittest.main()
|
mit
| 1,062,895,766,827,420,400
| 44.93617
| 147
| 0.606994
| false
| 3.957837
| true
| false
| false
|
ThinkboxSoftware/Deadline
|
Custom/events/Zabbix/API/httpretty/core.py
|
1
|
34264
|
# #!/usr/bin/env python
# -*- coding: utf-8 -*-
# <HTTPretty - HTTP client mock for Python>
# Copyright (C) <2011-2013> Gabriel Falcão <gabriel@nacaolivre.org>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from __future__ import unicode_literals
import re
import codecs
import inspect
import socket
import functools
import itertools
import warnings
import logging
import traceback
import json
import contextlib
from .compat import (
PY3,
StringIO,
text_type,
BaseClass,
BaseHTTPRequestHandler,
quote,
quote_plus,
urlunsplit,
urlsplit,
parse_qs,
unquote,
unquote_utf8,
ClassTypes,
basestring
)
from .http import (
STATUSES,
HttpBaseClass,
parse_requestline,
last_requestline,
)
from .utils import (
utf8,
decode_utf8,
)
from .errors import HTTPrettyError
from datetime import datetime
from datetime import timedelta
from errno import EAGAIN
old_socket = socket.socket
old_create_connection = socket.create_connection
old_gethostbyname = socket.gethostbyname
old_gethostname = socket.gethostname
old_getaddrinfo = socket.getaddrinfo
old_socksocket = None
old_ssl_wrap_socket = None
old_sslwrap_simple = None
old_sslsocket = None
if PY3: # pragma: no cover
basestring = (bytes, str)
try: # pragma: no cover
import socks
old_socksocket = socks.socksocket
except ImportError:
socks = None
try: # pragma: no cover
import ssl
old_ssl_wrap_socket = ssl.wrap_socket
if not PY3:
old_sslwrap_simple = ssl.sslwrap_simple
old_sslsocket = ssl.SSLSocket
except ImportError: # pragma: no cover
ssl = None
DEFAULT_HTTP_PORTS = frozenset([80])
POTENTIAL_HTTP_PORTS = set(DEFAULT_HTTP_PORTS)
DEFAULT_HTTPS_PORTS = frozenset([443])
POTENTIAL_HTTPS_PORTS = set(DEFAULT_HTTPS_PORTS)
class HTTPrettyRequest(BaseHTTPRequestHandler, BaseClass):
"""Represents a HTTP request. It takes a valid multi-line, `\r\n`
separated string with HTTP headers and parse them out using the
internal `parse_request` method.
It also replaces the `rfile` and `wfile` attributes with StringIO
instances so that we garantee that it won't make any I/O, neighter
for writing nor reading.
It has some convenience attributes:
`headers` -> a mimetype object that can be cast into a dictionary,
contains all the request headers
`method` -> the HTTP method used in this request
`querystring` -> a dictionary containing lists with the
attributes. Please notice that if you need a single value from a
query string you will need to get it manually like:
```python
>>> request.querystring
{'name': ['Gabriel Falcao']}
>>> print request.querystring['name'][0]
```
`parsed_body` -> a dictionary containing parsed request body or
None if HTTPrettyRequest doesn't know how to parse it. It
currently supports parsing body data that was sent under the
`content-type` headers values: 'application/json' or
'application/x-www-form-urlencoded'
"""
def __init__(self, headers, body=''):
# first of all, lets make sure that if headers or body are
# unicode strings, it must be converted into a utf-8 encoded
# byte string
self.raw_headers = utf8(headers.strip())
self.body = utf8(body)
# Now let's concatenate the headers with the body, and create
# `rfile` based on it
self.rfile = StringIO(b'\r\n\r\n'.join([self.raw_headers, self.body]))
self.wfile = StringIO() # Creating `wfile` as an empty
# StringIO, just to avoid any real
# I/O calls
# parsing the request line preemptively
self.raw_requestline = self.rfile.readline()
# initiating the error attributes with None
self.error_code = None
self.error_message = None
# Parse the request based on the attributes above
self.parse_request()
# making the HTTP method string available as the command
self.method = self.command
# Now 2 convenient attributes for the HTTPretty API:
# `querystring` holds a dictionary with the parsed query string
try:
self.path = self.path.encode('iso-8859-1')
except UnicodeDecodeError:
pass
self.path = decode_utf8(self.path)
qstring = self.path.split("?", 1)[-1]
self.querystring = self.parse_querystring(qstring)
# And the body will be attempted to be parsed as
# `application/json` or `application/x-www-form-urlencoded`
self.parsed_body = self.parse_request_body(self.body)
def __str__(self):
return '<HTTPrettyRequest("{0}", total_headers={1}, body_length={2})>'.format(
self.headers.get('content-type', ''),
len(self.headers),
len(self.body),
)
def parse_querystring(self, qs):
expanded = unquote_utf8(qs)
parsed = parse_qs(expanded)
result = {}
for k in parsed:
result[k] = list(map(decode_utf8, parsed[k]))
return result
def parse_request_body(self, body):
""" Attempt to parse the post based on the content-type passed. Return the regular body if not """
PARSING_FUNCTIONS = {
'application/json': json.loads,
'text/json': json.loads,
'application/x-www-form-urlencoded': self.parse_querystring,
}
FALLBACK_FUNCTION = lambda x: x
content_type = self.headers.get('content-type', '')
do_parse = PARSING_FUNCTIONS.get(content_type, FALLBACK_FUNCTION)
try:
body = decode_utf8(body)
return do_parse(body)
except:
return body
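# Dispatch sketch for parse_request_body above (bodies are illustrative):
#   'application/json'                  b'{"a": 1}' -> {u'a': 1}
#   'application/x-www-form-urlencoded' b'a=1&a=2'  -> {'a': ['1', '2']}
#   any other content-type                          -> the body, unparsed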
class EmptyRequestHeaders(dict):
pass
class HTTPrettyRequestEmpty(object):
body = ''
headers = EmptyRequestHeaders()
class FakeSockFile(StringIO):
pass
class FakeSSLSocket(object):
def __init__(self, sock, *args, **kw):
self._httpretty_sock = sock
def __getattr__(self, attr):
return getattr(self._httpretty_sock, attr)
class fakesock(object):
class socket(object):
_entry = None
debuglevel = 0
_sent_data = []
def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM,
protocol=0):
self.setsockopt(family, type, protocol)
self.truesock = old_socket(family, type, protocol)
self._closed = True
self.fd = FakeSockFile()
self.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
self._sock = self
self.is_http = False
self._bufsize = 16
def getpeercert(self, *a, **kw):
now = datetime.now()
shift = now + timedelta(days=30 * 12)
return {
'notAfter': shift.strftime('%b %d %H:%M:%S GMT'),
'subjectAltName': (
('DNS', '*%s' % self._host),
('DNS', self._host),
('DNS', '*'),
),
'subject': (
(
('organizationName', '*.%s' % self._host),
),
(
('organizationalUnitName',
'Domain Control Validated'),
),
(
('commonName', '*.%s' % self._host),
),
),
}
def ssl(self, sock, *args, **kw):
return sock
def setsockopt(self, family, type, protocol):
self.family = family
self.protocol = protocol
self.type = type
def connect(self, address):
self._address = (self._host, self._port) = address
self._closed = False
self.is_http = self._port in POTENTIAL_HTTP_PORTS | POTENTIAL_HTTPS_PORTS
if not self.is_http:
self.truesock.connect(self._address)
def close(self):
if not (self.is_http and self._closed):
self.truesock.close()
self._closed = True
def makefile(self, mode='r', bufsize=-1):
"""Returns this fake socket's own StringIO buffer.
If there is an entry associated with the socket, the file
descriptor gets filled in with the entry data before being
returned.
"""
self._mode = mode
self._bufsize = bufsize
if self._entry:
self._entry.fill_filekind(self.fd)
return self.fd
def real_sendall(self, data, *args, **kw):
"""Sends data to the remote server. This method is called
when HTTPretty identifies that someone is trying to send
non-http data.
The received bytes are written in this socket's StringIO
buffer so that HTTPretty can return it accordingly when
necessary.
"""
if self.is_http: # no need to connect if `self.is_http` is
# False because self.connect already did
# that
self.truesock.connect(self._address)
self.truesock.settimeout(0)
self.truesock.sendall(data, *args, **kw)
should_continue = True
while should_continue:
try:
received = self.truesock.recv(self._bufsize)
self.fd.write(received)
should_continue = len(received) > 0
except socket.error as e:
if e.errno == EAGAIN:
continue
break
self.fd.seek(0)
def sendall(self, data, *args, **kw):
self._sent_data.append(data)
try:
requestline, _ = data.split(b'\r\n', 1)
method, path, version = parse_requestline(decode_utf8(requestline))
is_parsing_headers = True
except ValueError:
is_parsing_headers = False
if not self._entry:
# If the previous request wasn't mocked, don't mock the subsequent sending of data
return self.real_sendall(data, *args, **kw)
self.fd.seek(0)
if not is_parsing_headers:
if len(self._sent_data) > 1:
headers = utf8(last_requestline(self._sent_data))
meta = self._entry.request.headers
body = utf8(self._sent_data[-1])
if meta.get('transfer-encoding', '') == 'chunked':
if not body.isdigit() and body != b'\r\n' and body != b'0\r\n\r\n':
self._entry.request.body += body
else:
self._entry.request.body += body
httpretty.historify_request(headers, body, False)
return
            # the request path may carry a full URI with an explicit port; record it
s = urlsplit(path)
POTENTIAL_HTTP_PORTS.add(int(s.port or 80))
headers, body = list(map(utf8, data.split(b'\r\n\r\n', 1)))
request = httpretty.historify_request(headers, body)
info = URIInfo(hostname=self._host, port=self._port,
path=s.path,
query=s.query,
last_request=request)
matcher, entries = httpretty.match_uriinfo(info)
if not entries:
self._entry = None
self.real_sendall(data)
return
self._entry = matcher.get_next_entry(method, info, request)
def debug(self, func, *a, **kw):
if self.is_http:
frame = inspect.stack()[0][0]
lines = list(map(utf8, traceback.format_stack(frame)))
message = [
"HTTPretty intercepted and unexpected socket method call.",
("Please open an issue at "
"'https://github.com/gabrielfalcao/HTTPretty/issues'"),
"And paste the following traceback:\n",
"".join(decode_utf8(lines)),
]
raise RuntimeError("\n".join(message))
return func(*a, **kw)
def settimeout(self, new_timeout):
self.timeout = new_timeout
def send(self, *args, **kwargs):
return self.debug(self.truesock.send, *args, **kwargs)
def sendto(self, *args, **kwargs):
return self.debug(self.truesock.sendto, *args, **kwargs)
def recvfrom_into(self, *args, **kwargs):
return self.debug(self.truesock.recvfrom_into, *args, **kwargs)
def recv_into(self, *args, **kwargs):
return self.debug(self.truesock.recv_into, *args, **kwargs)
def recvfrom(self, *args, **kwargs):
return self.debug(self.truesock.recvfrom, *args, **kwargs)
def recv(self, *args, **kwargs):
return self.debug(self.truesock.recv, *args, **kwargs)
def __getattr__(self, name):
return getattr(self.truesock, name)
def fake_wrap_socket(s, *args, **kw):
return s
def create_fake_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
s = fakesock.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
s.settimeout(timeout)
if source_address:
s.bind(source_address)
s.connect(address)
return s
def fake_gethostbyname(host):
return '127.0.0.1'
def fake_gethostname():
return 'localhost'
def fake_getaddrinfo(
host, port, family=None, socktype=None, proto=None, flags=None):
return [(2, 1, 6, '', (host, port))]
class Entry(BaseClass):
def __init__(self, method, uri, body,
adding_headers=None,
forcing_headers=None,
status=200,
streaming=False,
**headers):
self.method = method
self.uri = uri
self.info = None
self.request = None
self.body_is_callable = False
if hasattr(body, "__call__"):
self.callable_body = body
self.body = None
self.body_is_callable = True
elif isinstance(body, text_type):
self.body = utf8(body)
else:
self.body = body
self.streaming = streaming
if not streaming and not self.body_is_callable:
self.body_length = len(self.body or '')
else:
self.body_length = 0
self.adding_headers = adding_headers or {}
self.forcing_headers = forcing_headers or {}
self.status = int(status)
for k, v in headers.items():
name = "-".join(k.split("_")).title()
self.adding_headers[name] = v
self.validate()
def validate(self):
content_length_keys = 'Content-Length', 'content-length'
for key in content_length_keys:
got = self.adding_headers.get(
key, self.forcing_headers.get(key, None))
if got is None:
continue
            try:
                igot = int(got)
            except ValueError:
                warnings.warn(
                    'HTTPretty got to register the Content-Length header ' \
                    'with "%r" which is not a number' % got,
                )
                continue  # a non-numeric value cannot be range-checked below
if igot > self.body_length:
raise HTTPrettyError(
'HTTPretty got inconsistent parameters. The header ' \
'Content-Length you registered expects size "%d" but ' \
'the body you registered for that has actually length ' \
'"%d".' % (
igot, self.body_length,
)
)
def __str__(self):
return r'<Entry %s %s getting %d>' % (
self.method, self.uri, self.status)
def normalize_headers(self, headers):
new = {}
for k in headers:
new_k = '-'.join([s.lower() for s in k.split('-')])
new[new_k] = headers[k]
return new
def fill_filekind(self, fk):
now = datetime.utcnow()
headers = {
'status': self.status,
'date': now.strftime('%a, %d %b %Y %H:%M:%S GMT'),
'server': 'Python/HTTPretty',
'connection': 'close',
}
if self.forcing_headers:
headers = self.forcing_headers
if self.adding_headers:
headers.update(self.normalize_headers(self.adding_headers))
headers = self.normalize_headers(headers)
status = headers.get('status', self.status)
if self.body_is_callable:
status, headers, self.body = self.callable_body(self.request, self.info.full_url(), headers)
headers.update({
'content-length': len(self.body)
})
string_list = [
'HTTP/1.1 %d %s' % (status, STATUSES[status]),
]
if 'date' in headers:
string_list.append('date: %s' % headers.pop('date'))
if not self.forcing_headers:
content_type = headers.pop('content-type',
'text/plain; charset=utf-8')
content_length = headers.pop('content-length', self.body_length)
string_list.append('content-type: %s' % content_type)
if not self.streaming:
string_list.append('content-length: %s' % content_length)
string_list.append('server: %s' % headers.pop('server'))
for k, v in headers.items():
string_list.append(
'{0}: {1}'.format(k, v),
)
for item in string_list:
fk.write(utf8(item) + b'\n')
fk.write(b'\r\n')
if self.streaming:
self.body, body = itertools.tee(self.body)
for chunk in body:
fk.write(utf8(chunk))
else:
fk.write(utf8(self.body))
fk.seek(0)
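# fill_filekind above serializes an Entry into the raw bytes a client reads
# back, roughly (a sketch; note that the leftover 'status' and 'connection'
# keys are emitted by the loop over the remaining headers):
#   HTTP/1.1 200 OK
#   date: Mon, 01 Jan 2024 00:00:00 GMT
#   content-type: text/plain; charset=utf-8
#   content-length: <len(body)>
#   server: Python/HTTPretty
#   status: 200
#   connection: close
#
#   <body bytes>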
def url_fix(s, charset='utf-8'):
scheme, netloc, path, querystring, fragment = urlsplit(s)
path = quote(path, b'/%')
querystring = quote_plus(querystring, b':&=')
return urlunsplit((scheme, netloc, path, querystring, fragment))
class URIInfo(BaseClass):
def __init__(self,
username='',
password='',
hostname='',
port=80,
path='/',
query='',
fragment='',
scheme='',
last_request=None):
self.username = username or ''
self.password = password or ''
self.hostname = hostname or ''
if port:
port = int(port)
elif scheme == 'https':
port = 443
self.port = port or 80
self.path = path or ''
self.query = query or ''
if scheme:
self.scheme = scheme
elif self.port in POTENTIAL_HTTPS_PORTS:
self.scheme = 'https'
else:
self.scheme = 'http'
self.fragment = fragment or ''
self.last_request = last_request
def __str__(self):
attrs = (
'username',
'password',
'hostname',
'port',
'path',
)
fmt = ", ".join(['%s="%s"' % (k, getattr(self, k, '')) for k in attrs])
return r'<httpretty.URIInfo(%s)>' % fmt
def __hash__(self):
return hash(text_type(self))
def __eq__(self, other):
self_tuple = (
self.port,
decode_utf8(self.hostname.lower()),
url_fix(decode_utf8(self.path)),
)
other_tuple = (
other.port,
decode_utf8(other.hostname.lower()),
url_fix(decode_utf8(other.path)),
)
return self_tuple == other_tuple
def full_url(self, use_querystring=True):
credentials = ""
if self.password:
credentials = "{0}:{1}@".format(
self.username, self.password)
query = ""
if use_querystring and self.query:
query = "?{0}".format(decode_utf8(self.query))
result = "{scheme}://{credentials}{domain}{path}{query}".format(
scheme=self.scheme,
credentials=credentials,
domain=self.get_full_domain(),
path=decode_utf8(self.path),
query=query
)
return result
def get_full_domain(self):
hostname = decode_utf8(self.hostname)
# Port 80/443 should not be appended to the url
if self.port not in DEFAULT_HTTP_PORTS | DEFAULT_HTTPS_PORTS:
return ":".join([hostname, str(self.port)])
return hostname
@classmethod
def from_uri(cls, uri, entry):
result = urlsplit(uri)
if result.scheme == 'https':
POTENTIAL_HTTPS_PORTS.add(int(result.port or 443))
else:
POTENTIAL_HTTP_PORTS.add(int(result.port or 80))
return cls(result.username,
result.password,
result.hostname,
result.port,
result.path,
result.query,
result.fragment,
result.scheme,
entry)
class URIMatcher(object):
regex = None
info = None
def __init__(self, uri, entries, match_querystring=False):
self._match_querystring = match_querystring
if type(uri).__name__ == 'SRE_Pattern':
self.regex = uri
result = urlsplit(uri.pattern)
if result.scheme == 'https':
POTENTIAL_HTTPS_PORTS.add(int(result.port or 443))
else:
POTENTIAL_HTTP_PORTS.add(int(result.port or 80))
else:
self.info = URIInfo.from_uri(uri, entries)
self.entries = entries
#hash of current_entry pointers, per method.
self.current_entries = {}
def matches(self, info):
if self.info:
return self.info == info
else:
return self.regex.search(info.full_url(
use_querystring=self._match_querystring))
def __str__(self):
wrap = 'URLMatcher({0})'
if self.info:
return wrap.format(text_type(self.info))
else:
return wrap.format(self.regex.pattern)
def get_next_entry(self, method, info, request):
"""Cycle through available responses, but only once.
Any subsequent requests will receive the last response"""
if method not in self.current_entries:
self.current_entries[method] = 0
#restrict selection to entries that match the requested method
entries_for_method = [e for e in self.entries if e.method == method]
if self.current_entries[method] >= len(entries_for_method):
self.current_entries[method] = -1
if not self.entries or not entries_for_method:
raise ValueError('I have no entries for method %s: %s'
% (method, self))
entry = entries_for_method[self.current_entries[method]]
if self.current_entries[method] != -1:
self.current_entries[method] += 1
# Attach more info to the entry
# So the callback can be more clever about what to do
# This does also fix the case where the callback
# would be handed a compiled regex as uri instead of the
# real uri
entry.info = info
entry.request = request
return entry
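        # e.g. with three responses registered for GET, successive calls
        # yield entries 0, 1, 2, 2, 2, ...: the per-method index saturates
        # at -1, which Python resolves to the last element.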
def __hash__(self):
return hash(text_type(self))
def __eq__(self, other):
return text_type(self) == text_type(other)
class httpretty(HttpBaseClass):
"""The URI registration class"""
_entries = {}
latest_requests = []
last_request = HTTPrettyRequestEmpty()
_is_enabled = False
@classmethod
def match_uriinfo(cls, info):
for matcher, value in cls._entries.items():
if matcher.matches(info):
return (matcher, info)
return (None, [])
@classmethod
@contextlib.contextmanager
def record(cls, filename, indentation=4, encoding='utf-8'):
try:
import urllib3
except ImportError:
raise RuntimeError('HTTPretty requires urllib3 installed for recording actual requests.')
http = urllib3.PoolManager()
cls.enable()
calls = []
def record_request(request, uri, headers):
cls.disable()
response = http.request(request.method, uri)
calls.append({
'request': {
'uri': uri,
'method': request.method,
'headers': dict(request.headers),
'body': decode_utf8(request.body),
'querystring': request.querystring
},
'response': {
'status': response.status,
'body': decode_utf8(response.data),
'headers': dict(response.headers)
}
})
cls.enable()
return response.status, response.headers, response.data
for method in cls.METHODS:
cls.register_uri(method, re.compile(r'.*', re.M), body=record_request)
yield
cls.disable()
with codecs.open(filename, 'w', encoding) as f:
f.write(json.dumps(calls, indent=indentation))
@classmethod
@contextlib.contextmanager
def playback(cls, origin):
cls.enable()
data = json.loads(open(origin).read())
for item in data:
uri = item['request']['uri']
method = item['request']['method']
cls.register_uri(method, uri, body=item['response']['body'], forcing_headers=item['response']['headers'])
yield
cls.disable()
@classmethod
def reset(cls):
POTENTIAL_HTTP_PORTS.intersection_update(DEFAULT_HTTP_PORTS)
POTENTIAL_HTTPS_PORTS.intersection_update(DEFAULT_HTTPS_PORTS)
cls._entries.clear()
cls.latest_requests = []
cls.last_request = HTTPrettyRequestEmpty()
@classmethod
def historify_request(cls, headers, body='', append=True):
request = HTTPrettyRequest(headers, body)
cls.last_request = request
if append or not cls.latest_requests:
cls.latest_requests.append(request)
else:
cls.latest_requests[-1] = request
return request
@classmethod
def register_uri(cls, method, uri, body='HTTPretty :)',
adding_headers=None,
forcing_headers=None,
status=200,
responses=None, match_querystring=False,
**headers):
uri_is_string = isinstance(uri, basestring)
if uri_is_string and re.search(r'^\w+://[^/]+[.]\w{2,}$', uri):
uri += '/'
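        # e.g. 'http://example.com' is normalized to 'http://example.com/',
        # so the stored path is '/' rather than empty and a request to the
        # bare host can match.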
if isinstance(responses, list) and len(responses) > 0:
for response in responses:
response.uri = uri
response.method = method
entries_for_this_uri = responses
else:
headers[str('body')] = body
headers[str('adding_headers')] = adding_headers
headers[str('forcing_headers')] = forcing_headers
headers[str('status')] = status
entries_for_this_uri = [
cls.Response(method=method, uri=uri, **headers),
]
matcher = URIMatcher(uri, entries_for_this_uri,
match_querystring)
if matcher in cls._entries:
matcher.entries.extend(cls._entries[matcher])
del cls._entries[matcher]
cls._entries[matcher] = entries_for_this_uri
def __str__(self):
return '<HTTPretty with %d URI entries>' % len(self._entries)
@classmethod
def Response(cls, body, method=None, uri=None, adding_headers=None, forcing_headers=None,
status=200, streaming=False, **headers):
headers[str('body')] = body
headers[str('adding_headers')] = adding_headers
headers[str('forcing_headers')] = forcing_headers
headers[str('status')] = int(status)
headers[str('streaming')] = streaming
return Entry(method, uri, **headers)
@classmethod
def disable(cls):
cls._is_enabled = False
socket.socket = old_socket
socket.SocketType = old_socket
socket._socketobject = old_socket
socket.create_connection = old_create_connection
socket.gethostname = old_gethostname
socket.gethostbyname = old_gethostbyname
socket.getaddrinfo = old_getaddrinfo
socket.__dict__['socket'] = old_socket
socket.__dict__['_socketobject'] = old_socket
socket.__dict__['SocketType'] = old_socket
socket.__dict__['create_connection'] = old_create_connection
socket.__dict__['gethostname'] = old_gethostname
socket.__dict__['gethostbyname'] = old_gethostbyname
socket.__dict__['getaddrinfo'] = old_getaddrinfo
if socks:
socks.socksocket = old_socksocket
socks.__dict__['socksocket'] = old_socksocket
if ssl:
ssl.wrap_socket = old_ssl_wrap_socket
ssl.SSLSocket = old_sslsocket
ssl.__dict__['wrap_socket'] = old_ssl_wrap_socket
ssl.__dict__['SSLSocket'] = old_sslsocket
if not PY3:
ssl.sslwrap_simple = old_sslwrap_simple
ssl.__dict__['sslwrap_simple'] = old_sslwrap_simple
@classmethod
def is_enabled(cls):
return cls._is_enabled
@classmethod
def enable(cls):
cls._is_enabled = True
socket.socket = fakesock.socket
socket._socketobject = fakesock.socket
socket.SocketType = fakesock.socket
socket.create_connection = create_fake_connection
socket.gethostname = fake_gethostname
socket.gethostbyname = fake_gethostbyname
socket.getaddrinfo = fake_getaddrinfo
socket.__dict__['socket'] = fakesock.socket
socket.__dict__['_socketobject'] = fakesock.socket
socket.__dict__['SocketType'] = fakesock.socket
socket.__dict__['create_connection'] = create_fake_connection
socket.__dict__['gethostname'] = fake_gethostname
socket.__dict__['gethostbyname'] = fake_gethostbyname
socket.__dict__['getaddrinfo'] = fake_getaddrinfo
if socks:
socks.socksocket = fakesock.socket
socks.__dict__['socksocket'] = fakesock.socket
if ssl:
ssl.wrap_socket = fake_wrap_socket
ssl.SSLSocket = FakeSSLSocket
ssl.__dict__['wrap_socket'] = fake_wrap_socket
ssl.__dict__['SSLSocket'] = FakeSSLSocket
if not PY3:
ssl.sslwrap_simple = fake_wrap_socket
ssl.__dict__['sslwrap_simple'] = fake_wrap_socket
def httprettified(test):
"A decorator tests that use HTTPretty"
def decorate_class(klass):
for attr in dir(klass):
if not attr.startswith('test_'):
continue
attr_value = getattr(klass, attr)
if not hasattr(attr_value, "__call__"):
continue
setattr(klass, attr, decorate_callable(attr_value))
return klass
def decorate_callable(test):
@functools.wraps(test)
def wrapper(*args, **kw):
httpretty.reset()
httpretty.enable()
try:
return test(*args, **kw)
finally:
httpretty.disable()
return wrapper
if isinstance(test, ClassTypes):
return decorate_class(test)
return decorate_callable(test)
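# A minimal usage sketch of the API defined above (URL and body are
# illustrative; httpretty.GET is assumed to be inherited from
# HttpBaseClass, as in upstream HTTPretty):
#   @httprettified
#   def test_example():
#       httpretty.register_uri(httpretty.GET, 'http://example.com/',
#                              body='hello world')
#       # any real HTTP call to that URL is now intercepted and answered
#       # with the registered body; the request itself is recorded in
#       # httpretty.last_request / httpretty.latest_requests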
|
apache-2.0
| -2,241,472,824,125,834,000
| 31.072394
| 117
| 0.540116
| false
| 4.322861
| false
| false
| false
|
bobbyluig/Eclipse
|
src/agility/main.py
|
1
|
45601
|
from agility.maestro import Maestro
from agility.pololu.enumeration import uscSerialMode, ChannelMode, HomeMode
from agility.pololu.usc import Usc
from threading import Event
from shared.debug import Dummy
import numpy as np
import math
from matplotlib.path import Path
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import time
import logging
import sys
logger = logging.getLogger('universe')
class ServoError(Exception):
pass
class Stepper:
def __init__(self, c1, c2, steps, direction=1):
self.c1 = c1 # Direction channel.
self.c2 = c2 # Step channel.
self.steps = steps
self.direction = direction
self.step = 1
self.target = 1
def get_position(self):
"""
Get the stepper's current position in degrees.
:return: Output degrees.
"""
return self.steps_to_deg(self.step)
def deg_to_steps(self, deg):
"""
Converts a normalized degree to the nearest integer step.
:param deg: The input degrees.
:return: The corresponding steps.
"""
steps = int(round(deg * (self.steps / 360))) * self.direction
if steps == 0:
return self.steps
else:
return steps
def steps_to_deg(self, steps):
"""
Converts steps to a degree.
:param steps: The number of steps.
:return: The corresponding angle.
"""
return steps * (360 / self.steps) * self.direction
def step_one(self, direction):
"""
Increment step counter.
:param direction: 1 steps up, -1 steps down.
"""
n = self.step + direction
if n > self.steps or n < 1:
self.step = 1
else:
self.step = n
def set_target(self, deg):
"""
Target a degree. Servo will attempt nearest path to target.
:param deg: The input degrees.
:return: The number of steps, either positive or negative.
"""
# Normalize.
deg -= 360 * (deg // 360)
steps = self.deg_to_steps(deg)
# Compute closest direction.
target = steps - self.step
delta = (self.steps / 2 - target) % self.steps - (self.steps / 2)
# Return.
return delta
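# Worked example for the shortest-path arithmetic in set_target above
# (numbers assumed for illustration): with steps=200, direction=1 and the
# stepper at self.step == 190, set_target(0.) maps 0 degrees to step index
# 200, so target = 200 - 190 = 10 and
#   delta = (100 - 10) % 200 - 100 = -10,
# i.e. the magnitude-10 path is chosen over the 190-step one (the sign
# follows the formula's wrap-around convention).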
class Servo:
def __init__(self, channel, min_deg, max_deg, min_pwm, max_pwm, max_vel,
bias=0, direction=1, left_bound=None, right_bound=None):
self.channel = channel # 0 to 17
self.min_deg = min_deg # -360 to 360 as (degrees)
self.max_deg = max_deg # -360 to 360 as (degrees)
self.min_pwm = min_pwm * 4 # 0 to 4000 as (us)
self.max_pwm = max_pwm * 4 # 0 to 4000 as (us)
self.max_vel = max_vel # 0 to 1000, as (ms / 60deg)
        # Bias should be adjusted such that the servo is at kinematic "0" degree when its target is 0 degrees.
# This is used to compensate for ridge spacing and inaccuracies during installation.
# Think of this like the "home" value of the servo.
self.bias = bias
if left_bound is None:
# Left bound (if not min_deg), with bias.
self.left_bound = self.min_deg
else:
self.left_bound = left_bound
if right_bound is None:
            # Right bound (if not max_deg), with bias.
self.right_bound = self.max_deg
else:
self.right_bound = right_bound
assert(self.left_bound >= self.min_deg)
assert(self.right_bound <= self.max_deg)
# If the front of the servo is pointing in a negative axis, set this to negative 1.
# This reverses the directionality of all angle inputs.
self.direction = direction
# Dynamic current data.
self.pwm = 0
self.vel = 0
self.accel = 0
# User defined target. Also used to store last target.
# In units of 0.25 us.
self.target = 0
# Compute constants.
self.k_deg2mae = (self.max_pwm - self.min_pwm) / (self.max_deg - self.min_deg)
self.k_mae2deg = (self.max_deg - self.min_deg) / (self.max_pwm - self.min_pwm)
self.k_vel2mae = (60 * self.k_deg2mae) / self.max_vel * 10
self.k_mae2vel = self.max_vel / ((60 * self.k_deg2mae) * 10)
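        # Worked unit check for the constants above (illustrative numbers):
        # with min_deg=-90, max_deg=90, min_pwm=600 us and max_pwm=2400 us,
        # the stored quarter-us range is 2400..9600, so
        # k_deg2mae = (9600 - 2400) / 180 = 40 quarter-us per degree and
        # deg_to_maestro(0) = 2400 + 40 * 90 = 6000, i.e. the 1500 us
        # midpoint.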
def zero(self):
"""
Set the servo to zero, ignoring bias.
"""
self.target = self.deg_to_maestro(0)
def get_range(self):
"""
Get the maximum and minimum, removing bias.
:return: (min, max)
"""
low = self.left_bound - self.bias
high = self.right_bound - self.bias
return low, high
def set_target(self, deg):
"""
Set the target for the servo.
:param deg: The input degrees.
"""
deg = self.normalize(deg)
self.target = self.deg_to_maestro(deg)
def normalize(self, deg):
"""
Normalize a degree for the servo, taking into account direction and bias.
:param deg: Input degrees.
:return: Output degrees.
"""
# Account for direction and bias.
deg = deg * self.direction + self.bias
# Normalize.
if deg > self.right_bound:
deg -= 360
elif deg < self.left_bound:
deg += 360
if deg > self.right_bound or deg < self.left_bound:
raise ServoError('Target out of range!')
return deg
def get_position(self):
"""
Get the servo's current position in degrees.
:return: Output degrees.
"""
deg = self.maestro_to_deg(self.pwm)
deg = (deg - self.bias) * self.direction
return deg
def at_target(self):
"""
Checks if the servo is at its target.
:return: True if servo is at its target, else False.
"""
return self.target == self.pwm
def passed_target(self, deg, greater):
"""
Checks if a servo has passed its target.
:param deg: The desired degrees to check.
:param greater: True to check >=, else <=.
:return: True if test is true, else False.
"""
deg = self.normalize(deg)
# Due to clockwise being defined as negative by Finesse, PWM checks should be inverted.
# This is due to the fact that higher PWM in servos is clockwise.
if greater:
return self.deg_to_maestro(deg) <= self.pwm
else:
return self.deg_to_maestro(deg) >= self.pwm
def deg_to_maestro(self, deg):
"""
Converts degrees to 0.25 us.
:param deg: The input degrees.
:return: The PWM in units of 0.25 us.
"""
return round(self.min_pwm + self.k_deg2mae * (deg - self.min_deg))
# Convert 0.25 us to degrees.
def maestro_to_deg(self, pwm):
"""
Converts 0.25 us to degrees.
:param pwm: The input PWM in units of 0.25 us.
:return: Degrees.
"""
return self.min_deg + self.k_mae2deg * (pwm - self.min_pwm)
class Body:
def __init__(self, length, width, cx, cy, mb, ml):
"""
Create a body object.
Note that dimensions are between kinematic roots.
:param length: Length of body (along x-axis).
:param width: Width of body (along y-axis).
:param cx: Bias of center of mass along x.
:param cy: Bias of center of mass along y.
:param mb: Mass of body.
:param ml: Mass of leg.
"""
# Define constants.
self.length = length
self.width = width
self.cx = cx
self.cy = cy
self.mb = mb
self.ml = ml
self.com = np.array((cx, cy, 0))
# Define quick access array.
self.j = np.array((
(2, 1),
(0, 3),
(3, 0),
(1, 2)
))
# Define static vertices.
x = 0.5 * self.length
y = 0.5 * self.width
self.vertices = np.array((
(x, y, 0),
(x, -y, 0),
(-x, y, 0),
(-x, -y, 0)
))
def default_bias(self, next_frame):
"""
Zeros vertices and bias.
:return: Bias.
"""
# Relative to absolute.
original = next_frame + self.vertices
# Get com.
cx, cy = self.get_com(original)
return np.array((-cx, -cy, 0))
@staticmethod
def rotation_matrix(axis, theta):
"""
Return the rotation matrix associated with counterclockwise rotation about the given axis by theta radians.
http://stackoverflow.com/questions/6802577/python-rotation-of-3d-vector (by unutbu).
:param axis: A numpy vector.
:param theta: A float.
:return: The quaternion.
"""
axis /= math.sqrt(np.dot(axis, axis))
a = math.cos(theta / 2.0)
b, c, d = -axis * math.sin(theta / 2.0)
aa, bb, cc, dd = a * a, b * b, c * c, d * d
bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d
return np.array(((aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)),
(2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)),
(2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc)))
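    # Sanity check (illustrative values): with axis np.array((0., 0., 1.))
    # and theta = pi/2, np.dot(rotation_matrix(axis, np.pi / 2), (1., 0., 0.))
    # is approximately (0., 1., 0.) -- a counterclockwise quarter turn
    # about z, matching the docstring's convention.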
def tilt_body(self, vertices, air, theta, lock=True):
"""
Tilt the body to give additional stability.
:param vertices: Vertices of the translated rectangle (4 x 3).
:param air: The index of the leg lifted in the air.
:param theta: Degrees to rotate in radians.
:param lock: Whether or not to lock z-value (usually 0) of the lifted leg.
:return: The tilted vertices.
"""
# Compute rotation axis.
legs = self.j[air]
r0, r1 = vertices[legs]
axis = r1 - r0
# Rotate about axis.
q = self.rotation_matrix(axis, theta)
r = np.dot(vertices, q.T)
if lock:
# Lock the lifted leg back to original position.
delta = vertices[air] - r[air]
vertices = r + delta
else:
# No need to lock. Vertices is simply r.
vertices = r
return vertices
@staticmethod
def closest(x1, x2, y1, y2, x, y):
"""
Compute the point along the two supporting legs that is closest to the center of mass.
This shall be known as "Alastair's magic."
"""
m = (y2 - y1) / (x2 - x1)
b1 = y1 - m * x1
b3 = y + (x / m)
x0 = (b3 - b1) / (m + 1 / m)
y0 = m * x0 + b1
return x0, y0
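    # Quick check of closest() above (assumed numbers): for the support
    # line through (0, 0) and (2, 2), so m = 1, and the point (2, 0):
    # b1 = 0, b3 = 0 + 2/1 = 2, x0 = (2 - 0)/(1 + 1) = 1, y0 = 1 -- the
    # perpendicular foot (1, 1).  Note the divisions by (x2 - x1) and by m:
    # exactly vertical or horizontal support lines degenerate here.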
def get_com(self, frame):
"""
Compute the center of mass given the leg positions.
:param frame: The leg positions.
:return: com -> [cx, cy].
"""
com = self.ml * np.sum(frame[:, :2], axis=0) / (self.ml + self.mb)
com += self.com[:2]
return com
def adjust_crawl(self, off, next_frame, sigma=1.5):
"""
Adjust the center of mass for the crawl gait.
:param off: An array defining which legs are in the air.
:param next_frame: An array representing the next frame (4 x 3).
:param sigma: Safety boundary.
"""
# Get the leg in the air.
air = np.where(off)[0]
air = int(air)
legs = self.j[air]
# Relative to absolute.
original = next_frame + self.vertices
# Get points.
p = original[legs]
x1, y1, z1 = p[0]
x2, y2, z2 = p[1]
# Compute center of mass as with leg positions.
cx, cy = self.get_com(original)
# Get shortest path from zero-moment point to support triangle (perpendicular).
x0, y0 = self.closest(x1, x2, y1, y2, cx, cy)
# Compute additional safety margin.
theta = math.atan2((y2 - y1), (x2 - x1))
rx = sigma * math.sin(theta) + x0
ry = -sigma * math.cos(theta) + y0
rz = 0
rho = np.array((rx, ry, rz))
# Adjust vertices.
# new = original + rho
# Perform tilt.
# new = self.tilt_body(new, air, 0.0)
# Compute bias.
# bias = new - original
return rho
def adjust_trot(self, off, next_frame):
"""
        Adjust the center of mass for the trot gait.
:param off: An array defining which legs are in the air.
:param next_frame: An array representing the next frame (4 x 3).
"""
# Get the leg on the ground.
legs = np.where(~off)[0]
# Relative to absolute.
original = next_frame + self.vertices
# Get points.
p = original[legs]
x1, y1, z1 = p[0]
x2, y2, z2 = p[1]
# Compute center of mass as with leg positions.
cx, cy = self.get_com(original)
# Get closest point from center of mass to support.
x0, y0 = self.closest(x1, x2, y1, y2, cx, cy)
# Compute bias.
rx = x0 - cx
ry = y0 - cy
rz = 0
rho = np.array((rx, ry, rz))
return rho
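        # Geometry sketch (descriptive only): in a trot the support region
        # collapses to the diagonal segment between the two grounded feet, so
        # the bias (rx, ry) translates the body until the center of mass sits
        # on that segment, using the same perpendicular-foot construction as
        # closest().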
def adjust(self, off, next_frame, count=None):
"""
Adjust the center of mass.
:param off: An array indicating whether the leg is in the air.
:param next_frame: The next frame.
:param count: The number of legs in the air.
:return: The bias.
"""
# Check which (if any) optimization is needed.
if count is None:
count = np.count_nonzero(off)
if count == 1:
# Crawl gait.
return self.adjust_crawl(off, next_frame)
elif count == 2 and off[1] == off[2]:
# Trot gait.
return self.adjust_trot(off, next_frame)
else:
return self.default_bias(next_frame)
def translate(self, x, y, z):
"""
Translate the body and thus the center of mass.
:param x: Motion along x.
:param y: Motion along y.
:param z: Motion along z.
:return: Bias.
"""
t = np.array((x, y, z), dtype=float)
bias = np.array((self.cx, self.cy, 0), dtype=float) + t
return bias
def is_supported(self, vertices):
"""
Checks if a given support triangle contains the center of mass.
This assumes the robot is not on a slant or hill.
:param vertices: The transformed vertices as a 3 x 2 numpy matrix.
:return: True if center of mass is in triangle, else False.
"""
triangle = Path(vertices)
return triangle.contains_point(self.com[:2])
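        # Usage sketch (hypothetical foot placements): with three grounded feet
        # projected to (5, 4), (5, -4) and (-5, 4), the pose is statically
        # stable only if the center of mass lies inside that triangle.
        #
        #   body.is_supported(np.array(((5, 4), (5, -4), (-5, 4))))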
class Leg:
def __init__(self, servo1, servo2, servo3, lengths, index, ik, fk):
"""
Create a leg object.
:param servo1: The first hip servo object.
:param servo2: The second hip servo object.
:param servo3: The knee servo object.
:param lengths: The leg segment lengths l1 and l2.
:param index: The leg index (1 - 4).
:param ik: Inverse kinematics solver.
:param fk: Forward kinematics solver.
"""
self.servos = [servo1, servo2, servo3]
self.lengths = lengths
self.length = sum(lengths)
self.index = index
self.ik_solver = ik
self.fk_solver = fk
self.position = None
def target_point(self, point):
"""
Target a point in space.
:param point: (x, y, z).
:return: True if target is reachable, else False.
"""
try:
angles = self.ik_solver(self.lengths, point)
self.servos[0].set_target(angles[0])
self.servos[1].set_target(angles[1])
self.servos[2].set_target(angles[2])
self.position = point
except (ServoError, ValueError, ZeroDivisionError):
logger.error('Leg {} is unable to reach point ({:.2f}, {:.2f}, {:.2f})'.format(self.index, *point))
return False
return True
def target_angle(self, angle):
"""
Target an angle configuration.
:param angle: (theta1, theta2, theta3).
:return: True if target is reachable, else False.
"""
try:
self.servos[0].set_target(angle[0])
self.servos[1].set_target(angle[1])
self.servos[2].set_target(angle[2])
self.position = self.fk_solver(self.lengths, angle)
except ServoError:
logger.error('Leg {} is unable to reach angle ({:.2f}, {:.2f}, {:.2f})'.format(self.index, *angle))
return False
return True
def get_angles(self, point):
"""
Convert a point to angles. Will throw exceptions.
:param point: (x, y, z).
:return: The angles.
"""
return self.ik_solver(self.lengths, point)
def update_position(self):
"""
Update current leg position based on servo data.
"""
a = math.radians(self.servos[0].get_position())
b = math.radians(self.servos[1].get_position())
c = math.radians(self.servos[2].get_position())
self.position = self.fk_solver(self.lengths, (a, b, c))
def get_position(self):
"""
Get the position of the leg. Update if necessary.
:return: Position (x, y, z).
"""
if self.position is None:
self.update_position()
return self.position
def __getitem__(self, key):
return self.servos[key]
def __add__(self, other):
return self.servos + other.servos
def __radd__(self, other):
return other + self.servos
def __len__(self):
return len(self.servos)
class Head:
def __init__(self, servo1, servo2, camera):
"""
Create a head object.
:param servo1: Servo object controlling left and right head turns.
:param servo2: Servo object controlling up and down head turns.
:param camera: A camera object for configuration.
"""
self.servos = [servo1, servo2]
self.camera = camera
self.angles = [0, 0]
self.target = [0, 0]
def at_bound(self):
"""
Check if the head is at the left or right bound.
:return: 1 -> left bound, -1 -> right bound, 0 -> not at bound.
"""
servo = self.servos[0]
low, high = servo.get_range()
position = servo.get_position()
        # Within 0.2 degrees counts as "there".
if abs(position - high) < 0.2:
return 1
elif abs(position - low) < 0.2:
return -1
else:
return 0
def __getitem__(self, item):
return self.servos[item]
def __len__(self):
return len(self.servos)
class Robot:
def __init__(self, leg1, leg2, leg3, leg4, body, head, bias=0):
"""
Define a robot.
:param leg1: Leg object.
:param leg2: Leg object.
:param leg3: Leg object.
:param leg4: Leg object.
:param body: Body object.
:param head: Head object.
:param bias: Rotational bias for body.
"""
# Define legs.
self.legs = [leg1, leg2, leg3, leg4]
self.leg_servos = [servo for leg in self.legs for servo in leg]
# Define head.
self.head = head
self.head_servos = [servo for servo in head]
# Define body.
self.body = body
class Agility:
def __init__(self, robot):
# Set up robot.
self.robot = robot
# Set error.
self.epsilon = 1e-6
# Set up Usc.
try:
self.usc = Usc()
logger.info("Successfully attached to Maestro's low-level interface.")
except ConnectionError:
self.usc = Dummy()
logger.warn("Failed to attached to Maestro's low-level interface. "
"If not debugging, consider this a fatal error.")
# Set up virtual COM and TTL ports.
try:
self.maestro = Maestro()
logger.info("Successfully attached to Maestro's command port.")
except ConnectionError:
self.maestro = Dummy()
logger.warn("Failed to attached to Maestro's command port. "
"If not debugging, consider this a fatal error.")
# Emergency stop.
self.emergency = Event()
# Zero.
self.zero()
def stop(self):
"""
Emergency stop. Stop all wait functions.
"""
self.emergency.set()
def clear(self):
"""
Clear emergency flag.
"""
self.emergency.clear()
def head_rotation(self):
"""
Provides head rotation.
:return: Head rotation in degrees.
"""
servo = self.robot.head[0]
self.maestro.get_position(servo)
return servo.get_position()
def set_head(self, target, t=0):
"""
Move the head to a given position.
Blocks until completion.
:param target: (LR, UD).
:param t: Time in ms. 0 for max speed.
"""
head = self.robot.head
servos = self.robot.head_servos
head[0].set_target(target[0])
head[1].set_target(target[1])
self.maestro.end_together(servos, t, True)
self.wait(servos)
def look_at(self, x, y):
"""
Move the head to look at a given target.
Note that this is an approximation. Best used in a PID loop.
:param x: x-coordinate of target.
:param y: y-coordinate of target.
"""
head = self.robot.head
camera = head.camera
# Define velocity constant.
k = 1.5
# Compute deltas.
dx = (x - 0.5 * camera.width) * -1
dy = (y - 0.5 * camera.height) * -1
dt = dx / camera.width * (camera.fx / 2)
dp = dy / camera.height * (camera.fy / 2)
# Compute suggested velocity. Balance between blur and speed.
vt = int(round(abs(dt * k)))
        vp = int(round(abs(dp * k)))
# Construct array.
data = [dt, vt, dp, vp]
# Perform motion.
self.move_head(data)
# Update target.
head.target = [x, y]
return data
def scan(self, t, direction=None, block=False):
"""
Scans head in a direction. If no direction is given, scans toward bound of last known location.
        If at the minimum or maximum bound, automatically selects the opposite direction.
        If ``block`` is set, waits until the head has fully scanned toward one direction.
:param t: Time in milliseconds.
:param direction: A direction, either None, 1, or -1.
:param block: Whether to wait until completion.
"""
# Obtain definitions.
head = self.robot.head
camera = head.camera
servo = head.servos[0]
# Get bounds.
low, high = servo.get_range()
# Update servo.
self.maestro.get_position(servo)
# Check bound.
bound = head.at_bound()
# Create direction.
if bound != 0:
direction = bound * -1
if direction is None:
if head.target[0] < 0.5 * camera.width:
direction = 1
else:
direction = -1
# Execute.
if direction == 1:
servo.set_target(high)
else:
servo.set_target(low)
self.maestro.end_in(servo, t)
if block:
self.wait(servo)
def center_head(self, t=0):
"""
Returns head to original position.
:param t: The time in ms.
"""
# Obtain definitions.
head = self.robot.head
servos = head.servos
# Target zero.
for servo in servos:
servo.set_target(0)
# Reset to zero.
head.angles = [0, 0]
# Execute.
self.maestro.end_together(servos, t, True)
self.wait(servos)
def move_head(self, data):
"""
Move head based on data parameters. Does not wait for completion.
:param data: An array given by look_at.
"""
# Obtain definitions.
head = self.robot.head
servos = head.servos
# Update positions.
self.maestro.get_multiple_positions(servos)
for i in range(2):
servo = head[i]
current = servo.get_position()
# Get data.
delta = data[i * 2]
velocity = data[i * 2 + 1]
if velocity == 0:
# Already at target. Do nothing.
servo.target = servo.pwm
target = current
else:
# Ensure that head is within bounds.
low, high = servo.get_range()
target = current + delta
if target < low:
target = low
elif target > high:
target = high
servo.set_target(target)
# Update.
head.angles[i] = target
# Set speed.
self.maestro.set_speed(servo, velocity)
# Execute.
self.maestro.set_target(servo)
@staticmethod
def plot_gait(frames):
"""
Plot a gait given some frames. Used for debugging.
:param frames: Frames generated by execute.
"""
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.set_xlabel('X Axis')
ax.set_ylabel('Y Axis')
ax.set_zlabel('Z Axis')
x = frames[:, 0, 0]
y = frames[:, 0, 1]
z = frames[:, 0, 2]
ax.plot(x, y, z, marker='o')
plt.show()
def execute_forever(self, frames, dt):
"""
Like execute_frames(), except it runs forever.
:param frames: An array of frames.
:param dt: Delta t.
"""
# Get all legs and servos for quick access.
legs = self.robot.legs
servos = self.robot.leg_servos
# Update initial leg locations.
self.maestro.get_multiple_positions(servos)
for leg in legs:
leg.get_position()
while True:
for frame in frames:
for i in range(4):
legs[i].target_point(frame[i])
self.maestro.end_together(servos, dt)
self.wait(servos)
def execute_frames(self, frames, dt):
"""
Execute some frames with a constant dt.
:param frames: An array of frames.
:param dt: Delta t.
"""
# Get all legs and servos for quick access.
legs = self.robot.legs
servos = self.robot.leg_servos
# Update initial leg locations.
self.maestro.get_multiple_positions(servos)
for frame in frames:
for i in range(4):
legs[i].target_point(frame[i])
self.maestro.end_together(servos, dt)
self.wait(servos)
def execute_long(self, prev_frame, frames, dt):
"""
Execute frames with constant but possibly long dt.
Automatically computes distance, and, if necessary, interpolates to get more accurate synchronization.
:param prev_frame: The previous frame.
:param frames: An array of frames.
:param dt: Delta t.
"""
# Get all legs and servos for quick access.
legs = self.robot.legs
servos = self.robot.leg_servos
# Define break constant (ms / cm).
k = 100
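        # Worked example (illustrative numbers): with dt = 1000 ms and a
        # maximum leg travel of d = 2 cm, dt / d = 500 ms/cm exceeds k, so the
        # move is split into n = round(500 / 100) + 1 = 6 sub-frames of
        # roughly 167 ms each.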
# Update initial leg locations.
self.maestro.get_multiple_positions(servos)
for frame in frames:
# Compute max distance.
d = max(np.linalg.norm(frame - prev_frame, axis=1))
# Less than break. Too long. Linearly interpolate.
if dt / d > k:
n = int(round(dt / d / k)) + 1
l_frames = self.smooth(prev_frame, frame, n)
l_frames = l_frames[1:]
# Compute time.
t = dt / n
# Execute intermediate frames.
for l_frame in l_frames:
for i in range(4):
legs[i].target_point(l_frame[i])
self.maestro.end_together(servos, t)
self.wait(servos)
else:
t = dt
for i in range(4):
legs[i].target_point(frame[i])
self.maestro.end_together(servos, t)
self.wait(servos)
prev_frame = frame
def execute_variable(self, frames, dts):
"""
Execute some frames with different dt.
:param frames: An array of frames.
:param dts: An array of dt.
"""
# Get all legs and servos for quick access.
legs = self.robot.legs
servos = self.robot.leg_servos
# Update initial leg locations.
self.maestro.get_multiple_positions(servos)
# Assertion check.
assert len(frames) == len(dts)
for t in range(len(frames)):
for i in range(4):
legs[i].target_point(frames[t][i])
self.maestro.end_together(servos, dts[t])
self.wait(servos)
def execute_angles(self, angles, dt):
"""
Like execute_frames(), but uses angles instead.
:param angles: An array of angles.
:param dt: Delta t.
"""
# Get all legs and servos for quick access.
legs = self.robot.legs
servos = self.robot.leg_servos
# Update initial leg locations.
self.maestro.get_multiple_positions(servos)
for angle in angles:
for i in range(4):
legs[i].target_angle(angle)
self.maestro.end_together(servos, dt)
self.wait(servos)
def anglify(self, frames):
"""
Converts frames generated by self.prepare to angles.
:param frames: The input frames.
:return: The output angles ready for execution.
"""
# Get all legs and servos for quick access.
legs = self.robot.legs
# Allocate memory.
angles = np.empty(frames.shape)
for i in range(len(frames)):
for l in range(4):
a = legs[l].get_angles(frames[i][l])
angles[i][l] = a
return angles
@staticmethod
def smooth(a, b, n):
"""
Create a smooth transition from a to b in n steps.
:param a: The first array.
:param b: The second array.
:param n: The number of steps.
:return: An array from [a, b).
"""
assert(a.shape == b.shape)
assert(n > 1)
# Compute delta.
delta = (b - a) / n
# Allocate n-1 with dimension d+1.
shape = (n, *a.shape)
inter = np.empty(shape)
for i in range(n):
inter[i] = a + i * delta
return inter
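        # Minimal sketch (hypothetical endpoints): with a = np.zeros((4, 3)),
        # b = np.ones((4, 3)) and n = 4, smooth(a, b, 4) returns four frames
        # stepping from a toward b in increments of (b - a) / 4; the endpoint
        # b itself is excluded, i.e. the interval [a, b).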
def get_pose(self):
"""
Get the relative pose of the robot.
:return: A (4 x 3) matrix representing the current state of the robot.
"""
# Get all legs for quick access.
legs = self.robot.legs
# Iterate through all legs.
pose = []
for leg in legs:
position = leg.get_position()
pose.append(position)
return np.array(pose, dtype=float)
def target_point(self, leg, point, t):
"""
Move a leg to a given point in t time.
Blocks until completion.
:param leg: Leg index.
:param point: (x, y, z).
:param t: Time in milliseconds.
"""
# Assertion check.
assert(0 <= leg <= 3)
# Get legs for quick access.
legs = self.robot.legs
# Target.
leg = legs[leg]
leg.target_point(point)
# Execute.
servos = leg.servos
self.maestro.end_together(servos, t, True)
# Block until completion.
self.wait(servos)
def lift_leg(self, leg, lift, t):
"""
Lift a leg (change pose) in t time.
        Blocks until completion.
:param leg: The leg index.
:param lift: How high to lift leg.
:param t: Time to execute pose change.
"""
# Assertion check.
assert (0 <= leg <= 3)
# Get legs for quick access.
legs = self.robot.legs
# Define ground.
ground = -max([leg.length for leg in legs]) + 1
# Empty pose.
pose = np.zeros((4, 3))
# Leg lift.
pose[:, 2] = ground
pose[leg][2] = ground + lift
# Execute.
self.target_pose(pose, t)
def target_pose(self, target, t, lift=2):
"""
Get the robot from its current pose to a new pose. Block until completion.
The robot will lift legs appropriately to eliminate dragging.
Automatically adjusts the center of mass during transition and target if necessary.
:param target: The target pose.
:param t: The total time for the adjustment.
:param lift: How much to lift each leg.
"""
# Get body for quick access.
body = self.robot.body
# Create data array.
frames = []
# Get pose. Assume updated.
pose = self.get_pose()
# Early exit.
if np.array_equal(pose, target):
return
# Get ground, which is the lowest point.
curr_g = np.min(pose[:, 2])
next_g = np.min(target[:, 2])
# Generate leg state arrays.
pose_state = np.greater(pose[:, 2], (curr_g + self.epsilon)) # Defines which legs are in the air.
target_state = np.greater(target[:, 2], (next_g + self.epsilon)) # Defines which legs are in the air.
# Get all legs to (0, 0, curr_g) if they are in the air.
if any(pose_state):
f1 = pose.copy()
for i in range(4):
if pose_state[i]:
f1[i] = (0, 0, curr_g)
frames.append(f1)
# Define optimization procedure.
def up_down(ground):
            # For every leg that is not at the right (x, y) and is on the ground in target, lift it and set it down.
for i in range(4):
if not np.array_equal(pose[i][:2], target[i][:2]) and not target_state[i]:
# Get previous frame.
prev = frames[-1]
f4, f5 = prev.copy(), prev.copy()
# Move leg to target (x, y) in air.
x, y = target[i][:2]
f4[i] = (x, y, ground + lift)
# Compute bias and adjust.
s = [False, False, False, False]
s[i] = True
bias = body.adjust(s, f4, 1)
f3 = prev - bias
f4 -= bias
# Move leg down to target. Keep bias.
f5[i] = target[i]
f5 -= bias
# Append data.
frames.extend((f3, f4, f5))
def to_next():
f2 = pose.copy()
f2[:, 2] = next_g
frames.append(f2)
# Different optimization order.
if next_g > curr_g:
# For body high -> low, get legs to next height first.
to_next()
up_down(next_g)
elif curr_g > next_g:
# For body low -> high, get legs to next height last.
up_down(curr_g)
to_next()
# Move to final target if necessary.
if not np.array_equal(frames[-1], target):
if any(target_state):
prev = frames[-1]
bias = body.adjust(target_state, target)
frames.extend((prev - bias, target - bias))
else:
frames.append(target)
# Compute times. Assume equal dt.
dt = t / len(frames)
self.execute_long(pose, frames, dt)
def prepare_frames(self, frames, dt, ground):
"""
Prepare some frames which are non-circular (last frame not linked to first frame).
:param frames: The input frames.
:param dt: dt.
        :param ground: The ground height (z) used to decide which legs are airborne.
:return: (frames, dt) ready for execution.
"""
# Define body for quick access.
body = self.robot.body
# Create array for biases.
biases = np.empty(frames.shape)
# Generate leg state arrays.
state1 = np.greater(frames[:, :, 2], (ground + self.epsilon)) # Defines which legs are in the air.
state2 = state1.sum(1) # The number of legs in the air.
        # Define the number of steps.
steps = len(frames)
        for t in range(steps):
            # Pass the frame data to the center of mass adjustment algorithms;
            # iterating over every frame ensures the last bias is also defined.
            next_frame = frames[t]
# Determine which legs are off.
off = state1[t]
count = state2[t]
# Perform center of mass adjustments accordingly.
biases[t] = body.adjust(off, next_frame, count)
# Adjust frames.
frames -= biases
return frames, dt
def prepare_gait(self, gait, debug=False):
"""
Prepare a given gait class.
:param gait: The gait class.
:param debug: Show gait in a graph.
:return: (frames, dt) ready for execution.
"""
# Define body for quick access.
body = self.robot.body
# Get gait properties.
steps = gait.steps
ground = gait.ground
dt = gait.time / steps
ts = np.linspace(0, 1000, num=steps, endpoint=False)
# Get all legs for quick access.
legs = self.robot.legs
# Compute shape.
shape = (steps, 4, 3)
# Evaluate gait.
f = [gait.evaluate(leg, ts) for leg in legs]
frames = np.concatenate(f).reshape(shape, order='F')
# Debugging.
if debug:
self.plot_gait(frames)
# Create array for biases.
biases = np.empty(shape)
# Generate leg state arrays.
        state1 = np.greater(frames[:, :, 2], (ground + 1e-6)) # Defines which legs are in the air.
state2 = state1.sum(1) # The number of legs in the air.
# Iterate and perform static analysis.
for t in range(steps):
# Look ahead and pass data to center of mass adjustment algorithms.
next_frame = frames[(t + 1) % steps]
# Determine which legs are off.
off = state1[t]
count = state2[t]
# Perform center of mass adjustments accordingly.
biases[t] = body.adjust(off, next_frame, count)
# Adjust frames.
frames -= biases
return frames, dt
def prepare_smoothly(self, gait):
"""
Prepare a gait by intelligently applying smoothing. Only works for planar COM adjustments.
Plus, who doesn't like smooth things? (I'm really tired right now.)
:param gait: The gait object.
:return: (frames, dt) ready for execution.
"""
# Define body for quick access.
body = self.robot.body
# Get gait properties.
steps = gait.steps
ground = gait.ground
dt = gait.time / steps
ts = np.linspace(0, 1000, num=steps, endpoint=False)
# Get all legs for quick access.
legs = self.robot.legs
# Compute shape.
shape = (steps, 4, 3)
# Evaluate gait.
f = [gait.evaluate(leg, ts) for leg in legs]
frames = np.concatenate(f).reshape(shape, order='F')
# Generate leg state arrays.
state1 = np.greater(frames[:, :, 2], (ground + 1e-6)) # Defines which legs are in the air.
state2 = state1.sum(1) # The number of legs in the air.
# Get indices of legs in air.
air = np.where(state2 != 0)[0]
air = air.tolist()
# Create array for biases.
biases = np.empty(shape)
# Keep track of last air -> ground.
t = air[-1]
if state2[(t + 1) % steps] == 0:
# Last air frame is an air -> ground transition.
last_ag = t
else:
            # The air run wraps around the cycle; the air -> ground transition will be found inside the loop.
last_ag = None
# Compute biases for each frame that is not on the ground.
for i in range(len(air)):
# Get the index relative to all frames.
t = air[i]
# Compute bias as usual.
next_frame = frames[(t + 1) % steps]
off = state1[t]
count = state2[t]
biases[t] = body.adjust(off, next_frame, count)
# Checks if the current frame represents a ground -> air transition.
if state2[t - 1] == 0:
curr_bias = biases[t]
prev_bias = biases[last_ag]
# Smooth from [t, last_ag).
if t > last_ag:
n = t - last_ag
inter = self.smooth(prev_bias, curr_bias, n)
biases[last_ag:t] = inter
else:
n = steps - last_ag + t
inter = self.smooth(prev_bias, curr_bias, n)
biases[last_ag:] = inter[:(steps - last_ag)]
biases[:t] = inter[(steps - last_ag):]
# Check if the current frame represents an air -> ground transition.
if state2[(t + 1) % steps] == 0:
last_ag = t
# Adjust frames.
frames -= biases
return frames, dt
def move_body(self, x, y, z, t=0):
"""
Move the body some x, y, and z.
:param x: Move x.
:param y: Move y.
:param z: Move z.
:param t: The time in ms.
"""
legs = self.robot.legs
servos = self.robot.leg_servos
self.maestro.get_multiple_positions(servos)
for leg in legs:
a, b, c = leg.get_position()
a -= x
b -= y
c -= z
leg.target_point((-x, -y, -leg.length - z))
self.maestro.end_together(servos, t)
self.wait(servos)
def configure(self):
"""
Configure the Maestro by writing home positions and other configuration data to the device.
"""
settings = self.usc.getUscSettings()
settings.serialMode = uscSerialMode.SERIAL_MODE_USB_DUAL_PORT
for leg in self.robot.legs:
for servo in leg:
servo.zero()
channel = settings.channelSettings[servo.channel]
channel.mode = ChannelMode.Servo
channel.homeMode = HomeMode.Goto
channel.home = servo.target
channel.minimum = (servo.min_pwm // 64) * 64
channel.maximum = -(-servo.max_pwm // 64) * 64
for servo in self.robot.head:
servo.zero()
channel = settings.channelSettings[servo.channel]
channel.mode = ChannelMode.Servo
channel.homeMode = HomeMode.Goto
channel.home = servo.target
channel.minimum = (servo.min_pwm // 64) * 64
channel.maximum = -(-servo.max_pwm // 64) * 64
self.usc.setUscSettings(settings, False)
self.usc.reinitialize(500)
def go_home(self):
"""
Let the Maestro return all servos to home.
"""
self.maestro.go_home()
def ready(self, z, t=2000):
"""
        Ready a gait by lowering the robot to the gait plane.
:param z: Height of gait.
:param t: Time in milliseconds
"""
# Compute desired pose.
pose = np.zeros((4, 3))
pose[:, 2] = z
# Execute position.
self.target_pose(pose, t)
def zero(self):
"""
Manual return home by resetting all leg servo targets.
"""
# Get all legs and servos for quick access.
legs = self.robot.legs
s1 = self.robot.leg_servos
for leg in legs:
z = -leg.length
leg.target_point((0, 0, z))
# Execute.
self.set_head((0, 0), 1000)
self.maestro.end_together(s1, 1000, True)
# Wait until completion.
self.wait()
def wait(self, servos=None):
"""
Block until all servos have reached their targets.
:param servos: An array of servos. If None, checks if all servos have reached their targets (more efficient).
"""
while not self.is_at_target(servos=servos) and not self.emergency.is_set():
time.sleep(0.001)
def is_at_target(self, servos=None):
"""
Check if servos are at their target.
:param servos: One or more servo objects. If None, checks if all servos have reached their targets (more efficient).
:return: True if all servos are at their targets, False otherwise.
"""
if servos is None:
return not self.maestro.get_moving_state()
elif isinstance(servos, Servo):
self.maestro.get_position(servos)
if servos.at_target():
return True
return False
else:
self.maestro.get_multiple_positions(servos)
if all(servo.at_target() for servo in servos):
return True
return False
|
mit
| -7,332,994,944,111,070,000
| 27.934645
| 124
| 0.52898
| false
| 3.824304
| false
| false
| false
|
torgartor21/solar
|
solar/solar/interfaces/db/redis_graph_db.py
|
1
|
9405
|
import json
import redis
import fakeredis
from .base import BaseGraphDB, Node, Relation
from .redis_db import OrderedHash
class RedisGraphDB(BaseGraphDB):
DB = {
'host': 'localhost',
'port': 6379,
}
REDIS_CLIENT = redis.StrictRedis
def __init__(self):
self._r = self.REDIS_CLIENT(**self.DB)
self.entities = {}
def node_db_to_object(self, node_db):
if isinstance(node_db, Node):
return node_db
return Node(
self,
node_db['name'],
[node_db['collection']],
node_db['properties']
)
def relation_db_to_object(self, relation_db):
if isinstance(relation_db, Relation):
return relation_db
if relation_db['type_'] == BaseGraphDB.RELATION_TYPES.input_to_input.name:
source_collection = BaseGraphDB.COLLECTIONS.input
dest_collection = BaseGraphDB.COLLECTIONS.input
elif relation_db['type_'] == BaseGraphDB.RELATION_TYPES.resource_input.name:
source_collection = BaseGraphDB.COLLECTIONS.resource
dest_collection = BaseGraphDB.COLLECTIONS.input
elif relation_db['type_'] == BaseGraphDB.RELATION_TYPES.resource_event.name:
source_collection = BaseGraphDB.COLLECTIONS.resource
            dest_collection = BaseGraphDB.COLLECTIONS.events
        else:
            raise ValueError('Unknown relation type: {0}'.format(relation_db['type_']))
source = self.get(relation_db['source'], collection=source_collection)
dest = self.get(relation_db['dest'], collection=dest_collection)
return Relation(
self,
source,
dest,
relation_db['properties']
)
def all(self, collection=BaseGraphDB.DEFAULT_COLLECTION):
"""Return all elements (nodes) of type `collection`."""
key_glob = self._make_collection_key(collection, '*')
for result in self._all(key_glob):
yield result
def all_relations(self, type_=BaseGraphDB.DEFAULT_RELATION):
"""Return all relations of type `type_`."""
key_glob = self._make_relation_key(type_, '*')
for result in self._all(key_glob):
yield result
    def _all(self, key_glob):
        keys = self._r.keys(key_glob)
        with self._r.pipeline() as pipe:
            pipe.multi()
            # Queue all reads on the pipeline so they go out in one round trip.
            for key in keys:
                pipe.get(key)
            values = pipe.execute()
        for value in values:
            yield json.loads(value)
def clear(self):
"""Clear the whole DB."""
self._r.flushdb()
def clear_collection(self, collection=BaseGraphDB.DEFAULT_COLLECTION):
"""Clear all elements (nodes) of type `collection`."""
        key_glob = self._make_collection_key(collection, '*')
        keys = self._r.keys(key_glob)
        if keys:
            self._r.delete(*keys)
def create(self, name, properties={}, collection=BaseGraphDB.DEFAULT_COLLECTION):
"""Create element (node) with given name, properties, of type `collection`."""
if isinstance(collection, self.COLLECTIONS):
collection = collection.name
properties = {
'name': name,
'properties': properties,
'collection': collection,
}
self._r.set(
self._make_collection_key(collection, name),
json.dumps(properties)
)
return properties
def create_relation(self,
source,
dest,
properties={},
type_=BaseGraphDB.DEFAULT_RELATION):
"""
Create relation (connection) of type `type_` from source to dest with
given properties.
"""
return self.create_relation_str(
source.uid, dest.uid, properties, type_=type_)
def create_relation_str(self, source, dest,
properties={}, type_=BaseGraphDB.DEFAULT_RELATION):
if isinstance(type_, self.RELATION_TYPES):
type_ = type_.name
uid = self._make_relation_uid(source, dest)
properties = {
'source': source,
'dest': dest,
'properties': properties,
'type_': type_,
}
self._r.set(
self._make_relation_key(type_, uid),
json.dumps(properties)
)
return properties
def get(self, name, collection=BaseGraphDB.DEFAULT_COLLECTION,
return_empty=False):
"""Fetch element with given name and collection type."""
try:
collection_key = self._make_collection_key(collection, name)
item = self._r.get(collection_key)
if not item and return_empty:
return item
return json.loads(item)
except TypeError:
raise KeyError(collection_key)
def delete(self, name, collection=BaseGraphDB.DEFAULT_COLLECTION):
keys = self._r.keys(self._make_collection_key(collection, name))
if keys:
self._r.delete(*keys)
def get_or_create(self,
name,
properties={},
collection=BaseGraphDB.DEFAULT_COLLECTION):
"""
Fetch or create element (if not exists) with given name, properties of
type `collection`.
"""
try:
return self.get(name, collection=collection)
except KeyError:
return self.create(name, properties=properties, collection=collection)
def _relations_glob(self,
source=None,
dest=None,
type_=BaseGraphDB.DEFAULT_RELATION):
if source is None:
source = '*'
else:
source = source.uid
if dest is None:
dest = '*'
else:
dest = dest.uid
return self._make_relation_key(type_, self._make_relation_uid(source, dest))
def delete_relations(self,
source=None,
dest=None,
type_=BaseGraphDB.DEFAULT_RELATION,
has_properties=None):
"""Delete all relations of type `type_` from source to dest."""
glob = self._relations_glob(source=source, dest=dest, type_=type_)
keys = self._r.keys(glob)
if not keys:
return
        if not has_properties:
            self._r.delete(*keys)
            return
rels = self.get_relations(
source=source, dest=dest, type_=type_, has_properties=has_properties
)
for r in rels:
self.delete_relations(
source=r.start_node,
dest=r.end_node,
type_=type_
)
def get_relations(self,
source=None,
dest=None,
type_=BaseGraphDB.DEFAULT_RELATION,
has_properties=None):
"""Fetch all relations of type `type_` from source to dest."""
glob = self._relations_glob(source=source, dest=dest, type_=type_)
def check_has_properties(r):
if has_properties:
for k, v in has_properties.items():
if not r['properties'].get(k) == v:
return False
return True
for r in self._all(glob):
# Glob is primitive, we must filter stuff correctly here
if source and r['source'] != source.uid:
continue
if dest and r['dest'] != dest.uid:
continue
if not check_has_properties(r):
continue
yield r
def get_relation(self, source, dest, type_=BaseGraphDB.DEFAULT_RELATION):
"""Fetch relations with given source, dest and type_."""
        uid = self._make_relation_uid(source.uid, dest.uid)
try:
return json.loads(
self._r.get(self._make_relation_key(type_, uid))
)
except TypeError:
raise KeyError
def get_or_create_relation(self,
source,
dest,
properties=None,
type_=BaseGraphDB.DEFAULT_RELATION):
"""Fetch or create relation with given properties."""
properties = properties or {}
try:
return self.get_relation(source, dest, type_=type_)
except KeyError:
return self.create_relation(source, dest, properties=properties, type_=type_)
def _make_collection_key(self, collection, _id):
if isinstance(collection, self.COLLECTIONS):
collection = collection.name
# NOTE: hiera-redis backend depends on this!
return '{0}:{1}'.format(collection, _id)
def _make_relation_uid(self, source, dest):
"""
There can be only one relation from source to dest, that's why
this function works.
"""
return '{0}-{1}'.format(source, dest)
def _make_relation_key(self, type_, _id):
if isinstance(type_, self.RELATION_TYPES):
type_ = type_.name
# NOTE: hiera-redis backend depends on this!
return '{0}:{1}'.format(type_, _id)
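        # Key layout sketch (hypothetical names): a node "node1" in the
        # "resource" collection is stored under the key "resource:node1", while
        # a relation of type "input_to_input" from "a" to "b" lives under
        # "input_to_input:a-b".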
def get_ordered_hash(self, collection):
return OrderedHash(self._r, collection)
class FakeRedisGraphDB(RedisGraphDB):
REDIS_CLIENT = fakeredis.FakeStrictRedis
|
apache-2.0
| -3,255,286,357,683,565,600
| 30.560403
| 89
| 0.547581
| false
| 4.434229
| false
| false
| false
|
jainanisha90/WeVoteServer
|
search/query_test_script.py
|
1
|
8852
|
#!/usr/bin/env python
# Test this by entering the search string "election" on a command line like this:
# /home/wevote/WeVoteServer/search/query_test_script.py election
from elasticsearch import Elasticsearch
import sys
es = Elasticsearch(["172.31.24.246:9200"], timeout = 120, max_retries = 5, retry_on_timeout = True)
if len(sys.argv) < 2:
print "Usage: %s <search term>" % (sys.argv[0])
sys.exit(-1)
search_term = sys.argv[1]
#query = { "query": {"match": { "candidate_name": "Joe"}}}
#query = { "query": {"match": { "candidate_name": "Joe"}}}
#query = { "query": { "multi_match": { "type": "phrase_prefix", "query": search_term, "fields": [ "candidate_name", "candidate_twitter_handle", "twitter_name", "measure_subtitle", "measure_text", "measure_title", "office_name", "first_name", "middle_name", "last_name", "party", "organization_name", "organization_twitter_handle", "twitter_description" ] } }}
query = { "query": { "multi_match": { "type": "phrase_prefix", "query": search_term, "fields": [ "google_civic_election_id", "candidate_name", "candidate_twitter_handle", "election_name", "twitter_name", "measure_subtitle", "measure_text", "measure_title", "office_name", "party", "organization_name", "organization_twitter_handle", "twitter_description" ] } }}
query_with_election_date = { "query": { "multi_match": { "type": "phrase_prefix",
"query": search_term,
"fields": [ "election_name^3", "google_civic_election_id",
"candidate_name",
"candidate_twitter_handle", "election_name",
"twitter_name", "measure_subtitle", "measure_text",
"measure_title", "office_name", "party",
"organization_name", "organization_twitter_handle",
"twitter_description", "state_name"],
"slop": 5}},
"sort": [{"election_day_text": {"order": "desc"}},
{"_score": {"order": "desc"}}]}
query_with_missing_last_election_date = { "query": { "multi_match": { "type": "phrase_prefix",
"query": search_term,
"fields": [ "election_name^3", "google_civic_election_id",
"candidate_name",
"candidate_twitter_handle", "election_name",
"twitter_name", "measure_subtitle", "measure_text",
"measure_title", "office_name", "party",
"organization_name", "organization_twitter_handle",
"twitter_description", "state_name"],
"slop": 5}},
"sort": [{"election_day_text": {"missing": "_last", "order": "desc"}},
{"_score": {"order": "desc"}}]}
query_with_missing_election_date_without_order = { "query": { "multi_match": { "type": "phrase_prefix",
"query": search_term,
"fields": [ "election_name^3", "google_civic_election_id",
"candidate_name",
"candidate_twitter_handle", "election_name",
"twitter_name", "measure_subtitle", "measure_text",
"measure_title", "office_name", "party",
"organization_name", "organization_twitter_handle",
"twitter_description", "state_name"],
"slop": 5}},
"sort": [{"election_day_text": {"missing": "1111-11-11"}},
{"_score": {"order": "desc"}}]}
query_with_election_missing_date_value = { "query": { "multi_match": { "type": "phrase_prefix",
"query": search_term,
"fields": [ "election_name^3", "google_civic_election_id",
"candidate_name",
"candidate_twitter_handle", "election_name",
"twitter_name", "measure_subtitle", "measure_text",
"measure_title", "office_name", "party",
"organization_name", "organization_twitter_handle",
"twitter_description", "state_name"],
"slop": 5}},
"sort": [{"election_day_text": {"missing": "1111-11-11", "order": "desc"}},
{"_score": {"order": "desc"}}]}
# Example of querying ALL indexes
res = es.search(body=query)
res_with_election_date = es.search(body=query_with_election_date)
res_with_missing_last_election_date = es.search(body=query_with_missing_last_election_date)
# res_with_missing_election_date_without_order = es.search(body=query_with_missing_election_date_without_order)
# res_with_election_missing_date_value = es.search(body=query_with_election_missing_date_value)
print "Got %d hits from all index search: " % res['hits']['total']
print "Got %d hits from all index search: " % res_with_election_date['hits']['total']
print "Got %d hits from all index search: " % res_with_missing_last_election_date['hits']['total']
# print "Got %d hits from all index search: " % res_with_missing_election_date_without_order['hits']['total']
# print "Got %d hits from all index search: " % res_with_election_missing_date_value['hits']['total']
for hit in res['hits']['hits']:
print "------------- RESULT --------------"
for field in hit:
print "%s: %s" % (field, hit[field])
print "============================================"
print "============================================"
for hit in res_with_election_date['hits']['hits']:
print "------------- RESULT --------------"
for field in hit:
print "%s: %s" % (field, hit[field])
print "============================================"
print "============================================"
for hit in res_with_missing_last_election_date['hits']['hits']:
print "------------- RESULT --------------"
for field in hit:
print "%s: %s" % (field, hit[field])
print "============================================"
# print "============================================"
# for hit in res_with_missing_election_date_without_order['hits']['hits']:
# print "------------- RESULT --------------"
# for field in hit:
# print "%s: %s" % (field, hit[field])
# print "============================================"
# print "============================================"
# for hit in res_with_election_missing_date_value['hits']['hits']:
# print "------------- RESULT --------------"
# for field in hit:
# print "%s: %s" % (field, hit[field])
# example of querying single index
if True:
res = es.search(index="elections", body={ "query": {"match": { "google_civic_election_id": "5000"}}})
print "Got %d hits from single index search: " % res['hits']['total']
for hit in res['hits']['hits']:
for field in hit:
print "%s: %s" % (field, hit[field])
|
mit
| -3,072,688,002,611,289,600
| 72.766667
| 361
| 0.405219
| false
| 4.861065
| false
| false
| false
|
bourguet/operator_precedence_parsing
|
modified_operator_precedence.py
|
1
|
9379
|
#! /usr/bin/env python3
import sys
import lexer
from tree import Node, CompositeNode
class SymbolDesc:
def __init__(self, symbol, lprio, rprio, evaluator):
self.symbol = symbol
self.lprio = lprio
self.rprio = rprio
self.evaluator = evaluator
def __repr__(self):
return '<Symbol {} {}/{}>'.format(self.symbol, self.lprio, self.rprio)
def identity_evaluator(args):
if len(args) == 1 and type(args[0]) == SymbolDesc:
return Node(args[0].symbol)
else:
return CompositeNode('ID ERROR', args)
def binary_evaluator(args):
if len(args) != 3 or type(args[0]) == SymbolDesc or type(args[1]) != SymbolDesc or type(args[2]) == SymbolDesc:
return CompositeNode('BINARY ERROR', args)
return CompositeNode(args[1].symbol, [args[0], args[2]])
class Parser:
def __init__(self):
self.presymbols = {}
self.presymbols['$soi$'] = SymbolDesc('$soi$', 0, 0, None)
self.postsymbols = {}
self.postsymbols['$soi$'] = SymbolDesc('$soi$', 0, 0, None)
def register_presymbol(self, oper, lprio, rprio, evaluator=None):
if evaluator is None:
evaluator = unary_evaluator
if type(oper) is str:
self.presymbols[oper] = SymbolDesc(oper, lprio, rprio, evaluator)
else:
for op in oper:
self.presymbols[op] = SymbolDesc(op, lprio, rprio, evaluator)
def register_postsymbol(self, oper, lprio, rprio, evaluator=None):
if evaluator is None:
evaluator = binary_evaluator
if type(oper) is str:
self.postsymbols[oper] = SymbolDesc(oper, lprio, rprio, evaluator)
else:
for op in oper:
self.postsymbols[op] = SymbolDesc(op, lprio, rprio, evaluator)
def advance(self):
try:
            self.cur_token = next(self.lexer)
except StopIteration:
self.cur_token = None
def reset(self, s):
self.lexer = lexer.tokenize(s)
self.advance()
self.stack = [self.presymbols['$soi$']]
def id_symbol(self, id):
return SymbolDesc(id, 999, 1000, identity_evaluator)
def evaluate_handle(self, args):
for i in args:
if type(i) == SymbolDesc:
return i.evaluator(args)
raise RuntimeError('Internal error: no evaluator found in {}'.format(args))
def evaluate(self):
idx = len(self.stack)-1
if type(self.stack[idx]) != SymbolDesc:
idx -= 1
curprio = self.stack[idx].lprio
while type(self.stack[idx-1]) != SymbolDesc or self.stack[idx-1].rprio == curprio:
idx -= 1
if type(self.stack[idx]) == SymbolDesc:
curprio = self.stack[idx].lprio
args = self.stack[idx:]
self.stack[idx:] = []
self.stack.append(self.evaluate_handle(args))
def tos_symbol(self):
idx = len(self.stack)-1
while type(self.stack[idx]) != SymbolDesc:
idx -= 1
return self.stack[idx]
def cur_sym(self, allow_presymbol):
if self.cur_token is None:
return None
elif self.cur_token.kind == 'ID':
return self.id_symbol(self.cur_token)
elif self.cur_token.kind == 'NUMBER':
return self.id_symbol(self.cur_token)
elif allow_presymbol and self.cur_token.lexem in self.presymbols:
return self.presymbols[self.cur_token.lexem]
elif self.cur_token.lexem in self.postsymbols:
return self.postsymbols[self.cur_token.lexem]
else:
return None
def parse(self, s):
self.reset(s)
while True:
sym = self.cur_sym(type(self.stack[-1]) == SymbolDesc)
if sym is None:
break
while self.tos_symbol().rprio > sym.lprio:
self.evaluate()
sym = self.cur_sym(False)
self.stack.append(sym)
self.advance()
while len(self.stack) > 2 or (len(self.stack) == 2 and type(self.stack[-1]) == SymbolDesc):
self.evaluate()
if len(self.stack) == 1:
res = None
elif len(self.stack) == 2:
res = self.stack[1]
if self.cur_token is not None:
res = CompositeNode('REMAINING INPUT', [res, self.cur_token])
return res
def open_parenthesis_evaluator(args):
if (len(args) == 3
and type(args[0]) == SymbolDesc and args[0].symbol == '('
and type(args[1]) != SymbolDesc
and type(args[2]) == SymbolDesc and args[2].symbol == ')'):
return args[1]
elif (len(args) == 3
and type(args[0]) != SymbolDesc
and type(args[1]) == SymbolDesc and args[1].symbol == '('
and type(args[2]) == SymbolDesc and args[2].symbol == ')'):
return CompositeNode('call', [args[0]])
elif (len(args) == 4
and type(args[0]) != SymbolDesc
and type(args[1]) == SymbolDesc and args[1].symbol == '('
and type(args[2]) != SymbolDesc
and type(args[3]) == SymbolDesc and args[3].symbol == ')'):
if args[2].token == ',':
callargs = args[2].children
else:
callargs = [args[2]]
callargs.insert(0, args[0])
return CompositeNode('call', callargs)
else:
return CompositeNode('( ERROR', args)
def close_parenthesis_evaluator(args):
return CompositeNode(') ERROR', args)
def open_bracket_evaluator(args):
if (len(args) == 4
and type(args[0]) != SymbolDesc
and type(args[1]) == SymbolDesc and args[1].symbol == '['
and type(args[2]) != SymbolDesc
and type(args[3]) == SymbolDesc and args[3].symbol == ']'):
return CompositeNode('get', [args[0], args[2]])
else:
return CompositeNode('[ ERROR', args)
def close_bracket_evaluator(args):
return CompositeNode('] ERROR', args)
def coma_evaluator(args):
return CompositeNode(',', [x for x in args if type(x) != SymbolDesc])
def unary_evaluator(args):
if len(args) != 2:
return CompositeNode('UNARY ERROR', args)
if type(args[0]) == SymbolDesc and type(args[1]) != SymbolDesc:
return CompositeNode(args[0].symbol, [args[1]])
elif type(args[0]) != SymbolDesc and type(args[1]) == SymbolDesc:
return CompositeNode('post'+args[1].symbol, [args[0]])
else:
return CompositeNode('UNARY ERROR', args)
def unary_or_binary_evaluator(args):
if (len(args) == 2
and type(args[0]) == SymbolDesc
and type(args[1]) != SymbolDesc):
return CompositeNode(args[0].symbol, [args[1]])
elif (len(args) == 2
and type(args[0]) != SymbolDesc
and type(args[1]) == SymbolDesc):
return CompositeNode('post'+args[1].symbol, [args[0]])
elif (len(args) == 3
and type(args[0]) != SymbolDesc
and type(args[1]) == SymbolDesc
and type(args[2]) != SymbolDesc):
return CompositeNode(args[1].symbol, [args[0], args[2]])
else:
return CompositeNode('1,2-ARY ERROR', args)
def question_evaluator(args):
if (len(args) != 5
or type(args[0]) == SymbolDesc
or type(args[1]) != SymbolDesc or args[1].symbol != '?'
or type(args[2]) == SymbolDesc
or type(args[3]) != SymbolDesc or args[3].symbol != ':'
or type(args[4]) == SymbolDesc):
return CompositeNode('? ERROR', args)
return CompositeNode('?', [args[0], args[2], args[4]])
def colon_evaluator(args):
return CompositeNode(': ERROR', args)
def cexp_parser():
parser = Parser()
parser.register_postsymbol(',', 2, 2, coma_evaluator)
parser.register_postsymbol(['=', '*=', '/=', '%=', '+=', '-=', '<<=', '>>=', '&=', '|=', '^='], 5, 4)
parser.register_postsymbol('?', 7, 1, question_evaluator)
parser.register_postsymbol(':', 1, 6, colon_evaluator)
parser.register_postsymbol('||', 8, 9)
parser.register_postsymbol('&&', 10, 11)
parser.register_postsymbol('|', 12, 13)
parser.register_postsymbol('^', 14, 15)
parser.register_postsymbol('&', 16, 17)
parser.register_postsymbol(['==', '!='], 18, 19)
parser.register_postsymbol(['<', '>', '<=', '>='], 20, 21)
parser.register_postsymbol(['<<', '>>'], 22, 23)
parser.register_postsymbol(['+', '-'], 24, 25)
parser.register_postsymbol(['/', '%', '*'], 26, 27)
parser.register_postsymbol('**', 29, 28)
parser.register_presymbol(['+', '-', '++', '--', '~', '!', '&', '*'], 31, 30, unary_evaluator)
parser.register_postsymbol(['++', '--'], 32, 33, unary_evaluator)
parser.register_postsymbol(['.', '->'], 32, 33)
parser.register_postsymbol('(', 100, 1, open_parenthesis_evaluator)
parser.register_postsymbol(')', 1, 100, close_parenthesis_evaluator)
parser.register_postsymbol('[', 100, 1, open_bracket_evaluator)
parser.register_postsymbol(']', 1, 100, close_bracket_evaluator)
return parser
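# Illustrative precedence check (hypothetical input): with the table above,
# '*' binds tighter than '+', and '=' binds loosest and associates to the
# right, so "a = b + c * d" groups as a = (b + (c * d)).
#
#   parser = cexp_parser()
#   print(parser.parse('a = b + c * d'))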
def main(args):
parser = cexp_parser()
for s in args[1:]:
try:
exp = parser.parse(s)
print('{} -> {}'.format(s, exp))
except RuntimeError as run_error:
print('Unable to parse {}: {}'.format(s, run_error))
if __name__ == "__main__":
main(sys.argv)
|
bsd-2-clause
| 3,458,328,838,560,375,000
| 34.661597
| 115
| 0.565625
| false
| 3.560744
| false
| false
| false
|
rwl/PyCIM
|
CIM14/ENTSOE/Dynamics/IEC61970/Meas/MeasMeasurement.py
|
1
|
2367
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.ENTSOE.Dynamics.IEC61970.Core.CoreIdentifiedObject import CoreIdentifiedObject
class MeasMeasurement(CoreIdentifiedObject):
def __init__(self, PowerSystemResource=None, *args, **kw_args):
"""Initialises a new 'MeasMeasurement' instance.
@param PowerSystemResource:
"""
self._PowerSystemResource = None
self.PowerSystemResource = PowerSystemResource
super(MeasMeasurement, self).__init__(*args, **kw_args)
_attrs = []
_attr_types = {}
_defaults = {}
_enums = {}
_refs = ["PowerSystemResource"]
_many_refs = []
def getPowerSystemResource(self):
"""
"""
return self._PowerSystemResource
def setPowerSystemResource(self, value):
if self._PowerSystemResource is not None:
filtered = [x for x in self.PowerSystemResource.Measurements if x != self]
self._PowerSystemResource._Measurements = filtered
self._PowerSystemResource = value
if self._PowerSystemResource is not None:
if self not in self._PowerSystemResource._Measurements:
self._PowerSystemResource._Measurements.append(self)
PowerSystemResource = property(getPowerSystemResource, setPowerSystemResource)
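    # Usage sketch (psr is a hypothetical PowerSystemResource instance):
    # assigning the property keeps the reverse reference in sync, so after
    #
    #   measurement = MeasMeasurement()
    #   measurement.PowerSystemResource = psr
    #
    # the measurement also appears in psr.Measurements.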
|
mit
| 2,298,372,686,646,743,600
| 39.810345
| 89
| 0.718209
| false
| 4.47448
| false
| false
| false
|
wbonnet/sbit
|
sbit/cli_command.py
|
1
|
2965
|
#
# The contents of this file are subject to the Apache 2.0 license; you may not
# use this file except in compliance with the License.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
#
# Copyright 2017 DFT project (http://www.debianfirmwaretoolkit.org).
# All rights reserved. Use is subject to license terms.
#
#
# Contributors list :
#
# William Bonnet wllmbnnt@gmail.com, wbonnet@theitmakers.com
#
#
""" This module implements The base class and functionnalities used by all the
cli targets.
"""
import subprocess
from sbit.model import Key
# -----------------------------------------------------------------------------
#
# Class CliCommand
#
# -----------------------------------------------------------------------------
class CliCommand(object):
"""This class implements the base class used for all command from cli
It provides method used in all the derivated command, such has
command execution and error handling, qemu setup and tear down, etc
"""
# -------------------------------------------------------------------------
#
# __init__
#
# -------------------------------------------------------------------------
def __init__(self, configuration):
"""Default constructor
"""
# Object storing the configuration definition. holds all the
# configuration and definition used by the different stage of
# the tool execution
self.cfg = configuration
# -------------------------------------------------------------------------
#
# execute_command
#
# -------------------------------------------------------------------------
def execute_command(self, command):
""" This method run a command as a subprocess. Typical use case is
running commands.
This method is a wrapper to subprocess.run , and will be moved soon
in a helper object. It provides mutalisation of error handling
"""
self.cfg.logging.debug("running : " + command)
try:
      # Execute the subprocess; output and errors are piped
completed = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=True, check=True, universal_newlines=False)
# Return the output of the process to the caller
return completed.returncode, completed.stdout, completed.stderr
    # We catch CalledProcessError, but continue execution and return the completed
    # structure to the caller. It has to be done since we execute tests that can
    # fail; global execution should not stop on the first error.
except subprocess.CalledProcessError as exception:
# Return the output of the process to the caller
return exception.returncode, exception.stdout.decode(Key.UTF8.value), \
exception.stderr.decode(Key.UTF8.value)
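  # Usage sketch (the configuration object is assumed to come from the
  # caller): failures are returned rather than raised, so a test batch can
  # keep running after a failing command.
  #
  #   cmd = CliCommand(configuration)
  #   rc, out, err = cmd.execute_command("uname -a")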
|
apache-2.0
| 6,353,952,181,077,633,000
| 33.08046
| 98
| 0.597976
| false
| 4.933444
| false
| false
| false
|
sxjscience/tvm
|
tutorials/get_started/tvmc_command_line_driver.py
|
1
|
12118
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Getting Started with TVM command line driver - TVMC
===================================================
**Authors**:
`Leandro Nunes <https://github.com/leandron>`_,
`Matthew Barrett <https://github.com/mbaret>`_
This tutorial is an introduction to working with TVMC, the TVM command
line driver. TVMC is a tool that exposes TVM features such as
auto-tuning, compilation, profiling and execution of models, via a
command line interface.
In this tutorial we are going to use TVMC to compile, run and tune a
ResNet-50 on an x86 CPU.
We are going to start by downloading ResNet 50 V2. Then, we are going
to use TVMC to compile this model into a TVM module, and use the
compiled module to generate predictions. Finally, we are going to experiment
with the auto-tuning options that can be used to help the compiler
improve network performance.
The final goal is to give an overview of TVMC's capabilities and also
some guidance on where to look for more information.
"""
######################################################################
# Using TVMC
# ----------
#
# TVMC is a Python application, part of the TVM Python package.
# When you install TVM using a Python package, you will get TVMC
# as a command line application called ``tvmc``.
#
# Alternatively, if you have TVM as a Python module on your
# ``$PYTHONPATH``, you can access the command line driver functionality
# via the executable python module, ``python -m tvm.driver.tvmc``.
#
# For simplicity, this tutorial will mention TVMC command line using
# ``tvmc <options>``, but the same results can be obtained with
# ``python -m tvm.driver.tvmc <options>``.
#
# You can check the help page using:
#
# .. code-block:: bash
#
# tvmc --help
#
#
# As you can see in the help page, the main features are
# accessible via the subcommands ``tune``, ``compile`` and ``run``.
# To read about specific options under a given subcommand, use
# ``tvmc <subcommand> --help``.
#
# In the following sections we will use TVMC to tune, compile and
# run a model. But first, we need a model.
#
######################################################################
# Obtaining the model
# -------------------
#
# We are going to use ResNet-50 V2 as an example to experiment with TVMC.
# The version below is in ONNX format. To download the file, you can use
# the command below:
#
# .. code-block:: bash
#
# wget https://github.com/onnx/models/raw/master/vision/classification/resnet/model/resnet50-v2-7.onnx
#
#
######################################################################
# .. note:: Supported model formats
#
# TVMC supports models created with Keras, ONNX, TensorFlow, TFLite
#    and Torch. Use the option ``--model-format`` if you need to
# explicitly provide the model format you are using. See ``tvmc
# compile --help`` for more information.
#
######################################################################
# Compiling the model
# -------------------
#
# The next step, once we've downloaded ResNet-50, is to compile it.
# To accomplish that, we are going to use ``tvmc compile``. The
# output we get from the compilation process is a TAR package,
# that can be used to run our model on the target device.
#
# .. code-block:: bash
#
# tvmc compile \
# --target "llvm" \
# --output compiled_module.tar \
# resnet50-v2-7.onnx
#
# Once compilation finishes, the output ``compiled_module.tar`` will be created. This
# can be directly loaded by your application and run via the TVM runtime APIs.
#
######################################################################
# .. note:: Defining the correct target
#
# Specifying the correct target (option ``--target``) can have a huge
# impact on the performance of the compiled module, as it can take
# advantage of hardware features available on the target. For more
# information, please refer to `Auto-tuning a convolutional network
# for x86 CPU <https://tvm.apache.org/docs/tutorials/autotvm/tune_relay_x86.html#define-network>`_.
#
######################################################################
#
# In the next step, we are going to use the compiled module, providing it
# with some inputs, to generate some predictions.
#
######################################################################
# Input pre-processing
# --------------------
#
# In order to generate predictions, we will need two things:
#
# - the compiled module, which we just produced;
# - a valid input to the model
#
# Each model is particular when it comes to expected tensor shapes, formats and data
# types. For this reason, most models require some pre- and
# post-processing, to ensure the input(s) is valid and to interpret the output(s).
#
# In TVMC, we adopted NumPy's ``.npz`` format for both input and output data.
# This is a well-supported NumPy format to serialize multiple arrays into a file.
#
# We will use the usual cat image, similar to other TVM tutorials:
#
# .. image:: https://s3.amazonaws.com/model-server/inputs/kitten.jpg
# :height: 224px
# :width: 224px
# :align: center
#
# For our ResNet 50 V2 model, the input is expected to be in ImageNet format.
# Here is an example of a script to pre-process an image for ResNet 50 V2.
#
from tvm.contrib.download import download_testdata
from PIL import Image
import numpy as np
img_url = "https://s3.amazonaws.com/model-server/inputs/kitten.jpg"
img_path = download_testdata(img_url, "imagenet_cat.png", module="data")
# Resize it to 224x224
resized_image = Image.open(img_path).resize((224, 224))
img_data = np.asarray(resized_image).astype("float32")
# ONNX expects NCHW input, so convert the array
img_data = np.transpose(img_data, (2, 0, 1))
# Normalize according to ImageNet
imagenet_mean = np.array([0.485, 0.456, 0.406])
imagenet_stddev = np.array([0.229, 0.224, 0.225])
norm_img_data = np.zeros(img_data.shape).astype("float32")
for i in range(img_data.shape[0]):
norm_img_data[i, :, :] = (img_data[i, :, :] / 255 - imagenet_mean[i]) / imagenet_stddev[i]
# Add batch dimension
img_data = np.expand_dims(norm_img_data, axis=0)
# Save to .npz (outputs imagenet_cat.npz)
np.savez("imagenet_cat", data=img_data)
######################################################################
# Running the compiled module
# ---------------------------
#
# With both the compiled module and input file in hand, we can run it by
# invoking ``tvmc run``.
#
# .. code-block:: bash
#
# tvmc run \
# --inputs imagenet_cat.npz \
# --output predictions.npz \
# compiled_module.tar
#
# When running the above command, a new file ``predictions.npz`` should
# be produced. It contains the output tensors.
#
# In this example, we are running the model on the same machine that we used
# for compilation. In some cases we might want to run it remotely via
# an RPC Tracker. To read more about these options please check ``tvmc
# run --help``.
#
######################################################################
# Output post-processing
# ----------------------
#
# As previously mentioned, each model will have its own particular way
# of providing output tensors.
#
# In our case, we need to run some post-processing to render the
# outputs from ResNet 50 V2 into a more human-readable form.
#
# The script below shows an example of the post-processing to extract
# labels from the output of our compiled module.
#
import os.path
import numpy as np
from scipy.special import softmax
from tvm.contrib.download import download_testdata
# Download a list of labels
labels_url = "https://s3.amazonaws.com/onnx-model-zoo/synset.txt"
labels_path = download_testdata(labels_url, "synset.txt", module="data")
with open(labels_path, "r") as f:
labels = [l.rstrip() for l in f]
output_file = "predictions.npz"
# Open the output and read the output tensor
if os.path.exists(output_file):
with np.load(output_file) as data:
scores = softmax(data["output_0"])
scores = np.squeeze(scores)
        ranks = np.argsort(scores)[::-1]
        for rank in ranks[0:5]:
            print("class='%s' with probability=%f" % (labels[rank], scores[rank]))
########################################################################
# When running the script, a list of predictions should be printed similar
# to the example below.
#
# .. code-block:: bash
#
# $ python post_processing.py
# class='n02123045 tabby, tabby cat' with probability=0.610553
# class='n02123159 tiger cat' with probability=0.367179
# class='n02124075 Egyptian cat' with probability=0.019365
# class='n02129604 tiger, Panthera tigris' with probability=0.001273
# class='n04040759 radiator' with probability=0.000261
#
######################################################################
# Tuning the model
# ----------------
#
# In some cases, we might not get the expected performance when running
# inferences using our compiled module. In cases like this, we can make use
# of the auto-tuner, to find a better configuration for our model and
# get a boost in performance.
#
# Tuning in TVM refers to the process by which a model is optimized
# to run faster on a given target. This differs from training or
# fine-tuning in that it does not affect the accuracy of the model,
# but only the runtime performance.
#
# As part of the tuning process, TVM will try running many different
# operator implementation variants to see which perform best. The
# results of these runs are stored in a tuning records file, which is
# ultimately the output of the ``tune`` subcommand.
#
# In the simplest form, tuning requires you to provide three things:
#
# - the target specification of the device you intend to run this model on;
# - the path to an output file in which the tuning records will be stored, and finally,
# - a path to the model to be tuned.
#
#
# The example below demonstrates how that works in practice:
#
# .. code-block:: bash
#
# tvmc tune \
# --target "llvm" \
# --output autotuner_records.json \
# resnet50-v2-7.onnx
#
#
# Tuning sessions can take a long time, so ``tvmc tune`` offers many options to
# customize your tuning process, in terms of number of repetitions (``--repeat`` and
# ``--number``, for example), the tuning algorithm to be used, and so on.
# Check ``tvmc tune --help`` for more information.
#
# As an output of the tuning process above, we obtained the tuning records stored
# in ``autotuner_records.json``. This file can be used in two ways:
#
# - as an input to further tuning (via ``tvmc tune --tuning-records``), or
# - as an input to the compiler
#
# The compiler will use the results to generate high performance code for the model
# on your specified target. To do that we can use ``tvmc compile --tuning-records``.
# Check ``tvmc compile --help`` for more information.
#
######################################################################
# Final Remarks
# -------------
#
# In this tutorial, we presented TVMC, a command line driver for TVM.
# We demonstrated how to compile, run and tune a model, as well
# as discussed the need for pre and post processing of inputs and outputs.
#
# Here we presented a simple example using ResNet 50 V2 locally. However, TVMC
# supports many more features including cross-compilation, remote execution and
# profiling/benchmarking.
#
# To see what other options are available, please have a look at ``tvmc --help``.
#
|
apache-2.0
| 2,922,419,167,132,378,000
| 35.065476
| 104
| 0.661825
| false
| 3.684403
| false
| false
| false
|
pleed/pyqemu
|
target-i386/pyqemu/processinfo.py
|
1
|
26210
|
#!/usr/bin/python
import PyFlxInstrument
from Structures import *
# --- class Image ------------------------------------------------------
class Image( object):
def get_entrypoint( self):
try:
return self.cached.entrypoint
except:
return self.ldr_data_table_entry.EntryPoint
def get_sizeofimage( self):
try:
return self.cached.sizeofimage
except:
return self.ldr_data_table_entry.SizeOfImage
def get_dllbase( self):
try:
return self.cached.dllbase
except:
return self.ldr_data_table_entry.DllBase
def get_fulldllname( self):
try:
return self.cached.fulldllname
except:
return self.ldr_data_table_entry.FullDllName.str()
def get_basedllname( self):
try:
return self.cached.basedllname
except:
return self.ldr_data_table_entry.BaseDllName.str()
EntryPoint = property( get_entrypoint)
SizeOfImage = property( get_sizeofimage)
DllBase = property( get_dllbase)
FullDllName = property( get_fulldllname)
BaseDllName = property( get_basedllname)
Name = property( get_basedllname) # for compatibility with a yet-to-be-implemented general memory range class
def __init__( self, ldr_data_table_entry, process):
self.ldr_data_table_entry = ldr_data_table_entry
self.process = process
self.valid = False
self.exports_done = False
self.exports = {}
self.last_executed_page = None
self.image_type = IMAGE_TYPE_UNKNOWN
self.cached = GenericStruct()
self.pending_pages = set( [])
self.dump_pending = False
self.update()
def update( self):
# sanity check the LDR_DATA_TABLE_ENTRY struct:
# - Check whether DllBase is on a page boundary
# - Check whether EntryPoint is within [DllBase, DllBase+SizeOfImage) or 0
# - Check whether the entire DLL resides in userspace?
# - Check whether SizeOfImage is a multiple of the page size
# - Check whether SizeOfImage != 0
valid = self.valid
if not valid:
valid = True
valid = valid and not (self.ldr_data_table_entry.DllBase % PAGESIZE)
valid = valid and self.ldr_data_table_entry.EntryPoint >= self.ldr_data_table_entry.DllBase \
and self.ldr_data_table_entry.EntryPoint < self.ldr_data_table_entry.DllBase + self.ldr_data_table_entry.SizeOfImage
valid = valid and self.ldr_data_table_entry.DllBase < USER_KERNEL_SPLIT \
and self.ldr_data_table_entry.DllBase + self.ldr_data_table_entry.SizeOfImage < USER_KERNEL_SPLIT
valid = valid and not (self.ldr_data_table_entry.SizeOfImage % PAGESIZE)
valid = valid and self.ldr_data_table_entry.SizeOfImage != 0
# if we cannot yet fetch the FullDllName, try again later
try:
fulldllname = self.ldr_data_table_entry.FullDllName.str()
except PageFaultException, pagefault:
valid = False
self.pending_pages.add( pagefault.value / PAGESIZE)
#PyBochsC.pending_page( True)
if not self.valid and valid:
# this image was previously not valid, but is now, so it must be new
#if self.BaseDllName.startswith( self.process.eprocess.ImageFileName.strip( "\0")):
# print "Entrypoint is 0x%08x" % self.EntryPoint
# watchpoint = EntryPointWatchpoint( self.process, self.EntryPoint)
# self.process.watchpoints.add_function_call_watchpoint( watchpoint)
if self.BaseDllName.lower().endswith( '.dll'):
self.image_type = IMAGE_TYPE_DLL
elif self.BaseDllName.lower().endswith( '.exe'):
self.image_type = IMAGE_TYPE_EXE
#print "DLL: %s"%(self.BaseDllName.lower())
if self.valid or valid:
self.cached.entrypoint = int( self.ldr_data_table_entry.EntryPoint)
self.cached.sizeofimage = int( self.ldr_data_table_entry.SizeOfImage)
self.cached.dllbase = int( self.ldr_data_table_entry.DllBase)
self.cached.fulldllname = self.ldr_data_table_entry.FullDllName.str()
self.cached.basedllname = self.ldr_data_table_entry.BaseDllName.str()
if valid and self.process.watched and not hasattr( self, "pe"):
try:
self.pe = PE( VMemBackend( self.DllBase,
self.DllBase + self.SizeOfImage ),
self.BaseDllName,
True)
except PageFaultException, pagefault:
self.pending_pages.add( pagefault.value / PAGESIZE)
if valid and not self.exports_done and hasattr( self, "pe") and hasattr( self.pe.Exports, "ExportAddressTable"):
try:
self.exports.update(self.pe.Exports.all_exports())
self.process.symbols.update(self.exports)
self.exports_done = True
except PageFaultException, pagefault:
self.pending_pages.add( pagefault.value / PAGESIZE)
if not self.valid and valid and self.process.watched:
self.dump_pending = True
pending = False
for page in xrange( self.DllBase, self.DllBase + self.SizeOfImage, PAGESIZE):
try:
dummy = self.process.backend.read( page, 1)
except:
self.pending_pages.add( page / PAGESIZE)
pending = True
if pending:
pass
self.valid = valid
def dump( self):
start = self.DllBase
size = self.SizeOfImage
time = 0
try:
data = PyFlxInstrument.vmem_read( start, size)
tag = self.FullDllName
for p in xrange( start / PAGESIZE, (start + size) / PAGESIZE ):
if p in self.process.writes:
self.process.writes[ p].last_dumped = time
else:
self.process.writes[ p] = ModifiedPage( self, p)
self.process.writes[ p].last_dumped = time
self.dump_pending = False
except PageFaultException, pagefault:
self.pending_pages.add( pagefault.value / PAGESIZE)
#PyBochsC.pending_page( True)
# --- class Process ----------------------------------------------------
class Process( object):
def get_pid( self): return self.eprocess.UniqueProcessId
pid = property( get_pid)
def get_ppid( self): return self.eprocess.InheritedFromUniqueProcessId
ppid = property( get_ppid)
def get_cur_tid(self):
teb = self.kpcr.PrcbData.CurrentThread.deref().Teb
if teb.is_null():
return -1
return teb.deref().ClientId.UniqueThread
cur_tid = property(get_cur_tid)
def get_imagefilename( self): return self.eprocess.ImageFileName
ImageFileName = property( get_imagefilename)
def check_update_pending( self): return not self.valid or self.last_updated < self.last_seen
update_pending = property( check_update_pending)
def innovate( self):
self.innovated = True
def innovates( function):
#function decorator
def innovating_wrapper( self, *args, **kwargs):
self.innovate()
function( *args, **kwargs)
return innovating_wrapper
def ev_write( self, address, size):
# Convention: This is only called if the process is watched
# Writes from kernel space code should not be of interest
eip = PyFlxInstrument.eip()
if eip < USER_KERNEL_SPLIT and address + size < USER_KERNEL_SPLIT: # FIXME investigate: why is the write target limitation here?
self.shortterm_writes.add( address/256)
page = address / PAGESIZE
if page not in self.writes:
self.writes[ page] = ModifiedPage( self, page)
self.writes[ page].write(eip, address, size) # FIXME do we care about spilling writes across two pages?
return -1 # if the process is watched, we want to take note of writes happening from userspace code
else:
return 0
def dump_range( self, address):
# TODO:
# really dump ranges, attach tags, dump whole images if range falls within image
time = 0 #PyBochsC.emulator_time()
vad = self.vad_tree.by_address( address)
if vad != None:
start = vad.StartingVpn * PAGESIZE
end = (vad.EndingVpn + 1) * PAGESIZE
size = end-start
try:
t = DUMP_IMAGE
tag = vad.ControlArea.deref().FilePointer.deref().FileName.str()
except:
# Maybe packers like morphine modified the module lists for us?
image = self.get_image_by_address( address)
if image:
t = DUMP_IMAGE
tag = image.BaseDllName
else:
t = DUMP_UNSPECIFIED
tag = "anonymous"
try:
data = PyFlxInstrument.vmem_read( start, size)
t |= DUMP_FULL
except PageFaultException, pagefault:
print "Page fault when trying to dump", pagefault
# zero-pad missing memory
data = ""
print "trying to dump from 0x%08x to 0x%08x" % (start, end)
for i in xrange( start, end, PAGESIZE):
try:
data += PyFlxInstrument.vmem_read( i, PAGESIZE)
except PageFaultException:
data += '\0' * PAGESIZE
t |= DUMP_PARTIAL
# clear the sets:
page = address / PAGESIZE
writers = self.writes[ page].writers.copy()
while page in self.writes:
del self.writes[page]
page -= 1
page = address / PAGESIZE + 1 #self.writes[address/PAGESIZE] already clear
while page in self.writes:
del self.writes[page]
page += 1
print "about to insert a %u byte dump into the database, with type %u and tag %s" %( len(data), t, tag)
else:
raise Exception( "Executing non-existing memory?")
def pending_page( self):
if len( self.pending_pages) > 0:
return self.pending_pages.pop() * PAGESIZE
else:
for base in self.images:
if len( self.images[ base].pending_pages) > 0:
return self.images[ base].pending_pages.pop() * PAGESIZE
elif self.images[ base].dump_pending:
self.images[ base].dump()
return None
def print_stack( self, function, source, offset = 0):
function_name = function.name
ESP = PyFlxInstrument.genreg(PyBochsC.REG_ESP)
function_definition = []
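        # Build a struct-style format string per argument: (w)char
        # pointers become P(STR)/P(WSTR) so they can be dereferenced for
        # display; any other type contributes one "I" (32-bit dword) per
        # 4 bytes of its size.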
for arg in function.arguments:
if type(arg.type) == pygccxml.declarations.cpptypes.pointer_t:
if str(arg.type.base) in ('xxxchar', 'char const'):
t = P(STR)
elif str(arg.type.base) in ('xxxwchar_t', 'wchar_t const'):
t = P(WSTR)
else:
t = "I"
elif type(arg.type) in (pygccxml.declarations.typedef.typedef_t, pygccxml.declarations.cpptypes.declarated_t):
if arg.type.declaration.name in ('LPCSTR', 'xxxLPSTR'):
t = P(STR)
elif arg.type.declaration.name in ('LPCWSTR','xxxLPWSTR'):
t = P(WSTR)
else:
dwords = arg.type.byte_size / 4
t = "I" * dwords # FIXME
else:
dwords = arg.type.byte_size / 4
t = "I" * dwords # FIXME
arg_definition = (arg.name, t)
function_definition.append(arg_definition)
stack = Stack(function_definition)( self.backend, ESP + offset)
output = []
for arg_def in function_definition:
arg = getattr( stack, arg_def[ 0])
if hasattr( arg, "deref"):
try:
output.append(u"%s = %s" % (arg_def[0], arg.deref()))
except PageFaultException:
output.append("%s = !0x%08x" % (arg_def[0], arg.offset))
except UnicodeEncodeError:
s = arg.deref()
output.append(u"%s = %s %u %s" % (arg_def[0],'+++',len(arg.deref()),unicode(s).encode('utf-8')))
except UnicodeDecodeError:
s = arg.deref()
str(s)
output.append(u"%s = %s %u %r" % (arg_def[0],'---',len(arg.deref()),str(s))) # FIXME UNICODE DECODE ERRORS
else:
output.append(u"%s = %s" % (arg_def[0], arg))
foo = u', '.join(output)
if offset:
print u"PPID %u/PID %u/TID %u/STOLEN/0x%08x -> %s(%r)" % (self.ppid,self.pid,self.cur_tid,source,unicode(function_name), foo)# FIXME UNICODE DECODE ERRORS
else:
print u"PPID %u/PID %u/TID %u/0x%08x -> %s(%r)" % (self.ppid,self.pid,self.cur_tid,source,unicode(function_name), foo)# FIXME UNICODE DECODE ERRORS
def ev_branch( self, source, target, type):
# Convention: This is only called if the process is watched
if target < USER_KERNEL_SPLIT:
#self.watchpoints.visit_location( target)
self.shortterm_branches.add( target/256)
func = None
source_image = self.get_image_by_address(source)
target_image = self.get_image_by_address(target)
if source_image == target_image:
pass
elif (source_image and source_image.DllBase == self.eprocess.Peb.deref().ImageBaseAddress and target_image) \
or (not source_image and target_image):
# store branches from within the image to other memory (for import reconstruction)
if target in self.symbols:
function_name = self.symbols[target][2]
if target not in self.gccxml_cache and function_name not in self.unknown_symbols:
self.innovate() # new, unknown branch target
try:
func = namespace.free_function(name=function_name)
self.gccxml_cache[target] = func
except pygccxml.declarations.matcher.declaration_not_found_t:
self.unknown_symbols.append(function_name)
except pygccxml.declarations.matcher.multiple_declarations_found_t:
# print "multiple matches for function '%s()'" % function_name
func = namespace.free_functions(name=function_name)[0]
self.gccxml_cache[target] = func
elif target in self.gccxml_cache:
func = self.gccxml_cache[target]
if func:
self.print_stack(func, source)
elif target not in self.symbols and source < USER_KERNEL_SPLIT: # kernel returns to userland addresses, but there's normally no symbol there
# interesting, target seems to be within a DLL, but there's no symbol at that address
# stolen bytes?
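                # (annotation) Some packers "steal" the first
                # instructions of an API function into their own stub
                # and then jump directly past the official entry point.
                # To still attribute the call, look up the nearest
                # preceding exported symbol and disassemble forward to
                # the branch target, giving up if ESP is clobbered or a
                # RET is seen, and accumulate the PUSH/POP stack delta
                # so the argument offsets can be corrected when printing
                # the stack.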
earlier_symbols = [address for address in self.symbols.keys() if address < target]
earlier_symbols.sort()
if earlier_symbols:
orig_target = target
target = earlier_symbols[-1]
address = target
stack_offset = 0
invalid = False
while address < orig_target:
insn = pydasm.get_instruction( PyFlxInstrument.vmem_read( address, 50, self.pdb), pydasm.MODE_32) # FIXME use real x86 instruction length limit here
#print pydasm.get_instruction_string(insn, pydasm.FORMAT_INTEL, address), insn.op1.reg, insn.op2.reg, insn.op3.reg
if not insn:
invalid = True
break
elif insn and insn.op1.reg == pydasm.REGISTER_ESP:
invalid = True # ESP is destroyed
elif insn.type == pydasm.INSTRUCTION_TYPE_POP:
stack_offset -= 4
elif insn.type == pydasm.INSTRUCTION_TYPE_PUSH:
stack_offset += 4
elif insn.type == pydasm.INSTRUCTION_TYPE_RET:
invalid = True # indicator of function boundary -> no luck for us
address += insn.length
candidate = self.symbols[target]
function_name = candidate[2]
if not invalid:
if target not in self.gccxml_cache and function_name not in self.unknown_symbols:
self.innovate() # new, unknown branch target
try:
func = namespace.free_function(name=function_name)
self.gccxml_cache[target] = func
except pygccxml.declarations.matcher.declaration_not_found_t:
self.unknown_symbols.append(function_name)
except pygccxml.declarations.matcher.multiple_declarations_found_t:
# multiple matches
func = namespace.free_functions(name=function_name)[0]
self.gccxml_cache[target] = func
elif target in self.gccxml_cache:
func = self.gccxml_cache[target]
if func:
self.print_stack(func, source, stack_offset)
else:
print "0x%08x -> 0x%08x: symbol at target not found, invalid candidate: %s, offset %u, image there is %s" % (source, orig_target, str(candidate),orig_target-target, target_image.BaseDllName)
pass
elif source_image and source_image.DllBase != self.eprocess.Peb.deref().ImageBaseAddress:
pass
page = target / PAGESIZE
if page in self.writes and target in self.writes[ page].writeset:
self.innovate()
print "executing 0x%08x -> 0x%08x" % (source, target)
self.dump_range( target)
return 1
else:
# not in user mode
return 0
def get_image_by_address( self, address):
bases = [base for base in self.images if base <= address]
bases.sort()
if bases:
image = self.images[bases[-1]]
else:
return None
if address <= image.DllBase + image.SizeOfImage:
return image
else:
return None
def __init__( self):
#self.pdb = pdb
#linear = PyBochsC.logical2linear( 0x30, 0, pdb)
linear = PyFlxInstrument.creg(4) #R_FS
self.backend = VMemBackend( 0, 0x100000000)
self.kpcr = KPCR( self.backend, linear)
self.watched = False
#self.watchpoints = Watchpoints( self)
self.symbols = {}
self.unknown_symbols = [] # insert symbols that pygccxml cannot find here
self.gccxml_cache = {}
self.pending_pages = set([])
self.images = {} # indexed by base address
self.valid = False
self.eprocess = None
self.last_seen = 0
self.last_updated = 0
self.vad_tree = VadTree( self)
self.writes = {}
self.last_executed_modified_page = None
self.innovated = False
self.dll_locations = set( [])
self.shortterm_writes = set( [])
self.shortterm_branches = set( [])
self.update()
def check_watched( self):
if not self.valid:
return False
return False #flx
if not self.watched:
imagefilename = self.kpcr.PrcbData.CurrentThread.deref().ApcState.Process.deref().ImageFileName
self.watched = globals()[ "samplename"].upper().startswith( imagefilename.strip( "\0").upper())
try:
ppid = self.ppid
except PageFaultException, pagefault:
self.pending_pages.add( pagefault.value / PAGESIZE)
#PyBochsC.pending_page( True)
return self.watched
for pdb in helper.processes:
try:
pid = helper.processes[ pdb].pid
except PageFaultException, pagefault:
self.pending_pages.add( pagefault.value / PAGESIZE)
#PyBochsC.pending_page( True)
continue
except AttributeError:
continue
if helper.processes[ pdb].watched and ppid == pid:
self.watched = True
break
if self.watched:
print "Now watching process with name '%s'" % imagefilename
self.innovate()
return self.watched
def update( self):
# Sanity check the data structures
valid = self.valid
if not valid:
valid = True
eprocess = self.kpcr.PrcbData.CurrentThread.deref().ApcState.Process.deref()
valid = valid and eprocess.CreateTime != 0
valid = valid and eprocess.ActiveThreads != 0
valid = valid and (eprocess.Peb.pointer & 0x7ff00000) == 0x7ff00000 # FIXME use named constant
valid = valid and eprocess.UniqueProcessId != 0
valid = valid and eprocess.InheritedFromUniqueProcessId != 0
# If all else fails, is this the System Process?
valid = valid or eprocess.ImageFileName.startswith( "System") \
and eprocess.UniqueProcessId == 4 \
and eprocess.InheritedFromUniqueProcessId == 0
# If all else fails, is this the Idle Process?
valid = valid or eprocess.ImageFileName.startswith( "Idle") \
and eprocess.UniqueProcessId == 4 \
and eprocess.InheritedFromUniqueProcessId == 0
if not self.valid and valid:
# new process
# print "New process '%s', PID %u, PPID %u" % (eprocess.ImageFileName, eprocess.UniqueProcessId, eprocess.InheritedFromUniqueProcessId)
# Cache eprocess - FIXME does doing this once suffice? is this even real caching( it's a StructuredData() after all)
self.eprocess = eprocess
if self.valid:
self.update_images()
self.valid = valid
self.check_watched()
self.last_updated = 0 #PyBochsC.emulator_time()
def update_images( self):
try:
eprocess = self.kpcr.PrcbData.CurrentThread.deref().ApcState.Process.deref()
except:
print "Could not fetch eprocess struct for process with page directory base 0x%08x" % self.pdb
return
try:
Peb = eprocess.Peb.deref()
except:
print "Could not fetch Peb pointed to by pointer at 0x%08x, pdb is 0x%08x" \
% (eprocess.Peb.offset, self.pdb)
return
try:
LdrData = eprocess.Peb.deref().Ldr.deref()
except:
print "Could not fetch LdrData pointed to by pointer at 0x%08x, pdb is 0x%08x" \
% ( eprocess.Peb.deref().Ldr.offset, self.pdb)
return
module_list = LdrData.InMemoryOrderModuleList
image = LdrData.InMemoryOrderModuleList.next()
while None != image:
if image.DllBase not in self.images:
# a new DLL was found in memory
self.innovate()
self.images[ image.DllBase] = Image( image, self)
elif not self.images[ image.DllBase].valid or not self.images[ image.DllBase].exports_done:
self.images[ image.DllBase].update()
elif self.watched and not hasattr( self.images[ image.DllBase], "pe"):
self.images[ image.DllBase].update()
image = LdrData.InMemoryOrderModuleList.next()
def enter( self):
if self.watched:
w = len( self.shortterm_writes)
b = len( self.shortterm_branches)
ratio = b and float( w) / float( b)
if w >= 50:
ratio = b and float( w) / float( b)
if ratio > 2:
self.innovate()
print "writes: %8u, branch targets: %6u, ratio: %04.2f" % ( w, b, ratio)
self.shortterm_writes.clear()
self.shortterm_branches.clear()
self.last_seen = 0 #PyBochsC.emulator_time()
# PyBochsC.pending_page( self.pending_pages != [])
if self.watched and self.innovated:
helper.sched_nonwatched = 0
self.innovated = False
elif self.valid and not self.eprocess.UniqueProcessId in (0,4):
helper.sched_nonwatched += 1
if not helper.sched_nonwatched % 200:
print helper.sched_nonwatched
if helper.sched_nonwatched > LIVENESS_BOUND and CHECK_LIVENESS:
print "No watched process appears to be live and showing progress, shutting down!"
#PyBochsC.shutdown()
pass
def leave( self):
pass
|
gpl-2.0
| 7,401,991,622,607,399,000
| 40.86901
| 218
| 0.544525
| false
| 4.182224
| false
| false
| false
|
tangle70/Python
|
ssh-listfiles.py
|
1
|
1039
|
#!/bin/env/python
###################################################################################
#
# A script to list files in a directory via SSH using the paramiko module.
#
###################################################################################
import paramiko
def listFiles(srv, uname, passwd):
try:
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(srv, username=uname, password=passwd)
stdin, stdout, stderr = ssh.exec_command('ls')
stdin.flush()
data = stdout
print '################################################'
print srv
for line in data:
line = line.replace('\n','')
print ' ', line
except:
print '################################################'
        print 'ERROR: connecting to', srv
srv = 'srv'
uname = 'uname'
passwd = 'passwd'
listFiles(srv,uname,passwd)
print '################################################'
|
gpl-2.0
| -1,191,662,196,745,778,700
| 30.484848
| 83
| 0.407122
| false
| 4.971292
| false
| false
| false
|
ftfarias/PySubsim
|
old/ship.py
|
1
|
1753
|
# -*- coding: utf-8 -*-
# class Ship(MovableNewtonObject):
# def __init__(self, drag_factor, max_turn_per_hour, max_acceleration):
# super(Ship, self).__init__()
# self._rudder = 0
# self.max_turn_per_hour = max_turn_per_hour
# self.drag_factor = drag_factor
# self.frontal_drag_factor = drag_factor
# self.drag_factor = drag_factor
# self.drag_force = Point(0, 0)
# self.turbine_acceleration = Point(0, 0)
# self.turbine = Turbine(self, max_acceleration)
#     def get_course(self):  # (reconstructed header; the original def line is missing)
#         return self._velocity.angle
# def set_course(self, angle):
# '''
# :param angle: new angle in radians
# :return: none
# '''
# angle = normalize_angle_2pi(angle)
# self._velocity.angle = angle
# self._acceleration.angle = self._velocity.angle # assumes the rotation also changes the acceleration
# def __str__(self):
# return "pos:{p} vel:{v}({vt:.1f};{va:.0f}˚) accel:{a}({at:.1f};{aa:.0f}˚) rudder:{rudder}".format(
# p=self._position,
# v=self._velocity,
# vt=self._velocity.angle,
# va=self._velocity.bearing,
# a=self._acceleration,
# at=self._acceleration.angle,
# aa=self._acceleration.bearing,
# rudder=self.rudder)
#
# def debug(self):
# return "pos:{p} vel:{v}({vt:.1f};{va:.0f}˚) accel:{a}({at:.1f};{aa:.0f}˚)".format(
# p=self._position,
# v=self._velocity,
# vt=self._velocity.angle,
# va=self._velocity.bearing,
# a=self._acceleration,
# at=self._acceleration.angle,
# aa=self._acceleration.bearing)
#
#
|
gpl-3.0
| -4,022,949,757,789,183,500
| 30.232143
| 111
| 0.527158
| false
| 3.041739
| false
| false
| false
|
dziadu/gitbrowser
|
settings/base.py
|
1
|
2525
|
"""
Django settings for gitbrowser project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from django.core.urlresolvers import reverse_lazy
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = None
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'django.contrib.sitemaps',
'gitbrowser',
'bootstrap3'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'gitbrowser.middlewares.LoginRequiredMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'gitbrowser.middlewares.InterceptGitwebMiddleware',
'gitbrowser.middlewares.ContentSecurityPolicyMiddleware',
)
ROOT_URLCONF = 'gitbrowser.urls'
WSGI_APPLICATION = 'gitbrowser.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
'KEY_FUNCTION': os.path.join(BASE_DIR, 'utils.cache.gen_cache_key'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
LOGIN_REDIRECT_URL = reverse_lazy('list')
LOGOUT_URL = LOGIN_REDIRECT_URL
LOGIN_EXEMPT_URLS = [
'^robots\.txt$',
]
# A single string or a list of strings will be appended to all CSPs
# A dictionary will be merged with the default CSPs
CONTENT_SECURITY_POLICY_URLS = None
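# An illustrative (commented-out) example -- the exact merge semantics
# are implemented in gitbrowser.middlewares.ContentSecurityPolicyMiddleware:
# CONTENT_SECURITY_POLICY_URLS = ['https://cdn.example.org']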
|
gpl-2.0
| 4,518,041,189,520,562,700
| 23.514563
| 71
| 0.752475
| false
| 3.188131
| false
| false
| false
|
python-recsys/mrec
|
mrec/base_recommender.py
|
1
|
3656
|
class BaseRecommender(object):
"""
Minimal interface to be implemented by recommenders.
"""
def get_similar_items(self,j,max_similar_items=30):
"""
Get the most similar items to a supplied item.
Parameters
==========
j : int
Index of item for which to get similar items.
max_similar_items : int
Maximum number of similar items to return.
Returns
=======
sims : list
Sorted list of similar items, best first. Each entry is
a tuple of the form (i,score).
"""
pass
def recommend_items(self,dataset,u,max_items=10,return_scores=True):
"""
Recommend new items for a user.
Parameters
==========
dataset : scipy.sparse.csr_matrix
User-item matrix containing known items.
u : int
Index of user for which to make recommendations.
max_items : int
Maximum number of recommended items to return.
return_scores : bool
If true return a score along with each recommended item.
Returns
=======
recs : list
List of (idx,score) pairs if return_scores is True, else
just a list of idxs.
"""
pass
def batch_recommend_items(self,dataset,max_items=10,return_scores=True,show_progress=False):
"""
Recommend new items for all users in the training dataset.
Parameters
==========
dataset : scipy.sparse.csr_matrix
User-item matrix containing known items.
max_items : int
Maximum number of recommended items to return.
return_scores : bool
If true return a score along with each recommended item.
show_progress: bool
If true print something to stdout to show progress.
Returns
=======
recs : list of lists
Each entry is a list of (idx,score) pairs if return_scores is True,
else just a list of idxs.
"""
# default implementation, you may be able to optimize this for some recommenders.
recs = []
for u in xrange(self.num_users):
if show_progress and u%1000 == 0:
print u,'..',
recs.append(self.recommend_items(dataset,u,max_items,return_scores))
if show_progress:
print
return recs
def range_recommend_items(self,dataset,user_start,user_end,max_items=10,return_scores=True):
"""
Recommend new items for a range of users in the training dataset.
Assumes you've already called fit() to learn the similarity matrix.
Parameters
==========
dataset : scipy.sparse.csr_matrix
User-item matrix containing known items.
user_start : int
Index of first user in the range to recommend.
user_end : int
Index one beyond last user in the range to recommend.
max_items : int
Maximum number of recommended items to return.
return_scores : bool
If true return a score along with each recommended item.
Returns
=======
recs : list of lists
Each entry is a list of (idx,score) pairs if return_scores is True,
else just a list of idxs.
"""
# default implementation, you may be able to optimize this for some recommenders.
recs = []
for u in xrange(user_start,user_end):
recs.append(self.recommend_items(dataset,u,max_items,return_scores))
return recs
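# ----------------------------------------------------------------------
# The sketch below is NOT part of mrec; it illustrates one minimal way
# to implement the interface above. It assumes a scipy.sparse.csr_matrix
# dataset and ranks items purely by global popularity, skipping items
# the user has already seen.
import numpy as np

class PopularityRecommender(BaseRecommender):

    def fit(self,dataset):
        # total interaction mass per item, used as a global score
        self.item_scores = np.asarray(dataset.sum(axis=0)).ravel()
        self.num_users = dataset.shape[0]

    def recommend_items(self,dataset,u,max_items=10,return_scores=True):
        known = set(dataset[u].indices)  # items the user already has
        recs = []
        for i in np.argsort(self.item_scores)[::-1]:
            i = int(i)
            if i in known:
                continue
            recs.append((i,self.item_scores[i]) if return_scores else i)
            if len(recs) == max_items:
                break
        return recs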
|
bsd-3-clause
| 2,509,509,946,566,095,400
| 33.168224
| 96
| 0.576313
| false
| 4.657325
| false
| false
| false
|
wisechengyi/pants
|
contrib/python/src/python/pants/contrib/python/checks/checker/variable_names.py
|
1
|
5328
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import ast
import keyword
import re
from functools import wraps
import six
from pants.contrib.python.checks.checker.common import CheckstylePlugin
ALL_LOWER_CASE_RE = re.compile(r"^[a-z][a-z\d]*$")
ALL_UPPER_CASE_RE = re.compile(r"^[A-Z][A-Z\d]+$")
LOWER_SNAKE_RE = re.compile(r"^([a-z][a-z\d]*)(_[a-z\d]+)*$")
UPPER_SNAKE_RE = re.compile(r"^([A-Z][A-Z\d]*)(_[A-Z\d]+)*$")
UPPER_CAMEL_RE = re.compile(r"^([A-Z][a-z\d]*)+$")
RESERVED_NAMES = frozenset(keyword.kwlist)
BUILTIN_NAMES = dir(six.moves.builtins)
def allow_underscores(num):
def wrap(function):
@wraps(function)
def wrapped_function(name):
if name.startswith("_" * (num + 1)):
return False
return function(name.lstrip("_"))
return wrapped_function
return wrap
@allow_underscores(1)
def is_upper_camel(name):
"""UpperCamel, AllowingHTTPAbbrevations, _WithUpToOneUnderscoreAllowable."""
return bool(UPPER_CAMEL_RE.match(name) and not ALL_UPPER_CASE_RE.match(name))
@allow_underscores(2)
def is_lower_snake(name):
"""lower_snake_case, _with, __two_underscores_allowable."""
return LOWER_SNAKE_RE.match(name) is not None
def is_reserved_name(name):
return name in BUILTIN_NAMES or name in RESERVED_NAMES
def is_reserved_with_trailing_underscore(name):
"""For example, super_, id_, type_"""
if name.endswith("_") and not name.endswith("__"):
return is_reserved_name(name[:-1])
return False
def is_builtin_name(name):
"""For example, __foo__ or __bar__."""
if name.startswith("__") and name.endswith("__"):
return ALL_LOWER_CASE_RE.match(name[2:-2]) is not None
return False
@allow_underscores(2)
def is_constant(name):
return UPPER_SNAKE_RE.match(name) is not None
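# Illustrative examples (not part of the plugin) of how the helpers above
# classify names; each line follows directly from the regexes/decorators:
#
#   is_upper_camel("HTTPServer") -> True (abbreviations allowed)
#   is_upper_camel("_Private") -> True (one leading underscore tolerated)
#   is_upper_camel("CONSTANT") -> False (all-caps names are excluded)
#   is_lower_snake("__very_private") -> True (up to two underscores)
#   is_constant("MAX_RETRIES") -> True
#   is_reserved_with_trailing_underscore("type_") -> True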
class PEP8VariableNames(CheckstylePlugin):
"""Enforces PEP8 recommendations for variable names.
Specifically:
UpperCamel class names
lower_snake / _lower_snake / __lower_snake function names
lower_snake expression variable names
CLASS_LEVEL_CONSTANTS = {}
GLOBAL_LEVEL_CONSTANTS = {}
"""
@classmethod
def name(cls):
return "variable-names"
CLASS_GLOBAL_BUILTINS = frozenset({"__slots__", "__metaclass__",})
def iter_class_methods(self, class_node):
for node in class_node.body:
if isinstance(node, ast.FunctionDef):
yield node
def iter_class_globals(self, class_node):
for node in class_node.body:
# TODO(wickman) Occasionally you have the pattern where you set methods equal to each other
# which should be allowable, for example:
# class Foo(object):
# def bar(self):
# pass
# alt_bar = bar
if isinstance(node, ast.Assign):
for name in node.targets:
if isinstance(name, ast.Name):
yield name
def nits(self):
class_methods = set()
all_methods = {
function_def
for function_def in ast.walk(self.python_file.tree)
if isinstance(function_def, ast.FunctionDef)
}
for class_def in self.iter_ast_types(ast.ClassDef):
if not is_upper_camel(class_def.name):
yield self.error("T000", "Classes must be UpperCamelCased", class_def)
for class_global in self.iter_class_globals(class_def):
if (
not is_constant(class_global.id)
and class_global.id not in self.CLASS_GLOBAL_BUILTINS
):
yield self.error(
"T001", "Class globals must be UPPER_SNAKE_CASED", class_global
)
if not class_def.bases or all(
isinstance(base, ast.Name) and base.id == "object" for base in class_def.bases
):
class_methods.update(self.iter_class_methods(class_def))
else:
# If the class is inheriting from anything that is potentially a bad actor, rely
# upon checking that bad actor out of band. Fixes PANTS-172.
for method in self.iter_class_methods(class_def):
all_methods.discard(method)
for function_def in all_methods - class_methods:
if is_reserved_name(function_def.name):
yield self.error("T801", "Method name overrides a builtin.", function_def)
# TODO(wickman) Only enforce this for classes that derive from object. If they
# don't derive object, it's possible that the superclass naming is out of its
# control.
for function_def in all_methods:
if not any(
(
is_lower_snake(function_def.name),
is_builtin_name(function_def.name),
is_reserved_with_trailing_underscore(function_def.name),
)
):
yield self.error("T002", "Method names must be lower_snake_cased", function_def)
|
apache-2.0
| 5,738,994,270,418,695,000
| 34.052632
| 103
| 0.598161
| false
| 3.738947
| false
| false
| false
|
hckrtst/learnpython
|
py3_essential_training/19 Projects/testimonials/bwCGI.py
|
1
|
3856
|
#!/usr/bin/python3
# bwCGI.py by Bill Weinman <http://bw.org/contact/>
# Copyright (c) 1995-2010 The BearHeart Group, LLC
#
from cgi import FieldStorage
import cgitb
import os
__version__ = '0.3.2'
_cookie_var = 'HTTP_COOKIE'
class bwCGI:
''' handy cgi stuff '''
_header_state = False # True after header has been sent
cgi_cookies = dict()
cgi_headers = dict()
def __init__(self, **kwargs):
self.set_header('Content-type', kwargs.get('content_type', 'text/html'))
if _cookie_var in os.environ:
self.parse_cookies()
def set_header(self, k, v):
'''
set a header
use str for single value, list for multiples values
'''
if k in self.cgi_headers:
if isinstance(self.cgi_headers[k], list): self.cgi_headers[k].append(v)
else: self.cgi_headers[k] = [ self.cgi_headers[k], v ]
else:
self.cgi_headers[k] = str(v)
return v
def get_header(self, k):
return self.cgi_headers.get(k, None)
def send_header(self):
''' send the header(s), only once '''
if self._header_state: return
for k in self.cgi_headers:
value = self.cgi_headers[k]
if isinstance(value, list):
for v in value: print('{}: {}'.format(k, v))
else:
print('{}: {}'.format(k, value))
print()
self._header_state = True
cgitb.enable() # only after the header has been sent
def set_cookie(self, key, value, **kwargs):
''' kwargs can include expires, path, or domain
'''
cookie = '{}={}'.format(str(key), str(value))
if kwargs.keys():
for k in kwargs.keys():
cookie = '{}; {}={}'.format(cookie, k, kwargs[k])
self.set_header('Set-Cookie', cookie)
def parse_cookies(self):
for ck in os.environ[_cookie_var].split(';'):
lhs, rhs = ck.strip().split('=')
self.cgi_cookies[lhs.strip()] = rhs.strip()
def get_cookies(self):
        return self.cgi_cookies
def get_cookie(self, key):
return self.cgi_cookies.get(key, None)
def linkback(self):
''' return a relative URI for use as a linkback to this script '''
for e in ( 'REQUEST_URI', 'SCRIPT_NAME' ):
if e in os.environ:
l = os.environ[e]
break
else: return '*** cannot make linkback ***'
if '?' in l: l = l[0:l.find('?')]
return os.path.basename(l)
def vars(self):
return FieldStorage()
# utility methods
def entity_encode(self, s):
''' convert unicode to XML entities
returns encoded string
'''
outbytes = bytearray()
for c in s:
if ord(c) > 127:
outbytes += bytes('&#{:d};'.format(ord(c)), encoding = 'utf_8')
else: outbytes.append(ord(c))
return str(outbytes, encoding = 'utf_8')
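        # e.g. (illustrative): entity_encode('caf\u00e9') returns 'caf&#233;'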
def test():
if _cookie_var not in os.environ:
os.environ[_cookie_var] = 'one=1; two=2; three=3'
cgi = bwCGI(content_type='text/plain')
cgi.set_header('X-bwCGI', __version__)
cgi.set_header('X-number', 42)
cgi.set_cookie('one', 1)
cgi.set_cookie('two', 2)
cgi.set_cookie('three', 3, path='/', expires='31-Dec-2010 23:59:59 GMT', domain='.bw.org')
cgi.set_cookie('five', 5)
cgi.send_header() # should only see one set of headers
cgi.send_header()
cgi.send_header()
print('Hello, CGI')
print('header X-bwCGI:', cgi.get_header('X-bwCGI'))
print('header Eggs:', cgi.get_header('Eggs'))
print('Cookies:')
print(sorted(cgi.get_cookies()))
print('cookie one:', cgi.get_cookie('one'))
print('cookie seven:', cgi.get_cookie('seven'))
if __name__ == '__main__': test()
|
mit
| -3,525,216,791,796,005,000
| 31.133333
| 94
| 0.547977
| false
| 3.518248
| false
| false
| false
|
MMaus/mutils
|
mmnotebooks/bslip.py
|
1
|
31581
|
from libshai import integro
from pylab import (norm, pi, hstack, vstack, array, sign, sin, cos, arctan2,
sqrt, zeros,
figure, subplot, plot, legend, xlabel, ylabel)
from numpy import float64
from copy import deepcopy
import mutils.io as mio
import fastode # local!
class SimulationError(Exception):
pass
class BSLIP(mio.saveable):
""" Class of the bipedal walking SLIP """
def __init__(self, params=None, IC=None):
"""
The BSLIP is a bipedal walking SLIP model.
params (mutils.misc.Struct): parameter of the model
IC (array): initial conditions. [x, y, z, vx, vy, vz]
*NOTE* the system starts in single stance and *must* have
positive vertical velocity ("vy > 0")
"""
super(BSLIP, self).__init__()
self.params = deepcopy(params)
self.state = deepcopy(IC)
self.odd_step = True # leg 1 or leg two on ground?
self.dt = .01
self.odess = fastode.FastODE('bslipss')
self.odeds = fastode.FastODE('bslipds')
self.buf = zeros((2000, self.odess.WIDTH), dtype=float64)
self.t = 0
self.t_td = 0
self.t_to = 0
self.singleStance = True
self.failed = False
self.skip_forces = False
self.ErrMsg = ""
# storage for ode solutions
self.feet1_seq = []
self.feet2_seq = []
self.t_ss_seq = []
self.t_ds_seq = []
self.y_ss_seq = []
self.y_ds_seq = []
self.forces_ss_seq = []
self.forces_ds_seq = []
self.DEBUG = False
if self.params is not None:
self.feet1_seq.append(self.params['foot1'])
self.feet2_seq.append(self.params['foot2'])
def init_ode(self):
""" re-initialize the ODE solver """
self.ode = integro.odeDP5(self.dy_Stance, pars=self.params)
self.ode.ODE_RTOL = 1e-9
def restore(self, filename):
"""
update the restore procedure: re-initialize the ODE solver!
:args:
filename (str): the filename where the model information is stored
"""
super(BSLIP, self).restore(filename)
self.ode = integro.odeDP5(self.dy_Stance, pars=self.params)
self.ode.ODE_RTOL = 1e-9
def legfunc1(self, t, y, pars):
"""
Force (scalar) function of leg 1: Here, spring function
:args:
t (float): time (ignored)
y (6x float): CoM state [position, velocity]
pars (dict): parameters of the model. Must include
'foot1' (3x float) foot1 position
'lp1' (4x float) parameters of leg 1
:returns:
f (float): the axial leg force ["f = k * (l - l0)"]
NOTE: Overwrite this function to get different models.
The signature must not change.
"""
#DEBUG:
#print 'pf1: ', pars['foot1']
l1 = norm(array(y[:3]) - array(pars['foot1']))
return -pars['lp1'][0] * (l1 - pars['lp1'][1])
def legfunc2(self, t, y, pars):
"""
leg function of leg 2: a spring function
:args:
t (float): time (ignored)
y (6x float): CoM state [position, velocity]
pars (dict): parameters of the model. Must include
'foot1' (3x float) foot1 position
'lp1' (4x float) parameters of leg 1
:returns:
f (float): the axial leg force ["f = k * (l - l0)"]
NOTE: Overwrite this function to get different models.
The signature must not change.
"""
l2 = norm(array(y[:3]) - array(pars['foot2']))
return -pars['lp2'][0] * (l2 - pars['lp2'][1])
def evt_vy0(self, t, states, traj, p):
"""
triggers the vy=0 event
:args:
t (2x float): list of time prior to and after event
states (2x array): list of states prior to and after event
traj (trajectory patch): a trajectory patch (ignored here)
:returns:
(bool) vy=0 detected? (both directions)
"""
return sign(states[0][4]) * sign(states[1][4]) != 1
def update_params_ss(self):
"""
Updates the model parameters in the single stance vy=0 event.
Here, this function does nothing.
Overwrite it in derived models to enable e.g. control actions.
"""
pass
def update_params_ds(self):
"""
Updates the model parameters in the double stance vy=0 event.
Here, this function does nothing.
Overwrite it in derived models to enable e.g. control actions.
"""
pass
def update_params_td(self):
"""
Updates the model parameters at touchdown events.
Here, this function does nothing.
Overwrite it in derived models to enable e.g. control actions.
"""
pass
def update_params_to(self):
"""
Updates the model parameters at takeoff events.
Here, this function does nothing.
Overwrite it in derived models to enable e.g. control actions.
"""
pass
def takeoff_event(self, t, states, traj, pars, legfun):
"""
triggers the take off of a leg
Hint: use a lambda function to adapt the call signature
This function is force-triggered. The parameter format (pars) must
be the same as for legfun (which is called from here!)
*NOTE* you can overwrite this method for derived models. However,
this is not required if the takeoff condition is "zero force".
:args:
t (2x float): list of time prior to and after event
states (2x array): list of states prior to and after event
traj (trajectory patch): a trajectory patch (ignored here)
pars (<any>): the leg functions parameters
legfun (function of (t, y, pars) ): the leg force function.
:returns:
(bool) takeoff detected? (force has falling zero crossing)
"""
F0 = legfun(t[0], states[0], pars)
F1 = legfun(t[1], states[1], pars)
return F0 > 0 and F1 <= 0
def touchdown_event(self, t, states, traj, pars):
"""
triggers the touchdown of the leading leg.
Hint: use a lambda function to adapt the call signature
:args:
t (2x float): list of time prior to and after event
states (2x array): list of states prior to and after event
traj (trajectory patch): a trajectory patch (ignored here)
pars (4x float): the leg functions parameters. Format:
[l0, alpha, beta, floorlevel]
:returns:
        (bool) touchdown detected? (foot height has falling zero crossing)
"""
def zfoot(state, pars):
foot = state[1] - pars[0] * sin(pars[1])
return foot - pars[3]
return zfoot(states[0], pars) > 0 and zfoot(states[1], pars) <= 0
def touchdown_event_refine(self, t, state, pars):
"""
The touchdown event function for refinement of touchdown detection.
The zero-crossing of the output is defined as instant of the event.
Hint: use a lambda function to adapt the call signature
:args:
t (float): time (ignored)
y (6x float): CoM state [position, velocity]
pars (4x float): the leg functions parameters. Format:
[l0, alpha, beta, floorlevel]
:returns:
        f (float): height of the foot above ground level (its zero crossing defines the event)
"""
foot = state.squeeze()[1] - pars[0] * sin(pars[1])
return foot - pars[3] # foot - ground level
def dy_Stance(self, t, y, pars, return_force = False):
"""
        This is the ode function that is passed to the solver. Internally, it calls:
legfunc1 - force of leg 1 (overwrite for new models)
legfunc2 - force of leg 2 (overwrite for new models)
:args:
t (float): simulation time
y (6x float): CoM state
pars (dict): parameters, will be passed to legfunc1 and legfunc2.
must also include 'foot1' (3x float), 'foot2' (3x float), 'm' (float)
and 'g' (3x float) indicating the feet positions, mass and direction of
gravity, respectively.
return_force (bool, default: False): return [F_leg1, F_leg2] (6x
float) instead of dy/dt.
"""
f1 = max(self.legfunc1(t, y, pars), 0) # only push
l1 = norm(array(y[:3]) - array(pars['foot1']))
f1_vec = (array(y[:3]) - array(pars['foot1'])) / l1 * f1
f2 = max(self.legfunc2(t, y, pars), 0) # only push
l2 = norm(array(y[:3]) - array(pars['foot2']))
f2_vec = (array(y[:3]) - array(pars['foot2'])) / l2 * f2
if return_force:
return hstack([f1_vec, f2_vec])
return hstack([y[3:], (f1_vec + f2_vec) / pars['m'] + pars['g']])
def get_touchdown(self, t, y, params):
"""
Compute the touchdown position of the leg. Overwrite this for different leg parameters!
:args:
t (float): time
y (6x float): state of the CoM
params (4x float): leg parameter: stiffness, l0, alpha, beta
:returns:
[xFoot, yFoot, zFoot] the position of the leg tip
"""
k, l0, alpha, beta = params
xf = y[0] + l0 * cos(alpha) * cos(beta)
yf = y[1] - l0 * sin(alpha)
zf = y[2] - l0 * cos(alpha) * sin(beta)
return array([xf, yf, zf])
def checkSim(self):
"""
Raises an error if the model failed.
Overwrite in derived classes to avoid raised errors.
"""
if self.failed:
raise SimulationError("simulation failed!")
def do_step(self):
"""
Performs a step from the current state, using the current parameters.
The simulation results are also stored in self.[y|t]_[s|d]s_seq,
the states and times of single and double support phases.
*requires*:
self.
- params (dict): model and leg function parameters
- odd_step (bool): whether or not to trigger contact of leg2 (leg1 otherwise)
- state (6x float): the initial state
:args:
(None)
:returns:
t_ss, y_ss, t_ds, y_ds: time and simulation results for single stance and double stance
phases
:raises:
TypeError - invalid IC or parameter
SimulationError - if the simulation fails.
"""
# test initial conditions.
        # test whether there is a current state and current parameters
if self.params is None:
raise TypeError("parameters not set")
if self.state is None:
raise TypeError("state (initial condition) not set")
if self.failed:
raise SimulationError("Simulation failed previously.")
#demo_p_reduced = [13100, 12900, 68.5 * pi / 180., -.05] # [k1, k2, alpha, beta]
#demo_p = { 'foot1' : [0, 0, 0],
# 'foot2' : [-1.5, 0, 0],
# 'm' : 80,
# 'g' : [0, -9.81, 0],
# 'lp1' : [13100, 1, 68.5 * pi / 180, -0.05], # leg params: stiffness, l0, alpha, beta
# 'lp2' : [12900, 1, 68.5 * pi / 180, 0.1],
# 'delta_beta' : .05
# }
p = self.params # shortcut
leadingleg = 1. if self.odd_step else 2.
pars = [p['lp1'][0],
p['lp2'][0],
p['lp1'][2],
p['lp2'][2],
p['lp1'][1],
p['lp2'][1],
p['lp1'][3],
p['lp2'][3],
p['m'],
p['g'][1],
p['foot1'][0],
p['foot1'][1],
p['foot1'][2],
p['foot2'][0],
p['foot2'][1],
p['foot2'][2],
leadingleg]
# maximal time for simulation of single stance or double stance (each)
max_T = 1.
# run single stance
self.buf[0, 1:] = array(self.state) #.copy()
N = self.odess.odeOnce(self.buf, self.t + max_T, dt=1e-3, pars = pars)
self.state = self.buf[N,1:].copy()
self.y_ss_seq.append(self.buf[:N+1, 1:].copy())
self.t_ss_seq.append(self.buf[:N+1,0].copy())
# quick sanity check: simulation time not exceeded?
if self.buf[N,0] - self.t >= max_T - 1e-2:
self.failed=True
print "N=", N
raise SimulationError("Maximal simulation time (single stance) reached!")
self.t = self.buf[N,0]
# touchdown detected:
# update foot parameters
# (1) foot2 = foot1
# (2) foot1 = [NEW]
# (3) leading_leg = ~leading_leg
# update leg positions; change trailing leg
y = self.state # shortcut
vx, vz = y[3], y[5]
a_v_com = -arctan2(vz, vx) # correct with our coordinate system
pars[13] = pars[10]
pars[15] = pars[12]
if pars[16] == 1.:
# stance leg is leg 1 -> update leg 2 params
pars[10] = y[0] + cos(pars[3]) * cos(pars[7] + a_v_com) * pars[5]
pars[12] = y[2] - cos(pars[3]) * sin(pars[7] + a_v_com) * pars[5]
#pars[13] = res[N, 1] + cos(pars[3])*cos(pars[7])*pars[5]
#pars[15] = res[N, 3] + cos(pars[3])*sin(pars[7])*pars[5]
            pars[16] = 2.
else:
pars[10] = y[0] + cos(pars[2]) * cos(pars[6] + a_v_com) * pars[4]
pars[12] = y[2] - cos(pars[2]) * sin(pars[6] + a_v_com) * pars[4]
#pars[10] = res[N, 1] + cos(pars[2])*cos(pars[6])*pars[4]
#pars[12] = res[N, 3] + cos(pars[2])*sin(pars[6])*pars[4]
            pars[16] = 1.
self.params['foot1'] = pars[10:13][:]
self.params['foot2'] = pars[13:16][:]
# run double stance
self.buf[0, 1:] = array(self.state) #.copy()
N = self.odeds.odeOnce(self.buf, self.t + max_T, dt=1e-3, pars = pars)
self.state = self.buf[N,1:].copy()
self.feet1_seq.append(self.params['foot1'])
self.feet2_seq.append(self.params['foot2'])
self.y_ds_seq.append(self.buf[:N+1, 1:].copy())
self.t_ds_seq.append(self.buf[:N+1,0].copy())
# quick sanity check: simulation time not exceeded?
if self.buf[N,0] - self.t >= max_T - 1e-2:
self.failed=True
raise SimulationError("Maximal simulation time (double stance) reached!")
self.t = self.buf[N,0]
#self.y_ds_seq.append(y2)
#self.t_ds_seq.append(t2)
self.odd_step = not self.odd_step
return self.t_ss_seq[-1], self.y_ss_seq[-1], self.t_ds_seq[-1], self.y_ds_seq[-1]
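        # NOTE (annotation): everything below this return is the older,
        # integro-based stepping implementation; it is unreachable now
        # that the fastode path above returns early, and is kept only
        # for reference.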
if self.odd_step:
td_pars = self.params['lp2'][1:] + [ground, ] # set touchdown parameters
td_pars_2 = self.params['lp2'] # another format of touchdown parameters (for get_touchdown)
newfoot = 'foot2' # which foot position to update?
to_evt_fun = self.legfunc1 # force generation for takeoff trigger in double support
to_evt_ds_refine = self.legfunc1 # function for refinement of DS
self.odd_step = False # next step is "even": leg "2" in single stance on ground
else:
td_pars = self.params['lp1'][1:] + [ground, ] # set touchdown parameters
td_pars_2 = self.params['lp1'] # another format of touchdown parameters (for get_touchdown)
newfoot = 'foot1' # which foot position to update?
to_evt_fun = self.legfunc2 # force generation for takeoff trigger in double support
to_evt_ds_refine = self.legfunc2 # function for refinement of DS
self.odd_step = True # next step is "odd": leg "1" in single stance on ground
# stage 1a: simulate until vy=0
self.singleStance = True
self.ode.event = self.evt_vy0
if self.state[4] <= 0:
self.failed = True
self.ErrMsg = ("initial vertical velocity < 0: single " +
"stance apex cannot be reached!")
t0 = self.t
tE = t0 + max_T
t_a, y_a = self.ode(self.state, t0, tE, dt=self.dt)
#d_pars_l2 = self.params['lp2'][1:] + [ground, ]
if self.DEBUG:
print "finished stage 1 (raw)"
if t_a[-1] >= tE:
self.failed = True
self.ErrMsg = ("max. simulation time exceeded - " +
"this often indicates simulation failure")
else:
tt1, yy1 = self.ode.refine(lambda tf, yf: yf[4])
if self.DEBUG:
print "finished stage 1 (fine)"
self.state = yy1
# compute forces
if not self.skip_forces:
forces_ss = [self.dy_Stance(xt, xy, self.params, return_force=True) for
xt, xy in zip(t_a, y_a)]
#self.forces_ss_seq.append()
t = [] # dummy, if next step is not executed
y = array([[]])
if not self.failed:
self.update_params_ss()
# stage 1b: simulate until touchdown of leading leg
# touchdown event of leading leg
self.ode.event = lambda t,states,traj,p: self.touchdown_event(t, states, traj, td_pars)
t0 = tt1
tE = t0 + max_T
t, y = self.ode(self.state, t0, tE, dt=self.dt)
if self.DEBUG:
print "finished stage 2 (raw)"
if t[-1] >= tE:
self.failed = True
self.ErrMsg = ("max. sim time exceeded in single stance - no "
+ "touchdown occurred")
else:
#d_pars_l2 = self.params['lp2'][1:] + [ground, ]
tt, yy = self.ode.refine(lambda tf, yf: self.touchdown_event_refine(tf, yf, td_pars))
if self.DEBUG:
print "finished stage 2 (fine)"
self.state = yy
forces_ss.extend([self.dy_Stance(xt, xy, self.params, return_force=True) for
xt, xy in zip(t[1:], y[1:, :])])
if not self.skip_forces:
self.forces_ss_seq.append(vstack(forces_ss))
if not self.failed:
# allow application of control law
self.t_td = tt
self.singleStance = False
self.update_params_td()
# accumulate results from stage 1a and stage 1b
if not self.failed:
t = hstack([t_a, t[1:]])
y = vstack([y_a, y[1:, :]])
# stage 2: double support
# compute leg 2 touchdown position
t2_a = []
y2_a = array([[]])
if not self.failed:
xf, yf, zf = self.get_touchdown(tt, yy, td_pars_2)
self.params[newfoot] = [xf, yf, zf]
# stage 2a: simulate until vy=0
self.ode.event = self.evt_vy0
t0 = tt
tE = t0 + max_T
t2_a, y2_a = self.ode(self.state, t0, tE, dt=self.dt)
if t2_a[-1] >= tE:
self.failed = True
self.ErrMsg = ("max. sim time exceeded - no nadir event " +
"detected in double stance")
if self.DEBUG:
print "finished stage 3 (raw)"
else:
tt2, yy2 = self.ode.refine(lambda tf, yf: yf[4])
if self.DEBUG:
print "finished stage 3 (fine)"
self.state = yy2
if not self.skip_forces:
forces_ds = [self.dy_Stance(xt, xy, self.params, return_force=True) for
xt, xy in zip(t2_a, y2_a)]
if not self.failed:
# allow application of control law
self.update_params_ds()
# stage 2b: double stance - simulate until takeoff of trailing leg
# define and solve double stance ode
#ode = integro.odeDP5(self.dy_Stance, pars=self.params)
# event is takeoff of leg 1
t2_b = []
y2_b = array([[]])
if not self.failed:
self.ode.event = lambda t,states,traj,p: self.takeoff_event(t,
states, traj, p, legfun=to_evt_fun)
t0 = tt2
tE = t0 + max_T
t2_b, y2_b = self.ode(self.state, t0, tE, dt=self.dt)
if t2_b[-1] >= tE:
self.failed = True
self.ErrMsg = ("sim. time exeeded - takeoff of trailing leg " +
"not detected")
if self.DEBUG:
print "finished stage 4 (raw)"
else:
# refinement: force reaches zero
tt, yy = self.ode.refine(lambda tf, yf: to_evt_ds_refine(tf, yf, self.params))
if self.DEBUG:
print "finished stage 4 (fine)"
self.state = yy
if not self.skip_forces:
forces_ds.extend([self.dy_Stance(xt, xy, self.params, return_force=True) for
xt, xy in zip(t2_b[1:], y2_b[1:, :])])
self.forces_ds_seq.append(vstack(forces_ds))
# allow application of control law
self.t_to = tt
self.singleStance = True
self.update_params_to()
# accumulate results from stage 1a and stage 1b
if not self.failed:
t2 = hstack([t2_a, t2_b[1:]])
y2 = vstack([y2_a, y2_b[1:, :]])
#store simulation results
if not self.failed:
self.y_ss_seq.append(y)
self.y_ds_seq.append(y2)
self.t_ss_seq.append(t)
self.t_ds_seq.append(t2)
self.feet1_seq.append(self.params['foot1'])
self.feet2_seq.append(self.params['foot2'])
if not self.failed:
if len(t2) > 0:
self.t = t2[-1]
if self.failed:
raise SimulationError(self.ErrMsg)
return t, y, t2, y2
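# One do_step() call walks through four integration stages (summary of the
# code above, comments only):
#   1a: single stance until the vertical apex (vy = 0)
#   1b: single stance until touchdown of the leading leg
#   2a: double stance until the nadir (vy = 0)
#   2b: double stance until takeoff of the trailing leg
# On success it returns (t_singlestance, y_singlestance, t_doublestance,
# y_doublestance).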
class BSLIP_newTD(BSLIP):
""" derived from BSLIP. The get_touchdown function is overwritten
such that the leg placement is w.r.t. walking direction.
    *NOTE* This is also a show-case of how to use inheritance for modelling here.
"""
def get_touchdown(self, t, y, params):
"""
Compute the touchdown position of the leg w.r.t. CoM velocity
:args:
t (float): time
y (6x float): state of the CoM
params (4x float): leg parameter: stiffness, l0, alpha, beta
:returns:
[xFoot, yFoot, zFoot] the position of the leg tip
"""
k, l0, alpha, beta = params
vx, vz = y[3], y[5]
a_v_com = -arctan2(vz, vx) # correct with our coordinate system
#for debugging
#print "v_com_angle:", a_v_com * 180. / pi
xf = y[0] + l0 * cos(alpha) * cos(beta + a_v_com)
yf = y[1] - l0 * sin(alpha)
zf = y[2] - l0 * cos(alpha) * sin(beta + a_v_com)
#for debugging
#print "foot: %2.3f,%2.3f,%2.3f," % ( xf,yf, zf)
return array([xf, yf, zf])
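# Quick numeric check of the placement rule above (hand-computed, illustrative):
# with l0 = 1, alpha = 68.5 deg, beta = 0 and the CoM moving along +x
# (a_v_com = 0), the foot lands cos(alpha) ~ 0.366 ahead of the CoM,
# sin(alpha) ~ 0.930 below it, and with zero lateral offset.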
def ICeuklid_to_ICcircle(IC):
"""
    converts from IC in Euclidean space to IC in circle parameters (rotational invariant).
The formats are:
IC_euklid: [x, y, z, vx, vy, vz]
IC_circle: [y, vy, |v|, |l|, phiv], where |v| is the magnitude of CoM velocity, |l|
is the distance from leg1 (assumed to be at [0,0,0]) to CoM, and phiv the angle
of the velocity in horizontal plane wrt x-axis
*NOTE* for re-conversion, the leg position is additionally required
:args:
        IC (6x float): the initial conditions in Euclidean space
:returns:
IC (5x float): the initial conditions in circular coordinates
"""
x,y,z,vx,vy,vz = IC
v = sqrt(vx**2 + vy**2 + vz**2)
l = sqrt(x**2 + y**2 + z**2)
#phiv = arctan2(vz, vx)
#phiv = arctan2(-vz, vx)
phiv = -arctan2(-vz, vx)
#phix = arctan2(-z, -x)
phix = arctan2(z, -x)
# warnings.warn('TODO: fix phi_x (add)')
# print "phix:", phix * 180 / pi
return [y, vy, v, l, phiv + phix]
def ICcircle_to_ICeuklid(IC):
"""
    converts from IC in circle parameters to IC in Euclidean space (rotational invariant).
The formats are:
IC_euklid: [x, y, z, vx, vy, vz]
IC_circle: [y, vy, |v|, |l|, phiv], where |v| is the magnitude of CoM velocity, |l|
is the distance from leg1 (assumed to be at [0,0,0]) to CoM, and phiv the angle
of the velocity in horizontal plane wrt x-axis
*NOTE* for re-conversion, the leg position is additionally required, assumed to be [0,0,0]
    Further, it is assumed that the foot-CoM axis is aligned with the x-axis
:args:
IC (5x float): the initial conditions in circular coordinates
:returns:
        IC (6x float): the initial conditions in Euclidean space
"""
y, vy, v, l, phiv = IC
z = 0
xsq = l**2 - y**2
if xsq < 0:
raise RuntimeError('Error in initial conditions: y > l!')
x = -sqrt(xsq)
vhsq = v**2 - vy**2
if vhsq < 0:
raise RuntimeError('Error in initial conditions: |vy| > |v|!')
v_horiz = sqrt(vhsq)
vx = v_horiz * cos(phiv)
#vz = v_horiz * sin(phiv)
vz = v_horiz * sin(phiv)
return [x, y, z, vx, vy, vz]
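# Round-trip sanity check for the two converters above (a sketch; relies on the
# documented convention that the stance foot sits at the origin with the
# foot-CoM axis along x).
def _check_ic_roundtrip():
    import numpy as np
    ic = [0.93, 0.12, 1.1, 1.0, 0.05]     # [y, vy, |v|, |l|, phiv]
    back = ICeuklid_to_ICcircle(ICcircle_to_ICeuklid(ic))
    print np.allclose(ic, back)           # True (up to floating-point rounding)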
def circ2normal_param(fixParams, P):
"""
converts the set (fixParams, P) to a set of initial conditions for
a BSLIP model.
:args:
fixParams (dict): set of parameters for BSLIP, plus "delta_beta" key
P [4x float]: step parameters k1, k2, alpha, beta (last two: for both legs)
"""
k1, k2, alpha, beta = P
par = deepcopy(fixParams)
par['foot1'] = [0, 0, 0]
par['foot2'] = [-2*par['lp2'][1], 0, 0] # set x to some very negative value
par['lp1'][0] = k1
par['lp2'][0] = k2
par['lp1'][2] = par['lp2'][2] = alpha
par['lp1'][3] = beta
par['lp2'][3] = -beta + par['delta_beta']
return par
def pred_to_p(baseParams, P):
"""
converts the set (fixParams, P) to a set of initial conditions for
a BSLIP model.
:args:
fixParams (dict): set of parameters for BSLIP
P [8x float]: step parameters k1, k2, alpha1, alpha2, beta1, beta2,
l01, l02
"""
k1, k2, a1, a2, b1, b2, l01, l02 = P
par = deepcopy(baseParams)
par['foot1'] = [0, 0, 0]
par['foot2'] = [-2*par['lp2'][1], 0, 0] # set x to some very negative value
par['lp1'][0] = k1
par['lp2'][0] = k2
par['lp1'][1] = l01
par['lp2'][1] = l02
par['lp1'][2] = a1
par['lp2'][2] = a2
par['lp1'][3] = b1
par['lp2'][3] = b2
return par
def new_stridefunction(fixParams):
""" returns a function that maps [IC, P] -> [FS],
in the BSLIP_newTD model
where IC: (reduced) initial conditions
P: reduced parameter vector (4x float)
FS: final state
"""
model = BSLIP_newTD(fixParams,[0,0,0,0,0,0])
    model.skip_forces = True #speed up simulation a little bit
def stridefun(IC, P):
""" performs a stride of the given model.
:args:
IC: (reduced) initial conditions: [y, vy, v, l, phiv]
P: (reduced) parameter set: [k1, k2, alpha, beta]
:returns:
FS: final state, same format as initial conditions
"""
full_IC = ICcircle_to_ICeuklid(IC)
par = circ2normal_param(fixParams, P)
model.state = full_IC
model.params = par
model.init_ode()
model.do_step()
model.do_step()
fs = model.state.copy() # final state of simulation
fs[:3] -= model.params['foot1'] # set origin to location of foot1 (which is on ground)
return array(ICeuklid_to_ICcircle(fs))
return stridefun
def stridefunction(fixParams):
""" returns a function that maps [IC, P] -> [FS],
in the BSLIP_newTD model
where IC: (reduced) initial conditions
P: reduced parameter vector (8x float): k1, k2, a1, a2, b1, b2, l01,
l02
FS: final state
"""
model = BSLIP_newTD(fixParams,[0,0,0,0,0,0])
    model.skip_forces = True #speed up simulation a little bit
def stridefun2(IC, P):
""" performs a stride of the given model.
:args:
IC: (reduced) initial conditions: [y, vy, v, l, phiv]
P: (reduced) parameter set: (k1, k2, a1, a2, b1, b2, l01, l02)
:returns:
FS: final state, same format as initial conditions
"""
full_IC = ICcircle_to_ICeuklid(IC)
par = pred_to_p(fixParams, P)
model.state = full_IC
model.params = par
model.init_ode()
model.do_step()
model.do_step()
fs = model.state.copy() # final state of simulation
fs[:3] -= model.params['foot1'] # set origin to location of foot1 (which is on ground)
return array(ICeuklid_to_ICcircle(fs))
return stridefun2
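# Usage sketch for the stride map above (the parameter values are placeholders,
# not a known periodic solution; the underlying simulation may raise
# SimulationError for infeasible parameter/state combinations).
def _demo_stride_map(base_params, ic_circle):
    f = stridefunction(base_params)
    P = [13100., 12900., 68.5 * pi / 180., 68.5 * pi / 180., -.05, .1, 1., 1.]
    return f(ic_circle, P)                # final state in circle coordinates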
def vis_sim(mdl):
"""
quick hack that visualizes the simulation results from a model
:args:
mdl (BSLIP): model that has run some steps
"""
# visualize
fig = figure(figsize=(18,8))
fig.clf()
subplot(1,2,1)
rep = 0
for ys, yd, f1, f2 in zip(mdl.y_ss_seq, mdl.y_ds_seq, mdl.feet1_seq[1:], mdl.feet2_seq[1:]):
label1 = label2 = label3 = label4 = None
if rep == 0:
label1 = 'single stance'
label2 = 'double stance'
label3 = 'foot leg#1'
label4 = 'foot leg#2'
plot(ys[:, 0], ys[:, 1], 'b-', linewidth=1, label=label1)
plot(yd[:, 0], yd[: ,1], 'g-', linewidth=3, label=label2)
plot(f1[0], f1[1], 'kd', label=label3)
plot(f2[0], f2[1], 'cd', label=label4)
rep += 1
legend(loc='best')
xlabel('horizontal position [m]')
ylabel('vertical position [m]')
subplot(1,2,2)
rep = 0
for ys, yd, f1, f2 in zip(mdl.y_ss_seq, mdl.y_ds_seq, mdl.feet1_seq[1:], mdl.feet2_seq[1:]):
label1 = label2 = label3 = label4 = None
if rep == 0:
label1 = 'single stance'
label2 = 'double stance'
label3 = 'foot leg#1'
label4 = 'foot leg#2'
plot(ys[:, 0], ys[:, 2], 'r-', linewidth=1, label=label1)
plot(yd[:, 0], yd[: ,2], 'm-', linewidth=3, label=label2)
plot(f1[0], f1[2], 'kd', label=label3)
plot(f2[0], f2[2], 'cd', label=label4)
rep += 1
legend(loc='best')
#axis('equal')
xlabel('horizontal position [m]')
ylabel('lateral position [m]')
return fig
# define some example values
demo_p_reduced = [13100, 12900, 68.5 * pi / 180., -.05] # [k1, k2, alpha, beta]
demo_p = { 'foot1' : [0, 0, 0],
'foot2' : [-1.5, 0, 0],
'm' : 80,
'g' : [0, -9.81, 0],
'lp1' : [13100, 1, 68.5 * pi / 180, -0.05], # leg params: stiffness, l0, alpha, beta
'lp2' : [12900, 1, 68.5 * pi / 180, 0.1],
'delta_beta' : .05
}
demo_IC = array([-0.153942, 0.929608, 0, 1.16798, 0.593798, -0.045518])
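# A minimal driving sketch (illustrative; assumes SimulationError and the
# pylab plotting names used by vis_sim are imported earlier in this module,
# and that the demo values above admit a few feasible steps).
if __name__ == "__main__":
    mdl = BSLIP_newTD(demo_p, demo_IC)
    try:
        for _ in range(4):                # four consecutive steps
            mdl.do_step()
    except SimulationError as err:
        print "simulation stopped early:", err
    vis_sim(mdl)
    show()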
|
gpl-2.0
| -1,109,068,607,172,693,500
| 34.564189
| 103
| 0.528482
| false
| 3.391066
| false
| false
| false
|
hyphaltip/cndtools
|
util/runGeneid.py
|
1
|
4841
|
#!/usr/bin/env python
# Copyright (c) 2006
# Colin Dewey (University of Wisconsin-Madison)
# cdewey@biostat.wisc.edu
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys
import os
import tempfile
from optparse import OptionParser
import FASTA
import GFF
usage = "usage: %prog [options] < fastaInput > gffOutput"
optparser = OptionParser(usage)
optparser.add_option("-b", "--binary", dest="bin",
help="Path to Geneid binary",
default="geneid")
optparser.add_option("-s", "--seglength", type="int", dest="segLength",
help="break large sequences into SEGLENGTH size pieces",
default=100000000)
optparser.add_option("-p", "--paramfile", dest="paramFilename",
help="use PARAMFILE for parameters",
default="/usr/local/apps/geneid/param/human1iso.param",
metavar="PARAMFILE")
(options, args) = optparser.parse_args()
if len(args) != 0:
optparser.error("incorrect number of arguments")
def runGeneid(rec,
param="/usr/local/apps/geneid/param/human1iso.param",
segLength=100000000,
bin="geneid",
options=None):
if not options:
options = []
if "-G" not in options:
options.append("-G")
optString = ' '.join(options)
    # create a temp file via tempfile instead of the deprecated os.tmpnam()
    fd, seqFilename = tempfile.mkstemp()
    os.close(fd)
cmd = "%(bin)s %(optString)s -P %(param)s %(seqFilename)s" % vars()
gffRecs = []
subrec = FASTA.Record()
subrec.title = rec.title
for i in range(((len(rec.sequence) - 1)/ segLength) + 1):
subrec.sequence = rec.sequence[i * segLength: (i + 1) * segLength]
seqFile = file(seqFilename, 'w')
seqFile.write(str(subrec))
seqFile.close()
for line in os.popen(cmd):
if not line or line.startswith('#'):
continue
fields = line.rstrip().split('\t')
cdsRec = GFF.Record(seqname=rec.title,
source="geneid",
feature="CDS",
start=int(fields[3]) + i * segLength,
end=int(fields[4]) + i * segLength,
score=float(fields[5]),
strand=fields[6],
frame=fields[7],
attributes={"gene_id": [fields[8]],
"transcript_id": [fields[8] + ".1"]})
exonType = fields[2]
if exonType in ["First", "Single"]:
startCodonRec = cdsRec.copy()
startCodonRec.feature = "start_codon"
startCodonRec.score = None
startCodonRec.frame = None
if cdsRec.strand == '+':
startCodonRec.end = startCodonRec.start + 2
else:
startCodonRec.start = startCodonRec.end - 2
gffRecs.append(startCodonRec)
exonRec = cdsRec.copy()
exonRec.feature = "exon"
exonRec.frame = None
gffRecs.append(exonRec)
gffRecs.append(cdsRec)
if exonType in ["Terminal", "Single"]:
stopCodonRec = cdsRec.copy()
stopCodonRec.feature = "stop_codon"
stopCodonRec.score = None
stopCodonRec.frame = None
if cdsRec.strand == '+':
stopCodonRec.start = stopCodonRec.end - 2
else:
stopCodonRec.end = stopCodonRec.start + 2
gffRecs.append(stopCodonRec)
try:
os.remove(seqFilename)
except OSError, e:
sys.stderr.write("Could not delete temporary file %s: %s" % \
(seqFilename, str(e)))
return gffRecs
for rec in FASTA.Iterator(sys.stdin):
print >>sys.stderr, rec.title
gffRecs = runGeneid(rec,
bin=options.bin,
param=options.paramFilename,
segLength=options.segLength)
for gffRec in gffRecs:
print gffRec
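# Typical invocation (a sketch; file names and the parameter file path are
# placeholders):
#   runGeneid.py --binary /path/to/geneid --paramfile human1iso.param \
#       --seglength 1000000 < genome.fa > genes.gff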
|
gpl-2.0
| 8,994,272,857,674,351,000
| 35.126866
| 81
| 0.546375
| false
| 3.926196
| false
| false
| false
|
burz/simcom
|
src/parser.py
|
1
|
35470
|
import symbol_table
import syntax_tree
import interpreter
negated_relation = { '=' : '#', '#' : '=', '<' : '>=', '>' : '<=', '<=' : '>', '>=' : '<' }
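# e.g. negated_relation['<'] == '>='; While() below uses this table to rewrite
# 'WHILE cond DO body END' as an IF guarding a REPEAT/UNTIL with the negated
# condition.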
class Parser_error(Exception):
def __init__(self, error):
self.error = error
def __str__(self):
return "error: {}".format(self.error)
class Parser(object):
def parse_tokens(self, tokens):
self.tokens = tokens
self.symbol_table = symbol_table.Symbol_table()
self.position = 0
self.in_expression = 0
instructions = self.Program()
if not instructions:
raise Parser_error("There is no 'PROGRAM' declared")
return instructions, self.symbol_table
def token(self):
if self.position >= len(self.tokens):
return False
return self.tokens[self.position]
def token_type(self):
if self.position >= len(self.tokens):
return False
return self.tokens[self.position].token_type
def token_line(self):
if self.position >= len(self.tokens):
return False
return self.tokens[self.position].line
def next_token(self):
self.position += 1
def type_check_binary_operation(self, operator, expression_left, expression_right, line):
if not type(expression_right.type_object) in [symbol_table.Integer, symbol_table.Constant]:
raise Parser_error("The expression to the left of the '{}' on line {} is not an INTEGER".
format(operator, line))
if not type(expression_left.type_object) in [symbol_table.Integer, symbol_table.Constant]:
raise Parser_error("The expression to the right of the '{}' on line {} is not an INTEGER".
format(operator, line))
def Program(self):
if not self.token_type() == 'PROGRAM':
return False
line = self.token_line()
self.next_token()
identifier = self.identifier()
if not identifier:
raise Parser_error("The 'PROGRAM' on line {} is not followed by an identifier".format(line))
if not self.token_type() == ';':
raise Parser_error("PROGRAM '{}' on line {} is not followed by a ';'".format(
identifier.data, identifier.line))
self.next_token()
self.Declarations()
instructions = False
if self.token_type() == 'BEGIN':
begin_line = self.token_line()
self.next_token()
instructions = self.Instructions()
if not instructions:
raise Parser_error("The 'BEGIN' on line {} is not followed by any Instructions".format(
begin_line))
if not self.token_type() == 'END':
raise Parser_error("The 'PROGRAM' on line {} is not terminated by an 'END'".format(line))
end_line = self.token_line()
self.next_token()
final_id = self.identifier()
if not final_id:
raise Parser_error("The 'END' on line {} is not followed by a program name to close".format(
end_line))
if not final_id.data == identifier.data:
raise Parser_error(
"The name of the program on line {} does not match the name of it's closing on line {}".
format(identifier.line, final_id.line))
if not self.token_type() == '.':
raise Parser_error("The program closing on line {} is not followed by a '.'".format(end_line))
self.next_token()
return syntax_tree.Syntax_tree(instructions)
def Declarations(self):
self.in_procedure = False
self.forward_declarations = {}
self.call_type_checks = []
self.argument_number_checks = []
while self.ConstDecl() or self.TypeDecl() or self.VarDecl() or self.ProcDecl():
pass
if self.forward_declarations:
error = ''
            for name, calls in self.forward_declarations.iteritems():
                for call in calls:
                    error += "       The function '{}' on line {} has not been defined\n".format(name, call.line)
raise Parser_error(error[7:-1])
for check in self.call_type_checks:
if not type(check.type_object) is symbol_table.Integer:
raise Parser_error("The call to '{}' on line {} must result in an INTEGER".format(
check.definition.name, check.line))
def ConstDecl(self):
if not self.token_type() == 'CONST':
return False
self.next_token()
while True:
identifier = self.identifier()
if not identifier:
return True
if not self.token_type() == '=':
raise Parser_error("The constant declaration of '{}' on line {} is not followed by a '='".
format(identifier.data, identifier.line))
self.next_token()
expression = self.Expression()
if not expression:
raise Parser_error(
"The constant declaration of '{}' on line {} is not followed by an Expression".
format(identifier.data, identifier.line))
if not type(expression.type_object) is symbol_table.Integer:
raise Parser_error(
"The expression following the constant declaration of '{}' on line {} is not an INTEGER".
format(identifier.data, identifier.line))
value = interpreter.Interpreter.evaluate_expression(interpreter.Interpreter(), expression)
if not self.token_type() == ';':
raise Parser_error("The constant declaration of '{}' on line {} is not followed by a ';'".
format(identifier.data, identifier.line))
self.next_token()
constant = symbol_table.Constant(self.symbol_table.integer_singleton, value, expression.line)
if not self.symbol_table.insert(identifier.data, constant):
previous_definition = self.symbol_table.find(identifier.data)
raise Parser_error("The constant delaration of '{}' on line {} ".format(
identifier.data, identifier.line) +
"conflicts with the previous declaration on line {}".format(
previous_definition.line))
return True
def TypeDecl(self):
if not self.token_type() == 'TYPE':
return False
self.next_token()
while True:
identifier = self.identifier()
if not identifier:
return True
if not self.token_type() == '=':
raise Parser_error("The type declaration of '{}' on line {} is not followed by a '='".
format(identifier.data, identifier.line))
self.next_token()
type_object = self.Type()
if not type_object:
raise Parser_error("The type declaration of '{}' on line {} is not followed by a Type".
format(identifier.data, identifier.line))
if not self.token_type() == ';':
raise Parser_error("The type declaration of '{}' on line {} is not followed by a ';'".
format(identifier.data, identifier.line))
self.next_token()
if not self.symbol_table.insert(identifier.data, type_object):
previous_definition = self.symbol_table.find(identifier.data)
raise Parser_error(
"The type delaration of '{}' on line {} conflicts with the previous declaration on line {}".
format(identifier.data, identifier.line, previous_definition.line))
return True
def VarDecl(self):
if not self.token_type() == 'VAR':
return False
self.next_token()
while True:
identifiers = self.IdentifierList()
if not identifiers:
return True
if not self.token_type() == ':':
                if len(identifiers) == 1:
raise Parser_error("The variable declaration of '{}' on line {} is not followed by a ':'".
format(identifiers[0].data, identifiers[0].line))
else:
error = "The variable declarations of:\n"
for identifier in identifiers:
error += " '{}' on line '{}'\n".format(identifier.data, identifier.line)
raise Parser_error(error + " are not follwed by a ':'")
self.next_token()
type_object = self.Type()
if not type_object:
                if len(identifiers) == 1:
raise Parser_error("The variable declaration of '{}' on line {} is not followed by a Type".
format(identifiers[0].data, identifiers[0].line))
else:
error = "The variable declarations of:\n"
for identifier in identifiers:
error += " '{}' on line '{}'\n".format(identifier.data, identifier.line)
raise Parser_error(error + " are not follwed by a Type")
if not self.token_type() == ';':
                if len(identifiers) == 1:
raise Parser_error("The variable declaration of '{}' on line {} is not followed by a ';'".
format(identifiers[0].data, identifiers[0].line))
else:
error = "The variable declarations of:\n"
for identifier in identifiers:
error += " '{}' on line '{}'\n".format(identifier.data, identifier.line)
raise Parser_error(error + " are not follwed by a ';'")
self.next_token()
for identifier in identifiers:
if not self.symbol_table.insert(identifier.data, type_object):
previous_definition = self.symbol_table.find(identifier.data)
raise Parser_error("The variable declaration of '{}' on line {} ".format(
identifier.data, identifier.line) +
"conflicts with the previous declaration at {}".format(
previous_definition.line))
return True
def ProcDecl(self):
if not self.token_type() == 'PROCEDURE':
return False
self.in_procedure = True
line = self.token_line()
self.next_token()
identifier = self.identifier()
if not identifier:
raise Parser_error("The 'PROCEDURE' on line {} is not followed by an identifier".format(line))
if not self.token_type() == '(':
raise Parser_error("The procedure declaration of '{}' on line {} is not followed by a '('".
format(identifier.data, line))
par_line = self.token_line()
self.next_token()
self.symbol_table.push_scope()
formals = self.Formals()
if not self.token_type() == ')':
raise Parser_error("The '(' on line {} is not terminated by a ')'".format(par_line))
self.next_token()
return_type_object = False
if self.token_type() == ':':
return_type_line = self.token_line()
self.next_token()
return_type_object = self.Type()
if not return_type_object:
raise Parser_error("The ':' on line {} is not followed by a Type".format(return_type_line))
if not self.token_type() == ';':
raise Parser_error("The procedure declaration of '{}' on line {} is not followed by a ';'".
format(identifier.data, line))
self.next_token()
while self.VarDecl():
pass
instructions = False
if self.token_type() == 'BEGIN':
begin_line = self.token_line()
self.next_token()
instructions = self.Instructions()
if not instructions:
raise Parser_error("The 'BEGIN' on line {} is not followed by any Instructions".format(
begin_line))
return_expression = False
return_line = False
if self.token_type() == 'RETURN':
return_line = self.token_line()
self.next_token()
return_expression = self.Expression()
if not return_expression:
raise Parser_error("The 'RETURN' on line {} is not followed by an Expression".format(
return_line))
if not return_expression.type_object is return_type_object:
raise Parser_error(
"The return type defined for '{}' on line {} does not match the type of the".
format(identifier.data, line) +
"return expression on line {}".format(return_line))
elif return_type_object:
raise Parser_error(
"Expected a return statement in the procedure declaration of '{}' on line {}".
format(identifier.data, line))
if not self.token_type() == 'END':
raise Parser_error("The procedure declaration of '{}' on line {} is not followed by an 'END'".
format(identifier.data, line))
end_line = self.token_line()
self.next_token()
closing_name = self.identifier()
if not closing_name:
raise Parser_error("The 'END' on line {} is not followed by a procedure name to close".format(
end_line))
if not closing_name.data == identifier.data:
raise Parser_error("Expected a closing of procedure '{}'; got '{}' on line {}".format(
identifier.data, closing_name.data, closing_name.line))
if not self.token_type() == ';':
raise Parser_error("Expected a ';' following the closing of the procedure '{}' on line {}".
format(closing_name.data, closing_name.line))
self.next_token()
scope = self.symbol_table.pop_scope()
procedure = symbol_table.Procedure(identifier.data, formals, scope, return_type_object,
instructions, return_expression, line)
if not self.symbol_table.insert(identifier.data, procedure):
previous_definition = self.symbol_table.find(identifier.data)
raise Parser_error("The procedure definition of '{}' on line {} ".format(
identifier.data, line) +
"conflicts with the previous declaration on line {}".format(
previous_definition.line))
self.in_procedure = False
if self.forward_declarations:
delete = []
for name, calls in self.forward_declarations.iteritems():
if name == identifier.data:
for call in calls:
call.definition = procedure
call.type_object = return_type_object
delete.append(name)
for name in delete:
del self.forward_declarations[name]
return True
def Type(self):
identifier = self.identifier()
if identifier:
definition = self.symbol_table.find(identifier.data)
if not type(definition) in [symbol_table.Integer, symbol_table.Array, symbol_table.Record]:
raise Parser_error("The identifier '{}' on line {} does not name a type".format(
identifier.data, identifier.line))
return definition
if self.token_type() == 'ARRAY':
line = self.token_line()
self.next_token()
expression = self.Expression()
if not expression:
raise Parser_error("The 'ARRAY' on line {} is not followed by an Expression".format(line))
if not type(expression.type_object) is symbol_table.Integer:
raise Parser_error("The Expression following the 'ARRAY' on line {} must be an INTEGER".
format(expression.line))
size = interpreter.Interpreter.evaluate_expression(interpreter.Interpreter(), expression)
if not self.token_type() == 'OF':
raise Parser_error("The 'ARRAY' on line {} is not followed by a 'OF'".format(line))
of_line = self.token_line()
self.next_token()
type_object = self.Type()
if not type_object:
raise Parser_error("The 'OF' on line {} is not followed by a Type".format(of_line))
return symbol_table.Array(type_object, size, line)
if self.token_type() == 'RECORD':
line = self.token_line()
self.next_token()
self.symbol_table.push_scope()
while True:
identifiers = self.IdentifierList()
if not identifiers:
break
if not self.token_type() == ':':
raise Parser_error(
"The IdentifierList following the 'RECORD' on line {} is not followed by a ':'".
format(identifiers[0].line))
col_line = self.token_line()
self.next_token()
type_object = self.Type()
if not type_object:
raise Parser_error("The ':' on line {} is not followed by a Type".format(col_line))
if not self.token_type() == ';':
raise Parser_error("The field declarations on line {} are not followed by a ';'".
format(col_line))
self.next_token()
for ident in identifiers:
if not self.symbol_table.insert(ident.data, type_object):
previous_definition = self.symbol_table.find(ident.data)
raise Parser_error(
"The definition of '{}' on line {} conflicts with the previous definition at {}".
format(ident.data, ident.line, previous_definition.line))
if not self.token_type() == 'END':
raise Parser_error(
"The definition of the 'RECORD' on line {} was not terminated by an 'END'".
format(line))
self.next_token()
scope = self.symbol_table.pop_scope()
return symbol_table.Record(scope, line)
return False
def Expression(self):
self.in_expression += 1
if self.token_type() == '+':
line = self.token_line()
self.next_token()
term = self.Term()
if not term:
raise Parser_error("The '+' on line {} is not followed by a Term".format(line))
elif self.token_type() == '-':
line = self.token_line()
self.next_token()
term = self.Term()
if not term:
raise Parser_error("The '-' on line {} is not followed by a Term".format(line))
constant = symbol_table.Constant(self.symbol_table.integer_singleton, 0, line)
number = syntax_tree.Number(constant, constant.line)
expression = syntax_tree.Expression(number, constant.type_object, number.line)
self.type_check_binary_operation('-', expression, term, line)
binary = syntax_tree.Binary('-', expression, term, line)
term = syntax_tree.Expression(binary, constant.type_object, binary.line)
else:
            line = self.token_line()
term = self.Term()
if not term:
self.in_expression -= 1
return False
while self.token_type() in ['+', '-']:
op_line = self.token_line()
operator = self.token_type()
self.next_token()
new_term = self.Term()
if not new_term:
raise Parser_error("The '{}' on line {} is not followed by a Term".format(operator, op_line))
self.type_check_binary_operation(operator, term, new_term, op_line)
if type(term.child) is syntax_tree.Number and type(new_term.child) is syntax_tree.Number:
interp = interpreter.Interpreter()
term_result = interp.evaluate_expression(term)
new_term_result = interp.evaluate_expression(new_term)
if operator == '+':
result = term_result + new_term_result
else: # -
result = term_result - new_term_result
constant = symbol_table.Constant(self.symbol_table.integer_singleton, result, op_line)
child = syntax_tree.Number(constant, constant.line)
else:
child = syntax_tree.Binary(operator, term, new_term, op_line)
term = syntax_tree.Expression(child, self.symbol_table.integer_singleton, child.line)
self.in_expression -= 1
return term
def Term(self):
factor = self.Factor()
if not factor:
return False
while self.token_type() in ['*', 'DIV', 'MOD']:
line = self.token_line()
operator = self.token_type()
self.next_token()
new_factor = self.Factor()
if not new_factor:
raise Parser_error("The '{}' on line {} is not followed by a Factor".format(operator, line))
self.type_check_binary_operation(operator, factor, new_factor, line)
if type(factor.child) is syntax_tree.Number and type(new_factor.child) is syntax_tree.Number:
interp = interpreter.Interpreter()
factor_result = interp.evaluate_expression(factor)
new_factor_result = interp.evaluate_expression(new_factor)
if operator == '*':
result = factor_result * new_factor_result
elif operator == 'DIV':
                    if new_factor_result == 0:
raise Parser_error("The right side of the 'DIV' on line {} evaluated to 0".format(line))
result = factor_result / new_factor_result
else: # MOD
                    if new_factor_result == 0:
raise Parser_error("The right side of the 'MOD' on line {} evaluated to 0".format(line))
result = factor_result % new_factor_result
constant = symbol_table.Constant(self.symbol_table.integer_singleton, result, line)
child = syntax_tree.Number(constant, constant.line)
else:
child = syntax_tree.Binary(operator, factor, new_factor, line)
factor = syntax_tree.Expression(child, self.symbol_table.integer_singleton, child.line)
return factor
def Factor(self):
integer = self.integer()
if integer:
return integer
designator = self.Designator()
if designator:
if type(designator) is syntax_tree.Number:
return syntax_tree.Expression(designator, self.symbol_table.integer_singleton,
designator.line)
return syntax_tree.Expression(designator, designator.type_object, designator.line)
if self.token_type() == '(':
line = self.token_line()
self.next_token()
expression = self.Expression()
if not expression:
raise Parser_error("The '(' on line {} is not followed by an Expression".format(line))
if not self.token_type() == ')':
raise Parser_error("The '(' on line {} is not terminated by a ')'".format(line))
self.next_token()
return expression
call = self.Call()
if call:
return syntax_tree.Expression(call, call.type_object, call.line)
return False
def Instructions(self):
instruction = self.Instruction()
if not instruction:
return False
instructions = [instruction]
while self.token_type() == ';':
line = self.token_line()
self.next_token()
instruction = self.Instruction()
if not instruction:
raise Parser_error("The ';' on line {} is not followed by any instructions".format(line))
instructions.append(instruction)
return syntax_tree.Instructions(instructions, instructions[0].line)
def Instruction(self):
instruction = (self.Assign() or self.If() or self.Repeat() or self.While() or self.Read() or
self.Write() or self.Call())
if not instruction:
return False
return syntax_tree.Instruction(instruction, instruction.line)
def Assign(self):
starting_position = self.position
location = self.Designator()
if not location:
return False
if not self.token_type() == ':=':
self.position = starting_position
return False
line = self.token_line()
self.next_token()
expression = self.Expression()
if not expression:
raise Parser_error("The ':=' on line {} is not followed by an Expression".format(line))
if not type(location.type_object) is type(expression.type_object):
raise Parser_error("The types of the location and expression for ':=' on line {} do not match".
format(line))
return syntax_tree.Assign(location, expression, line)
def If(self):
if not self.token_type() == 'IF':
return False
line = self.token_line()
self.next_token()
condition = self.Condition()
if not condition:
raise Parser_error("The 'IF' on line {} is not followed by a Condition".format(line))
if not self.token_type() == 'THEN':
raise Parser_error("The 'IF' on line {} is not followed by a 'THEN'".format(line))
then_line = self.token_line()
self.next_token()
instructions_true = self.Instructions()
if not instructions_true:
raise Parser_error("The 'THEN' on line {} is not followed by any Instructions".format(
then_line))
instructions_false = False
if self.token_type() == 'ELSE':
else_line = self.token_line()
self.next_token()
instructions_false = self.Instructions()
if not instructions_false:
raise Parser_error("The 'ELSE' on line {} is not followed by any Instructions".format(
else_line))
if not self.token_type() == 'END':
            raise Parser_error("The 'IF' on line {} is not followed by an 'END'".format(line))
self.next_token()
return syntax_tree.If(condition, instructions_true, instructions_false, line)
def Repeat(self):
if not self.token_type() == 'REPEAT':
return False
line = self.token_line()
self.next_token()
instructions = self.Instructions()
if not instructions:
raise Parser_error("The 'REPEAT' on line {} is not followed by any Instructions".format(line))
if not self.token_type() == 'UNTIL':
raise Parser_error("The 'REPEAT' on line {} is not followed by an 'UNTIL'".format(line))
until_line = self.token_line()
self.next_token()
condition = self.Condition()
if not condition:
raise Parser_error("The 'UNTIL' on line {} is not followed by a Condition".format(until_line))
if not self.token_type() == 'END':
raise Parser_error("The 'REPEAT' on line {} is not terminated by an 'END'".format(line))
self.next_token()
return syntax_tree.Repeat(condition, instructions, line)
def While(self):
if not self.token_type() == 'WHILE':
return False
line = self.token_line()
self.next_token()
condition = self.Condition()
if not condition:
raise Parser_error("The 'WHILE' on line {} is not followed by a Condition".format(line))
if not self.token_type() == 'DO':
raise Parser_error("The 'WHILE' on line {} is not followed by a 'DO'".format(line))
do_line = self.token_line()
self.next_token()
instructions = self.Instructions()
if not instructions:
raise Parser_error("The 'DO' on line {} is not followed by any Instructions".format(do_line))
if not self.token_type() == 'END':
raise Parser_error("The 'WHILE' on line {} is not teminated by an 'END'".format(line))
self.next_token()
repeat_relation = negated_relation[condition.relation]
repeat_condition = syntax_tree.Condition(repeat_relation, condition.expression_left,
condition.expression_right, condition.line)
repeat = syntax_tree.Repeat(repeat_condition, instructions, repeat_condition.line)
instruction = syntax_tree.Instruction(repeat, repeat.line)
instructions = syntax_tree.Instructions([instruction], instruction.line)
return syntax_tree.If(condition, instructions, False, line)
def Condition(self):
starting_position = self.position
expression_left = self.Expression()
if not expression_left:
return False
relation = self.token()
if not relation.data in ['=', '#', '<', '>', '<=', '>=']:
self.position = starting_position
return False
self.next_token()
expression_right = self.Expression()
if not expression_right:
raise Parser_error("There is no Expression following the '{}' on line {}".format(
operator.data, operator.line))
self.type_check_binary_operation(relation.data, expression_left, expression_right, relation.line)
return syntax_tree.Condition(relation.data, expression_left, expression_right, relation.line)
def Write(self):
if not self.token_type() == 'WRITE':
return False
line = self.token_line()
self.next_token()
expression = self.Expression()
if not expression:
raise Parser_error("The 'WRITE' on line {} is not followed by an Expression".format(line))
if not type(expression.type_object) is symbol_table.Integer:
raise Parser_error("The Expression on line {} must result in an INTEGER".format(
expression.line))
return syntax_tree.Write(expression, line)
def Read(self):
if not self.token_type() == 'READ':
return False
line = self.token_line()
self.next_token()
designator = self.Designator()
if not designator:
raise Parser_error("The 'READ' on line {} is not followed by a Designator".format(line))
return syntax_tree.Read(designator, line)
def Call(self):
starting_position = self.position
identifier = self.identifier()
if not identifier:
return False
definition = self.symbol_table.find(identifier.data)
if not self.token_type() == '(':
self.position = starting_position
return False
forward = False
if not definition:
if not self.in_procedure:
raise Parser_error("The Procedure '{}' on line {} has not been defined".format(
identifier.data, identifier.line))
forward = True
elif not type(definition) is symbol_table.Procedure:
raise Parser_error("'{}' on line {} is not a Procedure".format(
identifier.data, identifier.line))
line = self.token_line()
self.next_token()
actuals = self.Actuals()
if forward:
if self.in_expression:
return_type = self.symbol_table.integer_singleton
else:
return_type = False
else:
return_type = definition.type_object
call = syntax_tree.Call(definition, actuals, return_type, identifier.line)
if not forward:
length = len(actuals) if actuals else 0
definition_length = len(definition.formals) if definition.formals else 0
if length != definition_length:
raise Parser_error(
"The call to '{}' on line {} does not have the correct number of arguments ({} for {})".
format(identifier.data, identifier.line, length, definition_length))
else:
self.argument_number_checks.append(call)
if not identifier.data in self.forward_declarations:
self.forward_declarations[identifier.data] = [call]
else:
self.forward_declarations[identifier.data].append(call)
if self.in_expression:
self.call_type_checks.append(call)
if not self.token_type() == ')':
raise Parser_error("The '(' on line {} is not terminated by a ')'".format(line))
self.next_token()
return call
def Designator(self):
starting_position = self.position
identifier = self.identifier()
if not identifier:
return False
if self.token_type() == '(':
self.position = starting_position
return False
table_entry = self.symbol_table.find(identifier.data)
if not table_entry:
self.position = starting_position
return False
if type(table_entry) is symbol_table.Constant:
return syntax_tree.Number(table_entry, identifier.line)
selectors = self.Selector()
variable = syntax_tree.Variable(identifier.data, table_entry, identifier.line)
location = syntax_tree.Location(variable, table_entry, variable.line)
for selector in selectors:
if type(location.child) == syntax_tree.Variable:
definition = location.child.table_entry
else:
definition = location.child.type_object
if type(selector) is syntax_tree.Expression:
if not type(definition) is symbol_table.Array:
raise Parser_error("The index on line {} does not follow an Array".format(selector.line))
index = syntax_tree.Index(location, selector, definition.type_object, selector.line)
location = syntax_tree.Location(index, index.type_object, index.line)
else:
if not type(definition) is symbol_table.Record:
raise Parser_error("The field '{}' on line {} does not follow a Record".format(
selector.data, selector.line))
table_entry = definition.scope.find(selector.data)
if not table_entry:
raise Parser_error("The field '{}' on line {} has not been defined".format(
selector.data, selector.line))
variable = syntax_tree.Variable(selector.data, table_entry, selector.line)
field = syntax_tree.Field(location, variable, table_entry, variable.line)
location = syntax_tree.Location(field, table_entry, field.line)
return location
def Formals(self):
formal = self.Formal()
if not formal:
return False
formals = []
formals += formal
while self.token_type() == ';':
line = self.token_line()
self.next_token()
formal = self.Formal()
if not formal:
raise Parser_error("The ';' on line {} is not followed by a Formal".format(line))
formals += formal
return formals
def Formal(self):
line = self.token_line()
identifiers = self.IdentifierList()
if not identifiers:
return False
if not self.token_type() == ':':
raise Parser_error("The IdentifierList on line {} is not followed by a ':'".format(line))
line = self.token_line()
self.next_token()
type_object = self.Type()
if not type_object:
raise Parser_error("The ':' on line {} is not followed by a Type".format(line))
definitions = []
for identifier in identifiers:
self.symbol_table.insert(identifier.data, type_object)
definitions.append(identifier.data)
return definitions
def Actuals(self):
return self.ExpressionList()
def Selector(self):
selectors = []
while True:
if self.token_type() == '[':
line = self.token_line()
self.next_token()
expr_list = self.ExpressionList()
if not expr_list:
raise Parser_error("The '[' on line {} is not followed by an ExpressionList".format(line))
if not self.token_type() == ']':
raise Parser_error("The '[' on line {} is not closed by a ']'".format(line))
self.next_token()
selectors += expr_list
elif self.token_type() == '.':
self.next_token()
identifier = self.identifier()
if not identifier:
raise Parser_error("The '.' on line {} is not followed by an identifier".format(
self.last_line()))
selectors.append(identifier)
else:
break
return selectors
def IdentifierList(self):
identifier = self.identifier()
if not identifier:
return False
identifiers = [identifier]
while self.token_type() == ',':
self.next_token()
identifier = self.identifier()
if not identifier:
raise Parser_error("The ',' on line {} is not followed by an identifier".format(
self.last_line()))
identifiers.append(identifier)
return identifiers
def ExpressionList(self):
expression = self.Expression()
if not expression:
return False
expressions = [expression]
while self.token_type() == ',':
self.next_token()
expression = self.Expression()
if not expression:
raise Parser_error("The ',' on line {} is not followed by an expression".format(
self.last_line()))
expressions.append(expression)
return expressions
def identifier(self):
if not self.token_type() == 'identifier':
return False
identifier = self.token()
self.next_token()
return identifier
def integer(self):
if not self.token_type() == 'integer':
return False
constant = symbol_table.Constant(self.symbol_table.integer_singleton, int(self.token().data),
self.token_line())
number = syntax_tree.Number(constant, constant.line)
self.next_token()
return syntax_tree.Expression(number, constant.type_object, number.line)
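# Minimal smoke-test sketch (assumptions: tokens expose .token_type, .data and
# .line exactly as used above, and the companion symbol_table module provides
# integer_singleton; the real project would build the token list with its own
# scanner).
if __name__ == '__main__':
    class _Tok(object):
        def __init__(self, token_type, data, line):
            self.token_type = token_type
            self.data = data
            self.line = line
    src = [('PROGRAM', 'PROGRAM'), ('identifier', 'Tiny'), (';', ';'),
           ('BEGIN', 'BEGIN'), ('WRITE', 'WRITE'), ('integer', '42'),
           ('END', 'END'), ('identifier', 'Tiny'), ('.', '.')]
    tokens = [_Tok(t, d, i + 1) for i, (t, d) in enumerate(src)]
    tree, table = Parser().parse_tokens(tokens)
    print 'parsed OK:', tree is not None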
|
mit
| 1,540,278,318,245,075,000
| 43.785354
| 101
| 0.617931
| false
| 4.133069
| false
| false
| false
|
schleichdi2/OpenNfr_E2_Gui-6.0
|
lib/python/Plugins/Extensions/MediaPortal/resources/update.py
|
1
|
7746
|
# -*- coding: utf-8 -*-
###############################################################################################
#
# MediaPortal for Dreambox OS
#
# Coded by MediaPortal Team (c) 2013-2017
#
# This plugin is open source but it is NOT free software.
#
# This plugin may only be distributed to and executed on hardware which
# is licensed by Dream Property GmbH. This includes commercial distribution.
# In other words:
# It's NOT allowed to distribute any parts of this plugin or its source code in ANY way
# to hardware which is NOT licensed by Dream Property GmbH.
# It's NOT allowed to execute this plugin and its source code or even parts of it in ANY way
# on hardware which is NOT licensed by Dream Property GmbH.
#
# This applies to the source code as a whole as well as to parts of it, unless
# explicitly stated otherwise.
#
# If you want to use or modify the code or parts of it,
# you have to keep OUR license and inform us about the modifications, but it may NOT be
# commercially distributed other than under the conditions noted above.
#
# As an exception regarding execution on hardware, you are permitted to execute this plugin on VU+ hardware
# which is licensed by satco europe GmbH, if the VTi image is used on that hardware.
#
# As an exception regarding modifications, you are NOT permitted to remove
# any copy protections implemented in this plugin or change them for means of disabling
# or working around the copy protections, unless the change has been explicitly permitted
# by the original authors. Also decompiling and modification of the closed source
# parts is NOT permitted.
#
# Advertising with this plugin is NOT allowed.
# For other uses, permission from the authors is necessary.
#
###############################################################################################
from Plugins.Extensions.MediaPortal.plugin import _
from imports import *
import mp_globals
from messageboxext import MessageBoxExt
from twagenthelper import twAgentGetPage
import random
gLogFile = None
class checkupdate:
def __init__(self, session):
self.session = session
def checkforupdate(self):
update_agent = getUserAgent()
update_url = getUpdateUrl()
twAgentGetPage(update_url, agent=update_agent, timeout=60).addCallback(self.gotUpdateInfo).addErrback(self.gotError)
def gotError(self, error=""):
printl(error,self,"E")
return
def gotUpdateInfo(self, html):
if re.search(".*?<html", html):
return
self.html = html
tmp_infolines = html.splitlines()
remoteversion_ipk = re.sub('\D', '', tmp_infolines[0])
remoteversion_deb = re.sub('\D', '', tmp_infolines[2])
try:
mirrors = self.updateurl = tmp_infolines[5].split(';')
mirror_rand = random.choice(mirrors)
except:
mirror_rand = None
if mp_globals.isDreamOS:
self.updateurl = tmp_infolines[3]
remoteversion = remoteversion_deb
else:
self.updateurl = tmp_infolines[1]
remoteversion = remoteversion_ipk
if mirror_rand:
mirror_replace = re.search('(sourceforge.net.*)', self.updateurl)
if mirror_replace:
self.updateurl = 'http://' + mirror_rand + '.dl.' + mirror_replace.group(1)
if int(config.mediaportal.version.value) < int(remoteversion):
if mirror_rand:
printl('Random update mirror selected: %s' % mirror_rand,self,'A')
printl('Found update url: %s' % self.updateurl,self,'A')
if mirror_replace:
printl('Generated update url: %s' % self.updateurl,self,'A')
self.session.openWithCallback(self.startUpdate,MessageBoxExt,_("An update is available for the MediaPortal Plugin!\nDo you want to download and install it now?"), MessageBoxExt.TYPE_YESNO, timeout=15, default=False)
return
else:
return
def startUpdate(self,answer):
if answer is True:
self.session.open(MPUpdateScreen,self.updateurl,self.html)
else:
return
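# Assumed layout of the fetched update-info text (inferred from the indexing
# in gotUpdateInfo above; illustrative only):
#   line 0: ipk version          line 1: ipk package URL
#   line 2: deb version          line 3: deb package URL
#   line 4: (unused)             line 5: semicolon-separated mirror hosts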
class MPUpdateScreen(Screen):
def __init__(self, session, updateurl, html):
self.session = session
self.updateurl = updateurl
self.html = html
self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
path = "%s/%s/MP_Update.xml" % (self.skin_path, mp_globals.currentskin)
if not fileExists(path):
path = self.skin_path + mp_globals.skinFallback + "/MP_Update.xml"
with open(path, "r") as f:
self.skin = f.read()
f.close()
self.ml = MenuList([])
self['mplog'] = self.ml
self.list = []
Screen.__init__(self, session)
self['title'] = Label("MediaPortal Update")
self.setTitle("MediaPortal Update")
self.onLayoutFinish.append(self.__onLayoutFinished)
def __onLayoutFinished(self):
height = self['mplog'].l.getItemSize().height()
try:
self.ml.l.setFont(gFont(mp_globals.font, height - 2 * mp_globals.sizefactor))
except:
pass
self.list.append(_("Starting update, please wait..."))
self.ml.setList(self.list)
self.ml.moveToIndex(len(self.list)-1)
self.ml.selectionEnabled(False)
self.startPluginUpdate()
def startPluginUpdate(self):
self.container=eConsoleAppContainer()
if mp_globals.isDreamOS:
self.container.appClosed_conn = self.container.appClosed.connect(self.finishedPluginUpdate)
self.container.stdoutAvail_conn = self.container.stdoutAvail.connect(self.mplog)
f = open("/etc/apt/apt.conf", "r")
arch = ''.join(f.readlines()).strip()
arch = re.findall('"(.*?)";', arch, re.S)[0]
tmp_infolines = self.html.splitlines()
files = ''
for i in range(0, len(tmp_infolines)):
if re.match(".*?/update/",tmp_infolines[i], re.S):
file = "wget -q -O /tmp/mediaportal/update/%s %s" % (tmp_infolines[i].split('/update/')[-1].replace('&&ARCH&&', arch), tmp_infolines[i].replace('&&ARCH&&', arch))
files = files + ' && ' + file
download = files.strip(' && ')
self.container.execute("mkdir -p /tmp/mediaportal/update && %s && cd /tmp/mediaportal/update/ && dpkg-scanpackages . | gzip -1c > Packages.gz && echo deb file:/tmp/mediaportal/update ./ > /etc/apt/sources.list.d/mediaportal.list && apt-get update && apt-get install -y --force-yes enigma2-plugin-extensions-mediaportal && rm -r /tmp/mediaportal/update && rm /etc/apt/sources.list.d/mediaportal.list" % download)
else:
self.container.appClosed.append(self.finishedPluginUpdate)
self.container.stdoutAvail.append(self.mplog)
self.container.execute("opkg update ; opkg install " + str(self.updateurl))
def finishedPluginUpdate(self,retval):
self.container.kill()
if retval == 0:
config.mediaportal.filter.value = "ALL"
config.mediaportal.filter.save()
configfile.save()
self.session.openWithCallback(self.restartGUI, MessageBoxExt, _("MediaPortal successfully updated!\nDo you want to restart the Enigma2 GUI now?"), MessageBoxExt.TYPE_YESNO)
else:
self.session.openWithCallback(self.returnGUI, MessageBoxExt, _("MediaPortal update failed! Check the update log carefully!"), MessageBoxExt.TYPE_ERROR)
def restartGUI(self, answer):
if answer is True:
self.session.open(TryQuitMainloop, 3)
self.close()
def returnGUI(self, answer):
self.close()
def mplog(self,str):
self.list.append(str)
self.ml.setList(self.list)
self.ml.moveToIndex(len(self.list)-1)
self.ml.selectionEnabled(False)
self.writeToLog(str)
def writeToLog(self, log):
global gLogFile
if gLogFile is None:
self.openLogFile()
now = datetime.datetime.now()
gLogFile.write(str(log) + "\n")
gLogFile.flush()
def openLogFile(self):
global gLogFile
baseDir = "/tmp"
logDir = baseDir + "/mediaportal"
now = datetime.datetime.now()
try:
os.makedirs(baseDir)
except OSError, e:
pass
try:
os.makedirs(logDir)
except OSError, e:
pass
gLogFile = open(logDir + "/MediaPortal_update_%04d%02d%02d_%02d%02d.log" % (now.year, now.month, now.day, now.hour, now.minute, ), "w")
|
gpl-2.0
| 1,712,898,901,760,975,400
| 35.023256
| 414
| 0.695894
| false
| 3.201323
| false
| false
| false
|
census-instrumentation/opencensus-python
|
opencensus/common/http_handler/__init__.py
|
1
|
1391
|
# Copyright 2018, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
# For Python 3.0 and later
from urllib.request import urlopen, Request
from urllib.error import HTTPError, URLError
except ImportError:
# Fall back to Python 2's urllib2
from urllib2 import urlopen, Request
from urllib2 import HTTPError, URLError
import socket
_REQUEST_TIMEOUT = 2 # in secs
def get_request(request_url, request_headers=dict()):
"""Execute http get request on given request_url with optional headers
"""
request = Request(request_url)
for key, val in request_headers.items():
request.add_header(key, val)
try:
response = urlopen(request, timeout=_REQUEST_TIMEOUT)
response_content = response.read()
except (HTTPError, URLError, socket.timeout):
response_content = None
return response_content
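if __name__ == '__main__':
    # Smoke-test sketch: the URL is illustrative and outbound network access
    # is assumed; get_request returns None on timeout or HTTP/URL errors.
    body = get_request('http://example.com', {'User-Agent': 'http-handler-demo'})
    print(body is not None)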
|
apache-2.0
| -4,971,914,294,319,869,000
| 31.348837
| 74
| 0.723221
| false
| 4.202417
| false
| false
| false
|
unicef/un-partner-portal
|
backend/unpp_api/apps/partner/migrations/0053_auto_20180115_0834.py
|
1
|
2538
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-01-15 08:34
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('common', '0007_auto_20171031_0715'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('partner', '0052_merge_20180115_0938'),
]
operations = [
migrations.CreateModel(
name='PartnerCapacityAssessment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('assessment_type', models.TextField(blank=True, choices=[('HAC', 'HACT micro-assessment'), ('OCH', 'OCHA CBPF (Country-Based Pooled Fund) capacity assessment'), ('UNH', 'UNHCR procurement pre-qualification assessment '), ('DFI', 'DFID pre-grant due diligence assessment'), ('EUE', 'EU/ECHO Framework Partnership Agreement (FPA) assessment'), ('Oth', 'Other formal capacity assessment')], null=True)),
('report_url', models.URLField(blank=True, null=True)),
('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='capacity_assessments', to=settings.AUTH_USER_MODEL)),
('partner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='capacity_assessments', to='partner.Partner')),
('report_file', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='partner_capacity_assessments', to='common.CommonFile')),
],
options={
'ordering': ['id'],
},
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='assessment_report',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='assessments',
),
migrations.RemoveField(
model_name='partnerauditassessment',
name='capacity_assessment',
),
]
|
apache-2.0
| 7,876,868,366,261,741,000
| 50.795918
| 417
| 0.646178
| false
| 4.080386
| false
| false
| false
|
ClydeSpace-GroundStation/GroundStation
|
Utilities/Supporting_Libraries/gr-bruninga-master/python/ax25_fsk_mod.py
|
1
|
5030
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014 <+YOU OR YOUR COMPANY+>.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# TODO: Pickle -> JSON
import pickle
import numpy as np
import Queue
import time
from gnuradio import gr
from bruninga import packet
import pmt
class ax25_fsk_mod(gr.sync_block):
"""
A continuous phase FSK modulator for AX25 packets.
When given an AX25 Packet, this block converts it to an audio stream with
the given configured parameters. Two in question:
- Flag Count: How many flags to put before and after the packet
- Preamble Len (ms): How long to transmit a clock signal (01010101)
The default values for the mark, space, and baud rate are configurable to
allow for further experimentation. v.23 modems, for example, use 1300/2100
tones to generate 1200 baud signals.
"""
def __init__(self, samp_rate, preamble_len_ms, flag_count, mark_freq,
space_freq, baud_rate):
gr.sync_block.__init__(self,
name="ax25_fsk_mod",
in_sig=None,
out_sig=[np.float32])
self.samp_rate = samp_rate
self.flag_count = flag_count
self.mark_freq = mark_freq
self.space_freq = space_freq
self.baud_rate = baud_rate
self.preamble_len_bits = int((preamble_len_ms / 1000.0) * baud_rate / 2)
self.sps = int(1.0 * self.samp_rate / self.baud_rate)
self.outbox = Queue.Queue()
self.output_buffer = None
self.opb_idx = 0
self.message_port_register_in(pmt.intern('in'))
self.set_msg_handler(pmt.intern('in'), self.handle_msg)
def handle_msg(self, msg_pmt):
msg = pmt.to_python(msg_pmt)
if not (isinstance(msg, tuple) and len(msg) == 2):
print 'Invalid Message: Expected tuple of len 2'
print 'Dropping msg of type %s' % type(msg)
return
try:
msg = pickle.loads(msg[1])
except StandardError as e:
print 'Bad format: Expected pickled AX25Packet'
print str(e)
return
# TODO: Take list of AX25 packets VVVV
if not isinstance(msg, packet.AX25Packet):
print 'Expected AX25Packet, got %s' % type(msg)
return
self.outbox.put(msg)
def ax25_to_fsk(self, msg):
# TODO: Allow multiple messages to be strung together with
# one preamble
# Generate message
msg_bits = [0, 1] * self.preamble_len_bits
msg_bits += msg.hdlc_wrap(self.flag_count, self.flag_count)
# Calculate phase increments
mark_pinc = 2 * np.pi * self.mark_freq / self.samp_rate
space_pinc = 2 * np.pi * self.space_freq / self.samp_rate
phase = 0
opb = np.empty(len(msg_bits) * self.sps)
for i, bit in enumerate(msg_bits):
            pinc = (mark_pinc if bit == 1 else space_pinc)  # use ==, not 'is', for int comparison
phase += pinc
tmp = np.arange(self.sps) * pinc + phase
opb[i*self.sps:(i+1)*self.sps] = np.sin(tmp)
phase = tmp[-1]
return opb
def work(self, input_items, output_items):
out = output_items[0]
idx = 0
# TODO: Transmit cooldown period
if self.output_buffer is None:
if self.outbox.empty():
# TODO: This is a bit of a hack to work around the ALSA Audio
# Sink being unhappy with underflows
out[0:] = 0
return len(out)
self.output_buffer = self.ax25_to_fsk(self.outbox.get())
self.opb_idx = 0
# How many samples do we have left for each buffer?
opb_left = len(self.output_buffer) - self.opb_idx
out_left = len(out) - idx
# Take the minimum, and copy them to out
cnt = min(opb_left, out_left)
out[idx:idx+cnt] = self.output_buffer[self.opb_idx:self.opb_idx+cnt]
# Update counters
idx += cnt
self.opb_idx += cnt
# If we run out of samples in the output buffer, we're done
if self.opb_idx >= len(self.output_buffer):
self.output_buffer = None
# Fill the remaining buffer with zeros. Hack to help the ALSA audio sink
# be happy.
if idx < len(out):
out[idx:] = 0
return len(out)
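# Usage sketch (hypothetical wiring; the parameter values and block names
# are assumptions, not taken from this file):
#
#     mod = ax25_fsk_mod(samp_rate=48000, preamble_len_ms=300,
#                        flag_count=10, mark_freq=1200,
#                        space_freq=2200, baud_rate=1200)
#     tb.connect(mod, audio_sink)              # stream port -> sound card
#     tb.msg_connect(src, 'out', mod, 'in')    # pickled AX25Packet messages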
|
mit
| 7,835,768,773,258,997,000
| 31.662338
| 80
| 0.603777
| false
| 3.618705
| false
| false
| false
|
ollitapa/MMP-TracerApi
|
Tests/MeshTests/meshConeTest.py
|
1
|
1598
|
#
# Copyright 2015 Olli Tapaninen, VTT Technical Research Center of Finland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
from meshpy.geometry import generate_extrusion
from matplotlib import pylab as plt
from mpl_toolkits.mplot3d import Axes3D
from meshpy.tet import MeshInfo, build
rz = [(0, 0), (1, 0), (1.5, 0.5), (2, 1), (0, 1)]
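# rz is the (radius, z) profile to extrude; the loop below samples a unit
# circle at 40 points to serve as the base shape of the extrusion.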
base = []
for theta in np.linspace(0, 2 * np.pi, 40):
x = np.sin(theta)
y = np.cos(theta)
base.extend([(x, y)])
(points, facets,
facet_holestarts, markers) = generate_extrusion(rz_points=rz, base_shape=base)
p_array = np.array(points)
xs = p_array[:, 0]
ys = p_array[:, 1]
zs = p_array[:, 2]
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(xs, ys, zs)
for f in facets:
plt.plot(xs[list(f[0])], ys[list(f[0])], zs[list(f[0])])
plt.show()
for i_facet, poly_list in enumerate(facets):
print(poly_list)
mesh_info = MeshInfo()
mesh_info.set_points(points)
mesh_info.set_facets_ex(facets, facet_holestarts, markers)
mesh = build(mesh_info)
print(mesh.elements)
mesh.write_vtk('test.vtk')
|
apache-2.0
| 9,024,554,759,309,677,000
| 25.633333
| 79
| 0.702128
| false
| 2.94291
| false
| false
| false
|
bjoernricks/python-quilt
|
quilt/cli/series.py
|
1
|
1199
|
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 - 2017 Björn Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of python-quilt for details.
from quilt.cli.meta import Command
from quilt.cli.parser import OptionArgument
from quilt.db import Db, Series
class SeriesCommand(Command):
name = "series"
help = "Print the names of all patches in the series file."
v = OptionArgument(action="store_true", help="""indicate applied (+)
and topmost (=) patches""")
def run(self, args):
series = Series(self.get_patches_dir())
if args.v:
applied = Db(self.get_pc_dir()).patches()
for patch in applied[:-1]:
print("+ " + patch.get_name())
if applied:
print("= " + applied[-1].get_name())
patches = series.patches_after(applied[-1])
else:
patches = series.patches()
for patch in patches:
print(" " + patch.get_name())
else:
for patch in series.patches():
print(patch.get_name())
|
mit
| 6,167,082,070,145,650,000
| 31.378378
| 72
| 0.582638
| false
| 3.803175
| false
| false
| false
|
w0pke/oppgavegenerator
|
oppgavegen/generation_folder/generation.py
|
1
|
16870
|
"""
Handles task generation from templates.
"""
from random import uniform, shuffle, choice
import json
from sympy import sympify
from sympy.parsing.sympy_parser import (parse_expr, standard_transformations,
implicit_multiplication_application, convert_xor)
from oppgavegen.parsing.latex_translator import latex_to_sympy
from oppgavegen.models import Level
from oppgavegen.generation_folder.multifill import multifill
from oppgavegen.generation_folder.fill_in import fill_in_the_blanks
from oppgavegen.parsing.parenthesis_removal import *
from oppgavegen.utility.utility import *
from oppgavegen.generation_folder.calculate_parse_solution import parse_solution
from oppgavegen.generation_folder.get_question import get_question, get_level_question
@Debugger
def generate_task(user, template_extra, desired_type=''):
"""Makes a valid math question at the correct rating from a template in the database.
:param user: The user requesting a template
    :param template_extra: (optional) An id used for requesting a specific template.
:param desired_type: (optional) A string for requesting a specific template type.
:return: Returns a complete math question with generated numbers.
"""
if template_extra == "":
get_question_dict = get_question(user, '') # Gets a question from the DB
else:
get_question_dict = get_question(user, template_extra)
q = get_question_dict['template']
if desired_type == '':
desired_type = get_question_dict['type']
if desired_type != 'normal':
if (desired_type == 'multiple' or desired_type == 'multifill') and not q.multiple_support:
return {'question': 'error'}
if desired_type == 'blanks' and not q.fill_in:
return {'question': 'error'}
# The domain of random numbers that can be generated for the question
random_domain_list = q.random_domain
task = str(q.question_text)
task = task.replace('\\\\', '\\') # Replaces double \\ with \
task = task.replace('(', '+parenthesisleft+') # Done to preserve original parenthesis
task = task.replace(')', '+parenthesisright+') # Done to preserve original parenthesis
template_type = desired_type
choices = q.choices.replace('\\\\', '\\')
choices = choices.replace('(', '+parenthesisleft+') # Done to preserve original parenthesis
choices = choices.replace(')', '+parenthesisright+') # Done to preserve original parenthesis
conditions = q.conditions.replace('\\\\', '\\')
dictionary = q.dictionary
answer = q.answer.replace('\\\\', '\\')
primary_key = q.pk
fill_in = q.fill_in.replace('\\\\', '\\')
fill_in = fill_in.replace('(', '+parenthesisleft+') # Done to preserve original parenthesis
fill_in = fill_in.replace(')', '+parenthesisright+') # Done to preserve original parenthesis
template_specific = "" # A variable that holds the extra values for a given type. ie. choices for multiple.
variables_used = "" # Sends a splitable string since dictionaries can't be passed between layers.
replacing_words = '' # The words that got replaced, and the words that replaced them
graph = q.graph # took out .replace('\\\\', '\\')
if graph:
graph = json.loads(graph)
#task = add_phantom_minus(task)
#answer = add_phantom_minus(answer)
#choices = add_phantom_minus(choices)
new_choices = ''
new_task = ''
new_answer = ''
variable_dict = ''
valid_solution = False
while valid_solution is False: # Loop until we get a form of the task that has a valid solution
variable_dict = generate_valid_numbers(task, random_domain_list, conditions, False)
variables_used = dict_to_string(variable_dict) # Get a string with the variables used
new_task = string_replace(task, variable_dict)
new_answer = string_replace(answer, variable_dict)
new_choices = string_replace(choices, variable_dict)
for x in range(0, len(graph)):
graph[x] = string_replace(graph[x], variable_dict)
graph[x] = parse_solution(graph[x], q.random_domain)
if new_answer == 'error':
            continue  # Retry if the current template resulted in an error.
valid_solution = True
if template_type.lower() == 'multiple':
new_choices = new_choices.split('§')
for x in range(len(new_choices)):
new_choices[x] = parse_solution(new_choices[x], q.random_domain)
new_choices.append(parse_solution(new_answer, q.random_domain).replace('§', 'og'))
shuffle(new_choices) # Shuffles the choices so that the answer is not always in the same place.
new_choices = '§'.join(new_choices)
new_choices = parenthesis_removal(new_choices)
template_specific = new_choices
#template_specific = remove_pm_and_add_parenthesis(template_specific)
elif template_type == 'blanks':
fill_in_dict = fill_in_the_blanks(fill_in)
# new_task = new_task + '\n' + fill_in_dict['fill_in'].replace('\\n', '\n')
new_task = new_task + '§' + fill_in_dict['fill_in']
new_task = replace_variables_from_array(variables_used.split('§'), new_task)
new_task = parse_solution(new_task, q.random_domain)
template_specific = fill_in_dict['hole_positions']
elif template_type == 'multifill':
new_choices = choices + '§' + answer.replace('§', 'og')
new_choices = parenthesis_removal(new_choices)
template_specific = multifill(new_choices, variable_dict)
if dictionary is not None:
replace_words_dict = replace_words(new_task, dictionary)
new_task = replace_words_dict['sentence']
replacing_words = replace_words_dict['replace_string']
number_of_answers = len(new_answer.split('§'))
    if graph is not None and graph != '':  # to prevent an error if None
graph = json.dumps(graph)
new_task = parse_solution(new_task, q.random_domain)
#new_task = remove_pm_and_add_parenthesis(new_task)
new_task = parenthesis_removal(new_task)
return_dict = {'question': new_task,
'variable_dictionary': variables_used, 'template_type': template_type,
'template_specific': template_specific, 'primary_key': primary_key,
'number_of_answers': number_of_answers, 'replacing_words': replacing_words,
'graph': graph, 'graph_settings': q.graph_settings, 'graph_color': q.graph_color}
return return_dict
@Debugger
def generate_level(user, level_id):
"""Makes a valid math question at the correct rating from a template in the database.
:param user: The user requesting a template
:param template_extra: (optional) A id used for requesting a specific template.
:param desired_type: (optional) A string for requesting a specific template type.
:return: Returns a complete math question with generated numbers.
"""
level = Level.objects.get(pk=level_id)
get_question_dict = get_level_question(user, level) # Gets a template from the DB
q = get_question_dict['template']
desired_type = get_question_dict['type']
# The domain of random numbers that can be generated for the question
random_domain_list = q.random_domain
task = str(q.question_text)
task = task.replace('\\\\', '\\') # Replaces double \\ with \
task = task.replace('(', '+parenthesisleft+') # Done to preserve original parenthesis
task = task.replace(')', '+parenthesisright+') # Done to preserve original parenthesis
template_type = desired_type
choices = q.choices.replace('\\\\', '\\')
choices = choices.replace('(', '+parenthesisleft+')
choices = choices.replace(')', '+parenthesisright+')
conditions = q.conditions.replace('\\\\', '\\')
dictionary = q.dictionary
answer = q.answer.replace('\\\\', '\\')
primary_key = q.pk
fill_in = q.fill_in.replace('\\\\', '\\')
fill_in = fill_in.replace('(', '+parenthesisleft+') # Done to preserve original parenthesis
fill_in = fill_in.replace(')', '+parenthesisright+') # Done to preserve original parenthesis
template_specific = "" # A variable that holds the extra values for a given type. ie. choices for multiple.
variables_used = ""
replacing_words = '' # The words that got replaced, and the words that replaced them
#task = add_phantom_minus(task)
#answer = add_phantom_minus(answer)
#choices = add_phantom_minus(choices)
new_choices = ''
new_task = ''
new_answer = ''
variable_dict = ''
graph = q.graph # took out .replace('\\\\', '\\')
if graph:
graph = json.loads(graph)
valid_solution = False
while valid_solution is False: # Loop until we get a form of the task that has a valid solution
variable_dict = generate_valid_numbers(task, random_domain_list, conditions, False)
variables_used = dict_to_string(variable_dict) # Get a string with the variables used
new_task = string_replace(task, variable_dict)
new_answer = string_replace(answer, variable_dict)
new_choices = string_replace(choices, variable_dict)
for x in range(0, len(graph)):
graph[x] = string_replace(graph[x], variable_dict)
graph[x] = parse_solution(graph[x], q.random_domain)
if new_answer == 'error':
            continue  # Retry if the current template resulted in an error.
valid_solution = True
if template_type.lower() == 'multiple':
new_choices = new_choices.split('§')
for x in range(len(new_choices)):
new_choices[x] = parse_solution(new_choices[x], q.random_domain)
new_choices.append(parse_solution(new_answer, q.random_domain).replace('§', 'og'))
shuffle(new_choices) # Shuffles the choices so that the answer is not always in the same place.
new_choices = '§'.join(new_choices)
new_choices = parenthesis_removal(new_choices)
template_specific = new_choices
#template_specific = remove_pm_and_add_parenthesis(template_specific)
elif template_type == 'blanks':
fill_in_dict = fill_in_the_blanks(fill_in)
# new_task = new_task + '\n' + fill_in_dict['fill_in'].replace('\\n', '\n')
new_task = new_task + '§' + fill_in_dict['fill_in']
new_task = replace_variables_from_array(variables_used.split('§'), new_task)
new_task = parse_solution(new_task, q.random_domain)
template_specific = fill_in_dict['hole_positions']
elif template_type == 'multifill':
new_choices = choices + '§' + answer.replace('§', 'og')
template_specific = multifill(new_choices, variable_dict)
if dictionary is not None:
replace_words_dict = replace_words(new_task, dictionary)
new_task = replace_words_dict['sentence']
replacing_words = replace_words_dict['replace_string']
number_of_answers = len(new_answer.split('§'))
    if graph is not None and graph != '':  # to prevent an error if None
graph = json.dumps(graph)
new_task = parse_solution(new_task, q.random_domain)
# new_task = remove_pm_and_add_parenthesis(new_task)
new_task = parenthesis_removal(new_task)
return_dict = {'question': new_task, 'variable_dictionary': variables_used, 'template_type': template_type,
'template_specific': template_specific, 'primary_key': primary_key,
'number_of_answers': number_of_answers, 'replacing_words': replacing_words,
'graph': graph, 'graph_settings': q.graph_settings, 'graph_color': q.graph_color}
return return_dict
@Debugger
def generate_valid_numbers(template, random_domain, conditions, test):
"""Generates valid numbers using each variables random domain.
Also makes sure all variables follows the given conditions.
:param template: The template used.
:param random_domain: dict used for random domains
:param conditions: The conditions the variable have to follow.
:param test: If true the function returns the domain_dict instead of variable_dict.
:return: The current generated variables used in the template.
"""
hardcoded_variables = ['R22R', 'R21R', 'R20R', 'R19R', 'R18R', 'R17R', 'R16R', 'R15R', 'R14R', 'R13R', 'R12R',
'R11R', 'R10R', 'R9R', 'R8R', 'R7R', 'R6R', 'R3R', 'R2R', 'R1R', 'R0R']
domain_dict = {}
domain_list = {}
variable_dict = {}
try:
random_domain = json.loads(random_domain)
# Loops through all possible variable names, and generate a random number for it.
# Adds the variables names and numbers to the 2 dictionaries and the string
for key in random_domain:
if random_domain[key][1]:
random_number = str(make_number_from_list(random_domain[key][0]))
else:
random_number = str(make_number(random_domain[key][0]))
domain_dict[key] = random_domain[key][0]
domain_list[key] = random_domain[key][1]
variable_dict[key] = random_number
except ValueError:
pass
if len(conditions) > 1:
variable_dict = check_conditions(conditions, variable_dict, domain_dict, domain_list)
# lesser_than('R0 * 2 < 3', domain_dict, variable_dict) #for testing purposes
if test:
return domain_dict
return variable_dict
@Debugger
def check_conditions(conditions, variable_dict, domain_dict, domain_list):
"""A function that checks if the generated variables pass the conditions and generates new ones until they do.
:param conditions: The conditions of the template.
:param variable_dict: List of variables.
:param domain_dict: the domain of the variables.
:param domain_list: a dict with the domain list.
:return: List of variables that pass the conditions of the given template.
"""
conditions = remove_unnecessary(conditions)
# Check conditions --> if false: change a variable -> check conditions
inserted_conditions = string_replace(conditions, variable_dict)
while not parse_expr(latex_to_sympy(inserted_conditions), transformations=standard_transformations +
(convert_xor, implicit_multiplication_application,), global_dict=None, evaluate=True):
        variable_to_change = choice(list(variable_dict.keys()))  # Choose a random key from variable_dict
if domain_list[variable_to_change]:
variable_dict[variable_to_change] = make_number_from_list(domain_dict[variable_to_change])
else:
variable_dict[variable_to_change] = new_random_value(variable_to_change, domain_dict)
inserted_conditions = string_replace(conditions, variable_dict)
return variable_dict
@Debugger
def get_variables_used(string, variable_dict):
"""Returns what variables are used in the given string as a list."""
used_variables = []
for key in variable_dict:
temp_string = string.replace(key, "")
if temp_string != string:
used_variables.append(key)
string = temp_string
return used_variables
@Debugger
def new_random_value(value, domain_dict, bonus=0, extra=''):
"""Creates a new random value for a given variable using its domain.
:param value: The value to change.
:param domain_dict: Domain of the variables, decides what range of the variable and number of decimals.
:param bonus: Used for limiting the domain for the variable further if needed.
:param extra: argument for different configurations of what approach to use for the new variable
:return: New value.
"""
domain = domain_dict[value]
# If bonus isn't between the domain values, changing the value won't fix the condition.
    bonus -= 1  # Because the comparisons use strictly less than (<), not <=.
    if extra == 'left':  # Approach to use on the left side of less than (<)
if int(domain[0]) <= bonus <= int(domain[1]):
domain[1] = bonus
new_value = randint(int(domain[0]), int(domain[1]))
    elif extra == 'right':  # Approach to use on the right side of less than (<)
if int(domain[0]) <= bonus <= int(domain[1]):
domain[0] = bonus
new_value = randint(int(domain[0]), int(domain[1]))
else:
new_value = randint(int(domain[0]), int(domain[1]))
return new_value
def make_number_from_list(domain):
return sympify(latex_to_sympy(choice(domain)))
@Debugger
def make_number(domain):
"""Returns a random number within the range and decimal point of the domain given."""
number = uniform(float(domain[0]), float(domain[1]))
try:
number = round(number, int(domain[2]))
if number.is_integer():
number = round(number)
except IndexError:
number = round(number)
return number
|
bsd-3-clause
| -6,944,604,632,999,622,000
| 46.210084
| 114
| 0.654563
| false
| 3.80194
| false
| false
| false
|
dinhkhanh/trac
|
sample-plugins/Timestamp.py
|
1
|
1219
|
"""Inserts the current time (in seconds) into the wiki page."""
revision = "$Rev: 10617 $"
url = "$URL: https://svn.edgewall.org/repos/trac/tags/trac-1.0/sample-plugins/Timestamp.py $"
#
# The following shows the code for macro, old-style.
#
# The `execute` function serves no purpose other than to illustrate
# the example, it will not be used anymore.
#
# ---- (ignore in your own macro) ----
# --
import time # Trac before version 0.11 was using `time` module
def execute(hdf, txt, env):
t = time.localtime()
return "<b>%s</b>" % time.strftime('%c', t)
# --
# ---- (ignore in your own macro) ----
#
# The following is the converted new-style macro
#
# ---- (reuse for your own macro) ----
# --
from datetime import datetime
# Note: since Trac 0.11, datetime objects are used internally
from genshi.builder import tag
from trac.util.datefmt import format_datetime, utc
from trac.wiki.macros import WikiMacroBase
class TimestampMacro(WikiMacroBase):
_description = "Inserts the current time (in seconds) into the wiki page."
def expand_macro(self, formatter, name, args):
t = datetime.now(utc)
return tag.b(format_datetime(t, '%c'))
# --
# ---- (reuse for your own macro) ----
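# Usage note (illustrative): once the plugin is enabled, writing
# [[Timestamp]] in a wiki page renders the current time in bold.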
|
bsd-3-clause
| -3,748,584,367,646,097,400
| 27.348837
| 93
| 0.673503
| false
| 3.358127
| false
| false
| false
|
botswana-harvard/bcpp-subject
|
bcpp_subject/admin/postitive_participant_admin.py
|
1
|
2114
|
from django.contrib import admin
from django.utils.safestring import mark_safe
from edc_base.modeladmin_mixins import audit_fieldset_tuple
from ..admin_site import bcpp_subject_admin
from ..forms import PositiveParticipantForm
from ..models import PositiveParticipant
from .modeladmin_mixins import CrfModelAdminMixin
@admin.register(PositiveParticipant, site=bcpp_subject_admin)
class PositiveParticipantAdmin(CrfModelAdminMixin, admin.ModelAdmin):
form = PositiveParticipantForm
fieldsets = (
(None, {
'fields': (
'subject_visit',
'internalize_stigma',
'internalized_stigma',
'friend_stigma',
'family_stigma',
'enacted_talk_stigma',
'enacted_respect_stigma',
'enacted_jobs_tigma')}),
audit_fieldset_tuple,
)
radio_fields = {
'internalize_stigma': admin.VERTICAL,
'internalized_stigma': admin.VERTICAL,
'friend_stigma': admin.VERTICAL,
'family_stigma': admin.VERTICAL,
'enacted_talk_stigma': admin.VERTICAL,
'enacted_respect_stigma': admin.VERTICAL,
'enacted_jobs_tigma': admin.VERTICAL, }
additional_instructions = mark_safe(
'<h5>Note to Interviewer</h5>'
'Note The following supplemental questions '
'are only asked for respondents with known HIV infection. '
'SKIP for respondents without known HIV infection. '
'<H5><span style="color:orange;">Read to Participant</span></H5>'
'You let us know earlier that you '
'are HIV positive. I would now like to ask you a few '
'questions about your experiences living with HIV. '
'Please remember this interview and your responses '
'are private and confidential.In this section, '
'I\'m going to read you statements '
'about how you may feel about yourself and your '
'HIV/AIDS infection. I would like you to tell me '
'if you strongly agree, agree, disagree or strongly '
'disagree with each statement?')
|
gpl-3.0
| -1,579,892,250,554,721,500
| 37.436364
| 73
| 0.644749
| false
| 3.936685
| false
| false
| false
|
walac/build-mozharness
|
mozharness/mozilla/proxxy.py
|
1
|
6748
|
"""Proxxy module. Defines a Proxxy element that fetches files using local
proxxy instances (if available). The goal of Proxxy is to lower the traffic
from the cloud to internal servers.
"""
import urlparse
import socket
from mozharness.base.log import INFO, ERROR, LogMixin
from mozharness.base.script import ScriptMixin
# Proxxy {{{1
class Proxxy(ScriptMixin, LogMixin):
"""
Support downloading files from HTTP caching proxies
    Currently supports 'proxxy' instances, in which the caching proxy at
proxxy.domain.com will cache requests for ftp.mozilla.org when passed requests to
http://ftp.mozilla.org.proxxy.domain.com/...
self.config['proxxy']['urls'] defines the list of backend hosts we are currently caching, and
the hostname prefix to use for proxxy
self.config['proxxy']['instances'] lists current hostnames for proxxy instances. wildcard DNS
is set up so that *.proxxy.domain.com is a CNAME to the proxxy instance
"""
# Default configuration. Can be overridden via self.config
PROXXY_CONFIG = {
"urls": [
('http://ftp.mozilla.org', 'ftp.mozilla.org'),
('https://ftp.mozilla.org', 'ftp.mozilla.org'),
('https://ftp-ssl.mozilla.org', 'ftp.mozilla.org'),
('http://pvtbuilds.pvt.build.mozilla.org', 'pvtbuilds.mozilla.org'),
# tooltool
('http://tooltool.pvt.build.mozilla.org', 'tooltool.pvt.build.mozilla.org'),
# pypi
('http://pypi.pvt.build.mozilla.org', 'pypi.pvt.build.mozilla.org'),
('http://pypi.pub.build.mozilla.org', 'pypi.pub.build.mozilla.org'),
# taskcluster stuff
('https://queue.taskcluster.net', 'queue.taskcluster.net'),
],
"instances": [
'proxxy1.srv.releng.use1.mozilla.com',
'proxxy1.srv.releng.usw2.mozilla.com',
'proxxy1.srv.releng.scl3.mozilla.com',
],
"regions": [".use1.", ".usw2.", ".scl3"],
}
def __init__(self, config, log_obj):
        # proxxy does not need the full configuration, just the
        # 'proxxy' element; if the configuration has no 'proxxy'
        # section, use the default configuration instead
self.config = config.get('proxxy', self.PROXXY_CONFIG)
self.log_obj = log_obj
def get_proxies_for_url(self, url):
"""Maps url to its proxxy urls
Args:
url (str): url to be proxxied
Returns:
list: of proxy URLs to try, in sorted order.
please note that url is NOT included in this list.
"""
config = self.config
urls = []
self.info("proxxy config: %s" % config)
proxxy_urls = config.get('urls', [])
proxxy_instances = config.get('instances', [])
url_parts = urlparse.urlsplit(url)
url_path = url_parts.path
if url_parts.query:
url_path += "?" + url_parts.query
if url_parts.fragment:
url_path += "#" + url_parts.fragment
for prefix, target in proxxy_urls:
if url.startswith(prefix):
self.info("%s matches %s" % (url, prefix))
for instance in proxxy_instances:
if not self.query_is_proxxy_local(instance):
continue
new_url = "http://%s.%s%s" % (target, instance, url_path)
urls.append(new_url)
for url in urls:
self.info("URL Candidate: %s" % url)
return urls
def get_proxies_and_urls(self, urls):
"""Gets a list of urls and returns a list of proxied urls, the list
of input urls is appended at the end of the return values
Args:
urls (list, tuple): urls to be mapped to proxxy urls
Returns:
list: proxxied urls and urls. urls are appended to the proxxied
urls list and they are the last elements of the list.
"""
proxxy_list = []
for url in urls:
# get_proxies_for_url returns always a list...
proxxy_list.extend(self.get_proxies_for_url(url))
proxxy_list.extend(urls)
return proxxy_list
def query_is_proxxy_local(self, url):
"""Checks is url is 'proxxable' for the local instance
Args:
url (string): url to check
Returns:
bool: True if url maps to a usable proxxy,
False in any other case
"""
fqdn = socket.getfqdn()
config = self.config
regions = config.get('regions', [])
return any(r in fqdn and r in url for r in regions)
def download_proxied_file(self, url, file_name, parent_dir=None,
create_parent_dir=True, error_level=ERROR,
exit_code=3):
"""
Wrapper around BaseScript.download_file that understands proxies
        The retry config is set to 3 attempts, sleeping 30 seconds between tries.
Args:
url (string): url to fetch
file_name (string, optional): output filename, defaults to None
if file_name is not defined, the output name is taken from
the url.
parent_dir (string, optional): name of the parent directory
create_parent_dir (bool, optional): if True, creates the parent
directory. Defaults to True
error_level (mozharness log level, optional): log error level
defaults to ERROR
exit_code (int, optional): return code to log if file_name
is not defined and it cannot be determined from the url
Returns:
            string: file_name if the download has succeeded, None in case of
error. In case of error, if error_level is set to FATAL,
this method interrupts the execution of the script
"""
urls = self.get_proxies_and_urls([url])
for url in urls:
self.info("trying %s" % url)
retval = self.download_file(
url, file_name=file_name, parent_dir=parent_dir,
create_parent_dir=create_parent_dir, error_level=ERROR,
exit_code=exit_code,
retry_config=dict(
attempts=3,
sleeptime=30,
error_level=INFO,
))
if retval:
return retval
self.log("Failed to download from all available URLs, aborting",
level=error_level, exit_code=exit_code)
return retval
|
mpl-2.0
| -458,509,179,348,131,500
| 38.232558
| 97
| 0.566835
| false
| 4.183509
| true
| false
| false
|
laijingtao/landlab
|
landlab/plot/imshow.py
|
1
|
20927
|
#! /usr/bin/env python
"""
Methods to plot data defined on Landlab grids.
Plotting functions
++++++++++++++++++
.. autosummary::
:toctree: generated/
~landlab.plot.imshow.imshow_grid
~landlab.plot.imshow.imshow_grid_at_cell
~landlab.plot.imshow.imshow_grid_at_node
"""
import numpy as np
import inspect
from landlab.field.scalar_data_fields import FieldError
try:
import matplotlib.pyplot as plt
except ImportError:
import warnings
warnings.warn('matplotlib not found', ImportWarning)
from landlab.grid import CLOSED_BOUNDARY
from landlab.grid.raster import RasterModelGrid
from landlab.grid.voronoi import VoronoiDelaunayGrid
from landlab.utils.decorators import deprecated
def imshow_grid_at_node(grid, values, **kwds):
"""Prepare a map view of data over all nodes in the grid.
Data is plotted as cells shaded with the value at the node at its center.
Outer edges of perimeter cells are extrapolated. Closed elements are
colored uniformly (default black, overridden with kwd 'color_for_closed');
other open boundary nodes get their actual values.
*values* can be a field name, a regular array, or a masked array. If a
masked array is provided, masked entries will be treated as if they were
Landlab CLOSED_BOUNDARYs. Used together with the color_at_closed=None
keyword (i.e., "transparent"), this can allow for construction of overlay
layers in a figure (e.g., only defining values in a river network, and
overlaying it on another landscape).
Use matplotlib functions like xlim, ylim to modify your plot after calling
:func:`imshow_grid`, as desired.
This function happily works with both regular and irregular grids.
Construction ::
imshow_grid_at_node(grid, values, plot_name=None, var_name=None,
var_units=None, grid_units=None,
symmetric_cbar=False, cmap='pink',
limits=(values.min(), values.max()),
vmin=values.min(), vmax=values.max(),
allow_colorbar=True,
norm=[linear], shrink=1.,
color_for_closed='black',
color_for_background=None,
show_elements=False, output=None)
Parameters
----------
grid : ModelGrid
Grid containing the field to plot, or describing the geometry of the
provided array.
values : array_like, masked_array, or str
Node values, or a field name as a string from which to draw the data.
plot_name : str, optional
String to put as the plot title.
var_name : str, optional
Variable name, to use as a colorbar label.
var_units : str, optional
Units for the variable being plotted, for the colorbar.
grid_units : tuple of str, optional
        Units for the y and x dimensions. If None, the component will look
        to the grid property `axis_units` for this information. If no
        units are specified there, no entry is made.
    symmetric_cbar : bool
        Make the colormap symmetric about 0.
cmap : str
Name of a colormap
limits : tuple of float
Minimum and maximum of the colorbar.
vmin, vmax: floats
Alternatives to limits.
allow_colorbar : bool
If True, include the colorbar.
colorbar_label : str or None
The string with which to label the colorbar.
norm : matplotlib.colors.Normalize
The normalizing object which scales data, typically into the interval
[0, 1]. Ignore in most cases.
shrink : float
Fraction by which to shrink the colorbar.
color_for_closed : str or None
Color to use for closed nodes (default 'black'). If None, closed
(or masked) nodes will be transparent.
color_for_background : color str or other color declaration, or None
Color to use for closed elements (default None). If None, the
background will be transparent, and appear white.
show_elements : bool
If True, and grid is a Voronoi, the faces will be plotted in black
along with just the colour of the cell, defining the cell outlines
(defaults False).
output : None, string, or bool
If None (or False), the image is sent to the imaging buffer to await
an explicit call to show() or savefig() from outside this function.
If a string, the string should be the path to a save location, and the
filename (with file extension). The function will then call
plt.savefig([string]) itself. If True, the function will call
plt.show() itself once plotting is complete.
"""
if isinstance(values, str):
values_at_node = grid.at_node[values]
else:
values_at_node = values
if values_at_node.size != grid.number_of_nodes:
raise ValueError('number of values does not match number of nodes')
values_at_node = np.ma.masked_where(
grid.status_at_node == CLOSED_BOUNDARY, values_at_node)
try:
shape = grid.shape
except AttributeError:
shape = (-1, )
_imshow_grid_values(grid, values_at_node.reshape(shape), **kwds)
if isinstance(values, str):
plt.title(values)
@deprecated(use='imshow_grid_at_node', version='0.5')
def imshow_node_grid(grid, values, **kwds):
imshow_grid_at_node(grid, values, **kwds)
def imshow_grid_at_cell(grid, values, **kwds):
"""Map view of grid data over all grid cells.
Prepares a map view of data over all cells in the grid.
Method can take any of the same ``**kwds`` as :func:`imshow_grid_at_node`.
Construction ::
imshow_grid_at_cell(grid, values, plot_name=None, var_name=None,
var_units=None, grid_units=None,
symmetric_cbar=False, cmap='pink',
limits=(values.min(), values.max()),
vmin=values.min(), vmax=values.max(),
allow_colorbar=True, colorbar_label=None,
norm=[linear], shrink=1.,
color_for_closed='black',
color_for_background=None,
show_elements=False, output=None)
Parameters
----------
grid : ModelGrid
Grid containing the field to plot, or describing the geometry of the
provided array.
values : array_like, masked_array, or str
Values at the cells on the grid. Alternatively, can be a field name
(string) from which to draw the data from the grid.
plot_name : str, optional
String to put as the plot title.
var_name : str, optional
Variable name, to use as a colorbar label.
var_units : str, optional
Units for the variable being plotted, for the colorbar.
grid_units : tuple of str, optional
        Units for the y and x dimensions. If None, the component will look
        to the grid property `axis_units` for this information. If no
        units are specified there, no entry is made.
    symmetric_cbar : bool
        Make the colormap symmetric about 0.
cmap : str
Name of a colormap
limits : tuple of float
Minimum and maximum of the colorbar.
vmin, vmax: floats
Alternatives to limits.
allow_colorbar : bool
If True, include the colorbar.
colorbar_label : str or None
The string with which to label the colorbar.
norm : matplotlib.colors.Normalize
The normalizing object which scales data, typically into the interval
[0, 1]. Ignore in most cases.
shrink : float
Fraction by which to shrink the colorbar.
color_for_closed : str or None
Color to use for closed elements (default 'black'). If None, closed
(or masked) elements will be transparent.
color_for_background : color str or other color declaration, or None
Color to use for closed elements (default None). If None, the
background will be transparent, and appear white.
show_elements : bool
If True, and grid is a Voronoi, the faces will be plotted in black
along with just the colour of the cell, defining the cell outlines
(defaults False).
output : None, string, or bool
If None (or False), the image is sent to the imaging buffer to await
an explicit call to show() or savefig() from outside this function.
If a string, the string should be the path to a save location, and the
filename (with file extension). The function will then call
plt.savefig([string]) itself. If True, the function will call
plt.show() itself once plotting is complete.
Raises
------
ValueError
If input grid is not uniform rectilinear.
"""
if isinstance(values, str):
try:
values_at_cell = grid.at_cell[values]
except FieldError:
values_at_cell = grid.at_node[values]
else:
values_at_cell = values
if values_at_cell.size == grid.number_of_nodes:
values_at_cell = values_at_cell[grid.node_at_cell]
if values_at_cell.size != grid.number_of_cells:
raise ValueError('number of values must match number of cells or '
'number of nodes')
values_at_cell = np.ma.asarray(values_at_cell)
values_at_cell.mask = True
values_at_cell.mask[grid.core_cells] = False
myimage = _imshow_grid_values(grid,
values_at_cell.reshape(grid.cell_grid_shape),
**kwds)
if isinstance(values, str):
plt.title(values)
return myimage
@deprecated(use='imshow_grid_at_cell', version='0.5')
def imshow_cell_grid(grid, values, **kwds):
imshow_grid_at_cell(grid, values, **kwds)
def _imshow_grid_values(grid, values, plot_name=None, var_name=None,
var_units=None, grid_units=(None, None),
symmetric_cbar=False, cmap='pink', limits=None,
allow_colorbar=True, vmin=None, vmax=None,
norm=None, shrink=1., color_for_closed='black',
color_for_background=None, show_elements=False,
output=None):
gridtypes = inspect.getmro(grid.__class__)
cmap = plt.get_cmap(cmap)
if color_for_closed is not None:
cmap.set_bad(color=color_for_closed)
else:
cmap.set_bad(alpha=0.)
if isinstance(grid, RasterModelGrid):
if values.ndim != 2:
raise ValueError('values must have ndim == 2')
y = np.arange(values.shape[0] + 1) * grid.dy - grid.dy * .5
x = np.arange(values.shape[1] + 1) * grid.dx - grid.dx * .5
kwds = dict(cmap=cmap)
(kwds['vmin'], kwds['vmax']) = (values.min(), values.max())
if (limits is None) and ((vmin is None) and (vmax is None)):
if symmetric_cbar:
(var_min, var_max) = (values.min(), values.max())
limit = max(abs(var_min), abs(var_max))
(kwds['vmin'], kwds['vmax']) = (- limit, limit)
elif limits is not None:
(kwds['vmin'], kwds['vmax']) = (limits[0], limits[1])
else:
if vmin is not None:
kwds['vmin'] = vmin
if vmax is not None:
kwds['vmax'] = vmax
if np.isclose(grid.dx, grid.dy):
if values.size == grid.number_of_nodes:
myimage = plt.imshow(
values.reshape(grid.shape), origin='lower',
extent=(x[0], x[-1], y[0], y[-1]), **kwds)
else: # this is a cell grid, and has been reshaped already...
myimage = plt.imshow(values, origin='lower',
extent=(x[0], x[-1], y[0], y[-1]), **kwds)
myimage = plt.pcolormesh(x, y, values, **kwds)
plt.gca().set_aspect(1.)
plt.autoscale(tight=True)
if allow_colorbar:
cb = plt.colorbar(norm=norm, shrink=shrink)
elif VoronoiDelaunayGrid in gridtypes:
# This is still very much ad-hoc, and needs prettifying.
# We should save the modifications needed to plot color all the way
# to the diagram edge *into* the grid, for faster plotting.
# (see http://stackoverflow.com/questions/20515554/...
# colorize-voronoi-diagram)
# (This technique is not implemented yet)
from scipy.spatial import voronoi_plot_2d
import matplotlib.colors as colors
import matplotlib.cm as cmx
cm = plt.get_cmap(cmap)
if (limits is None) and ((vmin is None) and (vmax is None)):
# only want to work with NOT CLOSED nodes
open_nodes = grid.status_at_node != 4
if symmetric_cbar:
(var_min, var_max) = (values.flat[
open_nodes].min(), values.flat[open_nodes].max())
limit = max(abs(var_min), abs(var_max))
(vmin, vmax) = (- limit, limit)
else:
(vmin, vmax) = (values.flat[
open_nodes].min(), values.flat[open_nodes].max())
elif limits is not None:
(vmin, vmax) = (limits[0], limits[1])
else:
open_nodes = grid.status_at_node != 4
if vmin is None:
vmin = values.flat[open_nodes].min()
if vmax is None:
vmax = values.flat[open_nodes].max()
cNorm = colors.Normalize(vmin, vmax)
scalarMap = cmx.ScalarMappable(norm=cNorm, cmap=cm)
colorVal = scalarMap.to_rgba(values)
if show_elements:
myimage = voronoi_plot_2d(grid.vor, show_vertices=False,
show_points=False)
# show_points to be supported in scipy0.18, but harmless for now
mycolors = (i for i in colorVal)
for order in grid.vor.point_region:
region = grid.vor.regions[order]
colortouse = next(mycolors)
if -1 not in region:
polygon = [grid.vor.vertices[i] for i in region]
plt.fill(*zip(*polygon), color=colortouse)
plt.gca().set_aspect(1.)
# plt.autoscale(tight=True)
# Tempting though it is to move the boundary outboard of the outermost
# nodes (e.g., to the outermost corners), this is a bad idea, as the
# outermost cells tend to have highly elongated shapes which make the
# plot look stupid
plt.xlim((np.min(grid.node_x), np.max(grid.node_x)))
plt.ylim((np.min(grid.node_y), np.max(grid.node_y)))
scalarMap.set_array(values)
if allow_colorbar:
cb = plt.colorbar(scalarMap, shrink=shrink)
if grid_units[1] is None and grid_units[0] is None:
grid_units = grid.axis_units
if grid_units[1] == '-' and grid_units[0] == '-':
plt.xlabel('X')
plt.ylabel('Y')
else:
plt.xlabel('X (%s)' % grid_units[1])
plt.ylabel('Y (%s)' % grid_units[0])
else:
plt.xlabel('X (%s)' % grid_units[1])
plt.ylabel('Y (%s)' % grid_units[0])
if plot_name is not None:
plt.title('%s' % (plot_name))
if var_name is not None or var_units is not None:
if var_name is not None:
assert type(var_name) is str
if var_units is not None:
assert type(var_units) is str
colorbar_label = var_name + ' (' + var_units + ')'
else:
colorbar_label = var_name
else:
assert type(var_units) is str
colorbar_label = '(' + var_units + ')'
assert type(colorbar_label) is str
assert allow_colorbar
cb.set_label(colorbar_label)
if color_for_background is not None:
plt.gca().set_axis_bgcolor(color_for_background)
if output is not None:
if type(output) is str:
plt.savefig(output)
plt.clf()
elif output:
plt.show()
def imshow_grid(grid, values, **kwds):
"""Prepare a map view of data over all nodes or cells in the grid.
Data is plotted as colored cells. If at='node', the surrounding cell is
shaded with the value at the node at its center. If at='cell', the cell
is shaded with its own value. Outer edges of perimeter cells are
extrapolated. Closed elements are colored uniformly (default black,
overridden with kwd 'color_for_closed'); other open boundary nodes get
their actual values.
*values* can be a field name, a regular array, or a masked array. If a
masked array is provided, masked entries will be treated as if they were
Landlab CLOSED_BOUNDARYs. Used together with the color_at_closed=None
keyword (i.e., "transparent"), this can allow for construction of overlay
layers in a figure (e.g., only defining values in a river network, and
overlaying it on another landscape).
Use matplotlib functions like xlim, ylim to modify your plot after calling
:func:`imshow_grid`, as desired.
This function happily works with both regular and irregular grids.
Construction ::
imshow_grid(grid, values, plot_name=None, var_name=None,
var_units=None, grid_units=None,
symmetric_cbar=False, cmap='pink',
limits=(values.min(), values.max()),
vmin=values.min(), vmax=values.max(),
allow_colorbar=True, colorbar_label=None,
norm=[linear], shrink=1.,
color_for_closed='black',
color_for_background=None,
show_elements=False)
Parameters
----------
grid : ModelGrid
Grid containing the field to plot, or describing the geometry of the
provided array.
values : array_like, masked_array, or str
Node or cell values, or a field name as a string from which to draw
the data.
at : str, {'node', 'cell'}
Tells plotter where values are defined.
plot_name : str, optional
String to put as the plot title.
var_name : str, optional
Variable name, to use as a colorbar label.
var_units : str, optional
Units for the variable being plotted, for the colorbar.
grid_units : tuple of str, optional
        Units for the y and x dimensions. If None, the component will look
        to the grid property `axis_units` for this information. If no
        units are specified there, no entry is made.
    symmetric_cbar : bool
        Make the colormap symmetric about 0.
cmap : str
Name of a colormap
limits : tuple of float
Minimum and maximum of the colorbar.
vmin, vmax: floats
Alternatives to limits.
allow_colorbar : bool
If True, include the colorbar.
colorbar_label : str or None
The string with which to label the colorbar.
norm : matplotlib.colors.Normalize
The normalizing object which scales data, typically into the interval
[0, 1]. Ignore in most cases.
shrink : float
Fraction by which to shrink the colorbar.
color_for_closed : str or None
Color to use for closed elements (default 'black'). If None, closed
(or masked) elements will be transparent.
color_for_background : color str or other color declaration, or None
Color to use for closed elements (default None). If None, the
background will be transparent, and appear white.
show_elements : bool
If True, and grid is a Voronoi, the faces will be plotted in black
along with just the colour of the cell, defining the cell outlines
(defaults False).
output : None, string, or bool
If None (or False), the image is sent to the imaging buffer to await
an explicit call to show() or savefig() from outside this function.
If a string, the string should be the path to a save location, and the
filename (with file extension). The function will then call
plt.savefig([string]) itself. If True, the function will call
plt.show() itself once plotting is complete.
"""
show = kwds.pop('show', False)
values_at = kwds.pop('values_at', 'node')
values_at = kwds.pop('at', values_at)
    if isinstance(values, str):
        values = grid.field_values(values_at, values)
if values_at == 'node':
imshow_grid_at_node(grid, values, **kwds)
elif values_at == 'cell':
imshow_grid_at_cell(grid, values, **kwds)
else:
raise TypeError('value location %s not understood' % values_at)
# retained for backwards compatibility:
if show:
plt.show()
|
mit
| -8,240,677,285,092,095,000
| 39.321773
| 79
| 0.60711
| false
| 4.039961
| false
| false
| false
|
OCA/l10n-spain
|
l10n_es_facturae/models/res_partner.py
|
1
|
1829
|
# © 2015 Omar Castiñeira (Comunitea)
# © 2017 Creu Blanca
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import models, fields, api, exceptions, _
class ResPartner(models.Model):
_inherit = "res.partner"
facturae = fields.Boolean('Factura electrónica')
facturae_version = fields.Selection([
('3_2', '3.2'),
('3_2_1', '3.2.1'),
('3_2_2', '3.2.2'),
])
organo_gestor = fields.Char('Órgano gestor', size=10)
unidad_tramitadora = fields.Char('Unidad tramitadora', size=10)
oficina_contable = fields.Char('Oficina contable', size=10)
organo_proponente = fields.Char('Órgano proponente', size=10)
invoice_integration_method_ids = fields.Many2many(
comodel_name='account.invoice.integration.method',
string="Integration Methods"
)
attach_invoice_as_annex = fields.Boolean()
def get_facturae_residence(self):
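        # Facturae ResidenceTypeCode values (assumed mapping): 'R' =
        # resident in Spain, 'U' = resident in the EU, 'E' = foreigner
        # (rest of the world).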
if not self.country_id:
return 'E'
if self.country_id.code == 'ES':
return 'R'
for group in self.country_id.country_group_ids:
if group.name == 'Europe':
return 'U'
return 'E'
@api.constrains('facturae', 'vat', 'state_id', 'country_id')
def check_facturae(self):
for record in self:
if record.facturae:
if not record.vat:
raise exceptions.ValidationError(_('Vat must be defined'))
if not record.country_id:
raise exceptions.ValidationError(
_('Country must be defined'))
if record.country_id.code_alpha3 == 'ESP':
if not record.state_id:
raise exceptions.ValidationError(
_('State must be defined'))
|
agpl-3.0
| 8,327,318,882,392,049,000
| 36.204082
| 78
| 0.575425
| false
| 3.553606
| false
| false
| false
|
jriguera/photoplace
|
photoplace/lib/PhotoPlace/UserInterface/commandUI.py
|
1
|
5140
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# commandUI.py
#
# Copyright 2010-2015 Jose Riguera Lopez <jriguera@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
A command line implementation for a user interface.
"""
__program__ = "photoplace"
__author__ = "Jose Riguera Lopez <jriguera@gmail.com>"
__version__ = "0.6.1"
__date__ = "Dec 2014"
__license__ = "Apache 2.0"
__copyright__ ="(c) Jose Riguera"
import os
import sys
from PhotoPlace.definitions import *
from PhotoPlace.observerHandler import *
from PhotoPlace.stateHandler import *
from PhotoPlace.userFacade import *
from PhotoPlace.Plugins.Interface import *
from Interface import InterfaceUI
class PhotoPlaceCOM(InterfaceUI):
"""
    Command-line (non-GTK) user interface for PhotoPlace
"""
_instance = None
# Singleton
def __new__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(PhotoPlaceCOM, cls).__new__(cls)
return cls._instance
def __init__(self, resourcedir=None):
InterfaceUI.__init__(self, resourcedir)
def init(self, userfacade):
self.userfacade = userfacade
self.plugins = dict()
self.plugins_error = []
self.num_photos_process = 0
# Make a new state
try:
self.userfacade.init()
except Error as e:
print(e)
self.userfacade.init(True)
def loadPlugins(self):
errors = self.userfacade.load_plugins()
for p, e in errors.iteritems():
print(e)
self.plugins_error = []
for p in self.userfacade.addons :
if not p in errors:
try:
error = self.userfacade.activate_plugin(p, None)
except Error as e:
self.plugins_error.append(p)
print(e)
else:
if error != None:
self.plugins_error.append(p)
print(error)
else:
self.plugins_error.append(p)
def unloadPlugins(self):
pass
def activate_plugins(self):
for plg, plgobj in self.userfacade.list_plugins().iteritems():
if plg in self.plugins or plg in self.plugins_error:
continue
if not plgobj.capabilities['UI']:
# Active all plugins
try:
self.userfacade.init_plugin(plg, '*', None)
except Error as e:
print(e)
self.plugins[plg] = (plgobj)
def deactivate_plugins(self):
for plg in self.plugins.keys():
plgobj = self.plugins[plg]
try:
self.userfacade.end_plugin(plg)
except Error as e:
print(e)
del self.plugins[plg]
self.plugins = dict()
def start(self, load_files=True):
self.activate_plugins()
if self.action_loadtemplates():
if self.action_loadphotos():
if self.userfacade.state['gpxinputfile']:
if self.action_readgpx():
self.action_geolocate()
try:
self.userfacade.goprocess(True)
except Error as e:
print(e)
self.deactivate_plugins()
def action_loadtemplates(self):
try:
loadtemplates = self.userfacade.DoTemplates()
if loadtemplates:
loadtemplates.run()
return True
else:
return False
except Error as e:
print(e)
return False
def action_loadphotos(self, directory=None):
try:
loadphotos = self.userfacade.LoadPhotos(directory)
if loadphotos:
loadphotos.run()
return True
else:
return False
except Error as e:
print(e)
return False
def action_readgpx(self, filename=None):
try:
readgpx = self.userfacade.ReadGPX(filename)
if readgpx:
readgpx.run()
return True
else:
return False
except Error as e:
print(e)
return False
def action_geolocate(self):
try:
geolocate = self.userfacade.Geolocate()
if geolocate:
geolocate.run()
else:
return False
except Error as e:
print(e)
return False
return True
# EOF
|
apache-2.0
| -2,436,587,359,322,661,400
| 26.486631
| 76
| 0.542412
| false
| 4.168694
| false
| false
| false
|
lightbase/WSCacicNeo
|
wscacicneo/views/orgaos.py
|
1
|
11498
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'eduardo'
import requests
import json
import datetime
from pyramid.response import Response
from pyramid.httpexceptions import HTTPFound, HTTPNotFound
from pyramid.view import view_config, forbidden_view_config
from wscacicneo.model import orgao as model_orgao
from wscacicneo.utils.utils import Utils
from wscacicneo.model.orgao import Orgao
from ..model import atividade
from liblightbase.lbutils import conv
from .. import config
from .. import search
import uuid
import ast
from pyramid.session import check_csrf_token
class Orgaos(object):
"""
    Organization (órgão) views
"""
def __init__(self, request):
"""
        Constructor method
        :param request: The request
"""
self.request = request
self.usuario_autenticado = Utils.retorna_usuario_autenticado(
self.request.session.get('userid'))
def listorgao(self):
orgao_obj = Utils.create_orgao_obj()
search = orgao_obj.search_list_orgaos()
return {'orgao_doc': search.results,
'usuario_autenticado': self.usuario_autenticado
}
def get_orgao_initial(self):
        if Utils.check_has_orgao():  # if an organization already exists
return HTTPFound(location = self.request.route_url('login'))
return {'api_key': uuid.uuid4()}
def post_orgao_initial(self):
        if Utils.check_has_orgao():  # if an organization already exists
return HTTPFound(location = self.request.route_url('login'))
return self.post_orgao()
def config_orgao(self):
sigla = self.request.matchdict['sigla']
search_obj = search.orgao.SearchOrgao(
param=sigla
)
orgao_obj = search_obj.search_by_name()
saida = orgao_obj.orgao_to_dict()
        # Put some value in the URL
if saida.get('url') is None:
saida['url'] = self.request.application_url
saida['usuario_autenticado'] = self.usuario_autenticado
return saida
def editorgao(self):
sigla = self.request.matchdict['sigla']
search_obj = search.orgao.SearchOrgao(
param=sigla
)
orgao_obj = search_obj.search_by_name()
saida = orgao_obj.orgao_to_dict()
if saida.get('url') is None:
saida['url'] = self.request.application_url
saida['usuario_autenticado'] = self.usuario_autenticado
return saida
def post_orgao(self):
"""
        Create an organization (órgão) document
"""
rest_url = config.REST_URL
orgaobase = model_orgao.OrgaoBase().lbbase
doc = self.request.json_body
nome_base = Utils.format_name(doc.get('sigla'))
orgao_obj = Orgao(
nome=nome_base,
pretty_name=doc.get('pretty_name'),
siorg=doc.get('siorg'),
cargo=doc.get('cargo'),
gestor=doc.get('gestor'),
coleta=int(doc.get('coleta')),
sigla=doc.get('sigla'),
endereco=doc.get('end'),
email=doc.get('email'),
telefone=doc.get('telefone'),
url=doc.get('url'),
habilitar_bot=ast.literal_eval(doc.get('habilitar_bot')),
api_key=doc.get('api_key')
)
try:
if self.usuario_autenticado is None:
user = 'Sistema'
else:
user = self.usuario_autenticado.nome
except IndexError:
user = 'Sistema'
at = atividade.Atividade(
tipo='insert',
usuario=user,
descricao='Cadastrou o órgão ' + nome_base,
data=datetime.datetime.now()
)
at.create_atividade()
id_doc = orgao_obj.create_orgao()
session = self.request.session
session.flash('Orgão cadastrado com sucesso', queue="success")
return Response(str(id_doc))
def put_orgao(self):
"""
        Edit a doc based on its id
"""
doc = self.request.json_body
sigla = doc['id']
nome_base = Utils.format_name(doc.get('sigla'))
orgao_obj = Orgao(
nome=nome_base,
pretty_name=doc.get('pretty_name'),
siorg=doc.get('siorg'),
gestor=doc.get('gestor'),
cargo=doc.get('cargo'),
coleta=int(doc.get('coleta')),
sigla=nome_base,
endereco=doc.get('end'),
email=doc.get('email'),
telefone=doc.get('telefone'),
url=doc.get('url'),
habilitar_bot=ast.literal_eval(doc.get('habilitar_bot')),
api_key=doc.get('api_key')
)
at = atividade.Atividade(
tipo='put',
usuario=self.usuario_autenticado.nome,
descricao='Alterou o órgão ' + nome_base,
data=datetime.datetime.now()
)
at.create_atividade()
orgao = orgao_obj.orgao_to_dict()
search = orgao_obj.search_orgao(sigla)
id = search.results[0]._metadata.id_doc
doc = json.dumps(orgao)
print(doc)
print(sigla)
edit = orgao_obj.edit_orgao(id, doc)
session = self.request.session
session.flash('Alteração realizado com sucesso', queue="success")
return Response(str(id))
def delete_orgao(self):
"""
        Delete a doc based on its id
"""
session = self.request.session
doc = self.request.params
sigla = self.request.matchdict['sigla']
orgao_obj = Utils.create_orgao_obj()
user_obj = Utils.create_user_obj()
at = atividade.Atividade(
tipo='delete',
usuario=self.usuario_autenticado.nome,
descricao='Removeu o órgão '+ sigla,
data=datetime.datetime.now()
)
at.create_atividade()
search = orgao_obj.search_orgao(sigla)
id = search.results[0]._metadata.id_doc
orgao_name = search.results[0].nome
lista_usuarios = Utils.verifica_orgaos(orgao_name)
list_admins = Utils.verifica_admin(lista_usuarios)
        # List the names of the system administrator users
list_admins_names = []
for x in list_admins:
list_admins_names.append(x.nome)
        # Remove the organization and its users if no administrators are linked to it.
if not list_admins:
for id_users in lista_usuarios:
delete_user = user_obj.delete_user(id_users)
delete_orgao = orgao_obj.delete_orgao(id)
if delete_orgao:
session.flash('Sucesso ao apagar o órgão e os usuários ligados a ele'+search.results[0].pretty_name, queue="success")
else:
session.flash('Ocorreu um erro ao apagar o órgão '+search.results[0].pretty_name, queue="error")
return HTTPFound(location=self.request.route_url('listorgao'))
else:
if len(list_admins) > 1:
session.flash('O órgão '+search.results[0].pretty_name+' não pode ser removido pois ainda há administradores ligados a ele.', queue="error")
session.flash('Os administradores ligados ao órgão '+search.results[0].pretty_name+' são: '+str(list_admins_names).strip("[]"), queue="error")
else:
session.flash('O órgão '+search.results[0].pretty_name+' não pode ser removido pois ainda há um administrador ligado a ele.', queue="error")
session.flash('O administrador ligado ao órgão '+search.results[0].pretty_name+' é: '+str(list_admins_names).strip("[]"), queue="error")
return HTTPFound(location=self.request.route_url('listorgao'))
    # Organization views
def orgao(self):
return {
'usuario_autenticado': self.usuario_autenticado,
'api_key': uuid.uuid4()
}
def valida_orgao(self):
"""
Valida cadastro do órgão
:return: JSON no seguinte formato
{
'result': True/False,
'message': 'Se houver erro'
'element': 'Id do elemento onde houve o erro'
}
"""
orgao = self.request.json_body
        # 1 - Check the organization name
        exists = search.orgao.orgao_base.element_exists('nome', orgao['sigla'])
        if exists:
            # Organization already exists
            return {
                'result': False,
                'message': 'Já existe um órgão com essa sigla',
                'element': 'sigla'
            }
        # 2 - The name must be unique
        nome_base = Utils.format_name(orgao['sigla'])
        exists = search.orgao.orgao_base.element_exists('nome', nome_base)
        if exists:
            # Name already registered
            return {
                'result': False,
                'message': 'Nome de órgão já cadastrado. '
                           'Números e caracteres especiais são desconsiderados',
                'element': 'sigla'
            }
        # 3 - Check the e-mail
        exists = search.orgao.orgao_base.element_exists('email', orgao['email'])
        if exists:
            # E-mail already registered
            return {
                'result': False,
                'message': 'E-mail já cadastrado',
                'element': 'email'
            }
        # Return valid by default
        return {
            'result': True
        }
def valida_put_orgao(self):
"""
Valida cadastro do órgão
:return: JSON no seguinte formato
{
'result': True/False,
'message': 'Se houver erro'
'element': 'Id do elemento onde houve o erro'
}
"""
orgao = self.request.json_body
        # 1 - Check the organization name
        search_obj = search.orgao.SearchOrgao(
            param=orgao['id']
        )
        orgao_obj = search_obj.search_by_name()
        if orgao_obj is None:
            # Organization not found
            return {
                'result': False,
                'message': 'Órgão não encontrado',
                'element': 'sigla'
            }
        # 2 - The name must be unique
        nome_base = Utils.format_name(orgao['sigla'])
        exists = search.orgao.orgao_base.element_exists('nome', nome_base, orgao_obj.nome)
        if exists:
            # Name already registered
            return {
                'result': False,
                'message': 'Nome de órgão já cadastrado. '
                           'Números e caracteres especiais são desconsiderados',
                'element': 'sigla'
            }
        # 3 - Check the e-mail
        exists = search.orgao.orgao_base.element_exists('email', orgao['email'], orgao_obj.nome)
        if exists:
            # E-mail already registered
            return {
                'result': False,
                'message': 'E-mail já cadastrado',
                'element': 'email'
            }
        # 4 - The sigla must be unique
        exists = search.orgao.orgao_base.element_exists('sigla', orgao['sigla'], orgao_obj.nome)
        if exists:
            # Sigla already registered
            return {
                'result': False,
                'message': 'Sigla já cadastrada. '
                           'Números e caracteres especiais são desconsiderados',
                'element': 'sigla'
            }
        # Return valid by default
        return {
            'result': True
        }
|
gpl-2.0
| -7,502,199,000,167,618,000
| 34.02454
| 158
| 0.547644
| false
| 3.428829
| false
| false
| false
|
noam09/kodi
|
xmlgen.py
|
1
|
4509
|
#!/usr/bin/env python
# *
# * Copyright (C) 2012-2013 Garrett Brown
# * Copyright (C) 2010 j48antialias
# *
# * This Program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2, or (at your option)
# * any later version.
# *
# * This Program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with XBMC; see the file COPYING. If not, write to
# * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# * http://www.gnu.org/copyleft/gpl.html
# *
# * Based on code by j48antialias:
# * https://anarchintosh-projects.googlecode.com/files/addons_xml_generator.py
""" addons.xml generator """
import os
import sys
# Compatibility with 3.0, 3.1 and 3.2 not supporting u"" literals
if sys.version < '3':
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
class Generator:
"""
Generates a new addons.xml file from each addons addon.xml file
and a new addons.xml.md5 hash file. Must be run from the root of
the checked-out repo. Only handles single depth folder structure.
"""
def __init__( self ):
# generate files
self._generate_addons_file()
self._generate_md5_file()
# notify user
print("Finished updating addons xml and md5 files")
def _generate_addons_file( self ):
# addon list
addons = os.listdir( "." )
        excludedFolders = {'.svn', '.git', 'repo', 'plugin.video.moviexil',
                           'plugin.video.themarker.video'}
# final addons text
addons_xml = u("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n<addons>\n")
# loop thru and add each addons addon.xml file
for addon in addons:
try:
# skip any file or .svn folder or .git folder
if ( not os.path.isdir( addon ) or addon in excludedFolders): continue
# create path
_path = os.path.join( addon, "addon.xml" )
# split lines for stripping
                with open( _path, "r" ) as addon_file:
                    xml_lines = addon_file.read().splitlines()
# new addon
addon_xml = ""
# loop thru cleaning each line
for line in xml_lines:
# skip encoding format line
if ( line.find( "<?xml" ) >= 0 ): continue
# add line
if sys.version < '3':
addon_xml += unicode( line.rstrip() + "\n", "UTF-8" )
else:
addon_xml += line.rstrip() + "\n"
# we succeeded so add to our final addons.xml text
addons_xml += addon_xml.rstrip() + "\n\n"
except Exception as e:
# missing or poorly formatted addon.xml
print("Excluding %s for %s" % ( _path, e ))
# clean and add closing tag
addons_xml = addons_xml.strip() + u("\n</addons>\n")
# save file
self._save_file( addons_xml.encode( "UTF-8" ), file="addons.xml" )
def _generate_md5_file( self ):
# create a new md5 hash
try:
import md5
m = md5.new( open( "addons.xml", "r" ).read() ).hexdigest()
except ImportError:
import hashlib
m = hashlib.md5( open( "addons.xml", "r", encoding="UTF-8" ).read().encode( "UTF-8" ) ).hexdigest()
# save file
try:
self._save_file( m.encode( "UTF-8" ), file="addons.xml.md5" )
except Exception as e:
# oops
print("An error occurred creating addons.xml.md5 file!\n%s" % e)
def _save_file( self, data, file ):
try:
# write data to the file (use b for Python 3)
open( file, "wb" ).write( data )
except Exception as e:
# oops
print("An error occurred saving %s file!\n%s" % ( file, e ))
if ( __name__ == "__main__" ):
# start
Generator()
|
gpl-3.0
| -7,378,184,051,172,303,000
| 36.890756
| 120
| 0.554447
| false
| 3.811496
| false
| false
| false
|
ricardonhuang/blog
|
app/auth/views.py
|
1
|
6229
|
#coding=utf-8
'''
Created on 2016-10-20
@author: huangning
'''
from flask import render_template, redirect, request, url_for, flash
from flask_login import login_user, logout_user, login_required, \
current_user
from . import auth
from .. import db
from ..models import User
from ..email import send_email
from .forms import LoginForm, RegistrationForm, ChangePasswordForm,\
PasswordResetRequestForm, PasswordResetForm, ChangeEmailForm
@auth.before_app_request
def before_request():
if current_user.is_authenticated:
current_user.ping()
if not current_user.confirmed and request.endpoint[:5] != 'auth.':
return redirect(url_for('auth.unconfirmed'))
@auth.route('/unconfirmed')
def unconfirmed():
if current_user.is_anonymous or current_user.confirmed:
return redirect(url_for('main.index'))
return render_template('auth/unconfirmed.html')
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None and user.verify_password(form.password.data):
login_user(user, form.remember_me.data)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('auth/login.html', form=form)
@auth.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
token = user.generate_confirmation_token()
send_email(user.email, 'Confirm Your Account',
'auth/email/confirm', user=user, token=token)
flash('A confirmation email has been sent to you by email.')
return redirect(url_for('auth.login'))
return render_template('auth/register.html', form=form)
@auth.route('/confirm/<token>')
@login_required
def confirm(token):
if current_user.confirmed:
return redirect(url_for('main.index'))
if current_user.confirm(token):
flash('You have confirmed your account. Thanks!')
else:
flash('The confirmation link is invalid or has expired.')
return redirect(url_for('main.index'))
@auth.route('/confirm')
@login_required
def resend_confirmation():
token = current_user.generate_confirmation_token()
send_email(current_user.email, 'Confirm Your Account',
'auth/email/confirm', user=current_user, token=token)
flash('A new confirmation email has been sent to you by email.')
return redirect(url_for('main.index'))
@auth.route('/change-password', methods=['GET', 'POST'])
@login_required
def change_password():
form = ChangePasswordForm()
if form.validate_on_submit():
if current_user.verify_password(form.old_password.data):
current_user.password = form.password.data
db.session.add(current_user)
flash('Your password has been updated.')
return redirect(url_for('main.index'))
else:
flash('Invalid password.')
return render_template("auth/change_password.html", form=form)
@auth.route('/reset', methods=['GET', 'POST'])
def password_reset_request():
if not current_user.is_anonymous:
return redirect(url_for('main.index'))
form = PasswordResetRequestForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user:
token = user.generate_reset_token()
send_email(user.email, 'Reset Your Password',
'auth/email/reset_password',
user=user, token=token,
next=request.args.get('next'))
flash('An email with instructions to reset your password has been '
'sent to you.')
return redirect(url_for('auth.login'))
return render_template('auth/reset_password.html', form=form)
@auth.route('/reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if not current_user.is_anonymous:
return redirect(url_for('main.index'))
form = PasswordResetForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is None:
return redirect(url_for('main.index'))
if user.reset_password(token, form.password.data):
flash('Your password has been updated.')
return redirect(url_for('auth.login'))
else:
return redirect(url_for('main.index'))
return render_template('auth/reset_password.html', form=form)
@auth.route('/change-email', methods=['GET', 'POST'])
@login_required
def change_email_request():
form = ChangeEmailForm()
if form.validate_on_submit():
if current_user.verify_password(form.password.data):
new_email = form.email.data
token = current_user.generate_email_change_token(new_email)
send_email(new_email, 'Confirm your email address',
'auth/email/change_email',
user=current_user, token=token)
flash('An email with instructions to confirm your new email '
'address has been sent to you.')
return redirect(url_for('main.index'))
else:
flash('Invalid email or password.')
return render_template("auth/change_email.html", form=form)
@auth.route('/change-email/<token>')
@login_required
def change_email(token):
if current_user.change_email(token):
flash('Your email address has been updated.')
else:
flash('Invalid request.')
return redirect(url_for('main.index'))
|
gpl-3.0
| 5,823,037,768,428,690,000
| 34.145349
| 78
| 0.623774
| false
| 3.907605
| false
| false
| false
|
YuxingZhang/prescription
|
rnn_model/batch.py
|
1
|
10241
|
import numpy as np
import random
import cPickle as pkl
from collections import OrderedDict
class Batch():
def __init__(self, lhs, rel, rhs, batch_size=128):
self.lhs = lhs
self.rel = rel
self.rhs = rhs
self.batch_size = batch_size
self.prepare()
self.reset()
def prepare(self):
self.indices = np.arange(len(self.lhs))
self.curr_indices = np.random.permutation(self.indices)
def reset(self):
self.curr_indices = np.random.permutation(self.indices)
self.curr_pos = 0
self.curr_remaining = len(self.indices)
def next(self):
if self.curr_pos >= len(self.indices):
self.reset()
raise StopIteration()
# current batch size
curr_batch_size = np.minimum(self.batch_size, self.curr_remaining)
# indices for current batch
curr_indices = self.curr_indices[self.curr_pos:self.curr_pos+curr_batch_size]
self.curr_pos += curr_batch_size
self.curr_remaining -= curr_batch_size
# data and targets for current batch
lhs_batch = [self.lhs[ii] for ii in curr_indices]
rel_batch = [self.rel[ii] for ii in curr_indices]
rhs_batch = [self.rhs[ii] for ii in curr_indices]
return lhs_batch, rel_batch, rhs_batch
def __iter__(self):
return self
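# A minimal usage sketch for Batch, assuming lhs/rel/rhs are parallel lists
# of triple components such as those returned by load_labeled_entities
# below (the loop body here is hypothetical, not part of this module):
#
#     lhs, rel, rhs = load_labeled_entities(open('train.txt'))
#     for lhs_b, rel_b, rhs_b in Batch(lhs, rel, rhs, batch_size=128):
#         pass  # feed the minibatch to a training step
#
# next() reshuffles and resets on exhaustion, so the same Batch object can
# be iterated again for the following epoch.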
def prepare_data(lhs_b, rel_b, rhs_b, chardict, lhs_dict, rel_dict, rhs_dict, n_chars, use_beos=False):
"""
Prepare the data for training - add masks and remove infrequent characters, used for training
"""
batch_size = len(lhs_b)
lhs_list = lhs_dict.keys()
rand_idx = np.random.choice(len(lhs_list), batch_size)
lhsn_b = []
    for i in range(batch_size):
        # Corrupt the lhs with a random entity (re-sample once on collision);
        # append the string itself, not a one-element list, so that
        # prepare_lhs can split the word into characters.
        lhsn_b.append(lhs_list[rand_idx[i]] if lhs_b[i] != lhs_list[rand_idx[i]] else lhs_list[np.random.randint(len(lhs_list))])
lhs_in, lhs_mask = prepare_lhs(lhs_b, chardict, n_chars)
lhsn_in, lhsn_mask = prepare_lhs(lhsn_b, chardict, n_chars)
# rel and rhs
rel_idx = [rel_dict[yy] for yy in rel_b] # convert each relation to its index
rhs_idx = [rhs_dict[yy] for yy in rhs_b] # convert each right hand side to its index
rel_in = np.zeros((batch_size)).astype('int32')
rhs_in = np.zeros((batch_size)).astype('int32')
for idx in range(batch_size):
rel_in[idx] = rel_idx[idx]
rhs_in[idx] = rhs_idx[idx]
# random index as the negative triples
rhsn_in = np.random.randint(len(rhs_dict), size=batch_size).astype('int32')
return lhs_in, lhs_mask, lhsn_in, lhsn_mask, rel_in, rhs_in, rhsn_in
def prepare_data_nn(lhs_b, rel_b, rhs_b, chardict, lhs_dict, rel_dict, rhs_dict, n_chars, use_beos=False):
"""
Prepare the data for training - add masks and remove infrequent characters, used for training
"""
batch_size = len(lhs_b)
lhs_list = lhs_dict.keys()
rand_idx = np.random.choice(len(lhs_list), batch_size)
lhsn_b = []
    for i in range(batch_size):
        # Corrupt the lhs with a random entity (re-sample once on collision);
        # append the string itself, not a one-element list, so that
        # prepare_lhs can split the word into characters.
        lhsn_b.append(lhs_list[rand_idx[i]] if lhs_b[i] != lhs_list[rand_idx[i]] else lhs_list[np.random.randint(len(lhs_list))])
lhs_in, lhs_mask = prepare_lhs(lhs_b, chardict, n_chars)
lhsn_in, lhsn_mask = prepare_lhs(lhsn_b, chardict, n_chars)
# rel and rhs
rel_idx = [rel_dict[yy] for yy in rel_b] # convert each relation to its index
rhs_idx = [rhs_dict[yy] for yy in rhs_b] # convert each right hand side to its index
lhs_idx = [(lhs_dict[yy] + 1) if yy in lhs_dict else 0 for yy in lhs_b] # if not in dict, set to 0
rel_in = np.zeros((batch_size)).astype('int32')
rhs_in = np.zeros((batch_size)).astype('int32')
lhs_emb_in = np.zeros((batch_size)).astype('int32')
for idx in range(batch_size):
rel_in[idx] = rel_idx[idx]
rhs_in[idx] = rhs_idx[idx]
lhs_emb_in[idx] = lhs_idx[idx]
# random index as the negative triples
rhsn_in = np.random.randint(len(rhs_dict), size=batch_size).astype('int32')
lhsn_emb_in = np.random.randint(len(lhs_dict) + 1, size=batch_size).astype('int32')
return lhs_in, lhs_mask, lhsn_in, lhsn_mask, lhs_emb_in, lhsn_emb_in, rel_in, rhs_in, rhsn_in
def prepare_data_tr(lhs_b, rel_b, rhs_b, chardict, lhs_dict, rel_dict, rhs_dict, n_chars, use_beos=False):
"""
Prepare the data for training - add masks and remove infrequent characters, used for training
"""
batch_size = len(lhs_b)
# rel and rhs
rel_idx = [rel_dict[yy] for yy in rel_b] # convert each relation to its index
rhs_idx = [rhs_dict[yy] for yy in rhs_b] # convert each right hand side to its index
lhs_idx = [(lhs_dict[yy] + 1) if yy in lhs_dict else 0 for yy in lhs_b] # if not in dict, set to 0
rel_in = np.zeros((batch_size)).astype('int32')
rhs_in = np.zeros((batch_size)).astype('int32')
lhs_emb_in = np.zeros((batch_size)).astype('int32')
for idx in range(batch_size):
rel_in[idx] = rel_idx[idx]
rhs_in[idx] = rhs_idx[idx]
lhs_emb_in[idx] = lhs_idx[idx]
# random index as the negative triples
rhsn_in = np.random.randint(len(rhs_dict), size=batch_size).astype('int32')
lhsn_emb_in = np.random.randint(len(lhs_dict) + 1, size=batch_size).astype('int32')
return lhs_emb_in, lhsn_emb_in, rel_in, rhs_in, rhsn_in
def prepare_vs_tr(lhs_b, rel_b, rhs_b, chardict, lhs_dict, rel_dict, rhs_dict, n_chars):
'''
prepare data without generating negative triples, used for validation and testing
'''
batch_size = len(lhs_b)
# rel and rhs
rel_idx = [rel_dict[yy] for yy in rel_b] # convert each relation to its index
rhs_idx = [rhs_dict[yy] if yy in rhs_dict else 0 for yy in rhs_b] # convert each right hand side to its index, 0 if not in dict
lhs_idx = [(lhs_dict[yy] + 1) if yy in lhs_dict else 0 for yy in lhs_b] # if not in dict, set to 0
rel_in = np.zeros((batch_size)).astype('int32')
rhs_in = np.zeros((batch_size)).astype('int32')
lhs_emb_in = np.zeros((batch_size)).astype('int32')
for idx in range(batch_size):
rel_in[idx] = rel_idx[idx]
rhs_in[idx] = rhs_idx[idx]
lhs_emb_in[idx] = lhs_idx[idx]
return lhs_emb_in, rel_in, rhs_in
def prepare_vs_nn(lhs_b, rel_b, rhs_b, chardict, lhs_dict, rel_dict, rhs_dict, n_chars):
'''
prepare data without generating negative triples, used for validation and testing
'''
batch_size = len(lhs_b)
lhs_in, lhs_mask = prepare_lhs(lhs_b, chardict, n_chars)
# rel and rhs
rel_idx = [rel_dict[yy] for yy in rel_b] # convert each relation to its index
rhs_idx = [rhs_dict[yy] if yy in rhs_dict else 0 for yy in rhs_b] # convert each right hand side to its index, 0 if not in dict
lhs_idx = [(lhs_dict[yy] + 1) if yy in lhs_dict else 0 for yy in lhs_b] # if not in dict, set to 0
rel_in = np.zeros((batch_size)).astype('int32')
rhs_in = np.zeros((batch_size)).astype('int32')
lhs_emb_in = np.zeros((batch_size)).astype('int32')
for idx in range(batch_size):
rel_in[idx] = rel_idx[idx]
rhs_in[idx] = rhs_idx[idx]
lhs_emb_in[idx] = lhs_idx[idx]
return lhs_in, lhs_mask, lhs_emb_in, rel_in, rhs_in
def prepare_vs(lhs_b, rel_b, rhs_b, chardict, lhs_dict, rel_dict, rhs_dict, n_chars):
'''
prepare data without generating negative triples, used for validation and testing
'''
batch_size = len(lhs_b)
lhs_in, lhs_mask = prepare_lhs(lhs_b, chardict, n_chars)
# rel and rhs
rel_idx = [rel_dict[yy] for yy in rel_b] # convert each relation to its index
rhs_idx = [rhs_dict[yy] if yy in rhs_dict else 0 for yy in rhs_b] # convert each right hand side to its index, 0 if not in dict
rel_in = np.zeros((batch_size)).astype('int32')
rhs_in = np.zeros((batch_size)).astype('int32')
for idx in range(batch_size):
rel_in[idx] = rel_idx[idx]
rhs_in[idx] = rhs_idx[idx]
return lhs_in, lhs_mask, rel_in, rhs_in
def prepare_lhs(lhs_b, chardict, n_chars):
'''
prepare left hand side (or negative left hand side) given a list of words, used as a subroutine of prepare_data
'''
lhs_idx = []
for cc in lhs_b:
current = list(cc)
lhs_idx.append([chardict[c] if c in chardict and chardict[c] <= n_chars else 0 for c in current])
len_lhs = [len(s) for s in lhs_idx]
max_length = max(len_lhs)
n_samples = len(lhs_idx)
# positive lhs
lhs_in = np.zeros((n_samples,max_length)).astype('int32')
lhs_mask = np.zeros((n_samples,max_length)).astype('float32')
for idx, lhs_idx_i in enumerate(lhs_idx):
lhs_in[idx,:len_lhs[idx]] = lhs_idx_i
lhs_mask[idx,:len_lhs[idx]] = 1.
return lhs_in, lhs_mask
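# A small worked example for prepare_lhs, assuming the hypothetical
# dictionary chardict = {'a': 1, 'b': 2, 'c': 3} and n_chars = 3:
#
#     lhs_in, lhs_mask = prepare_lhs(['ab', 'abc'], chardict, 3)
#
# pads each word to the longest length in the batch, giving
#
#     lhs_in   = [[1, 2, 0],       # 0 marks padding / unknown characters
#                 [1, 2, 3]]
#     lhs_mask = [[1., 1., 0.],
#                 [1., 1., 1.]]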
def build_char_dictionary(text):
"""
Build a character dictionary
"""
charcount = OrderedDict()
for cc in text:
chars = list(cc)
for c in chars:
if c not in charcount:
charcount[c] = 0
charcount[c] += 1
chars = charcount.keys()
freqs = charcount.values()
sorted_idx = np.argsort(freqs)[::-1]
chardict = OrderedDict()
for idx, sidx in enumerate(sorted_idx):
chardict[chars[sidx]] = idx + 1
return chardict, charcount
def build_entity_dictionary(targets):
"""
Build a label dictionary
"""
labelcount = OrderedDict()
for l in targets:
if l not in labelcount:
labelcount[l] = 0
labelcount[l] += 1
labels = labelcount.keys()
freqs = labelcount.values()
sorted_idx = np.argsort(freqs)[::-1]
labeldict = OrderedDict()
for idx, sidx in enumerate(sorted_idx):
labeldict[labels[sidx]] = idx
return labeldict, labelcount
def save_dictionary(worddict, wordcount, loc):
"""
Save a dictionary to the specified location
"""
with open(loc, 'w') as f:
pkl.dump(worddict, f)
pkl.dump(wordcount, f)
def load_labeled_entities(f): # split each line into lhs, rel and rhs
lhs = []
rel = []
rhs = []
for line in f:
entities = line.rstrip().split('\t')
if len(entities) != 3:
continue
lhs.append(entities[0])
rel.append(entities[1])
rhs.append(entities[2])
return lhs, rel, rhs
|
bsd-3-clause
| 157,359,887,290,219,680
| 36.375912
| 131
| 0.620252
| false
| 3.087428
| false
| false
| false
|
ustclug/lug-vpn-web
|
scripts/migrate.py
|
1
|
1829
|
#!/usr/bin/env python3
# encoding: utf-8
import MySQLdb
import random
import hashlib
import string
db = MySQLdb.connect(host=input('host:'),
user=input('user:'),
passwd=input('password:'),
db=input('db:'))
db.autocommit(True)
cur = db.cursor()
cur.execute("rename table `user` to `user_bak`")
cur.execute("""
CREATE TABLE `user` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`email` varchar(63) DEFAULT NULL,
`passwordhash` varchar(127) NOT NULL,
`salt` varchar(127) NOT NULL,
`active` tinyint(1) DEFAULT NULL,
`admin` tinyint(1) DEFAULT NULL,
`status` enum('none','applying','pass','reject','banned') DEFAULT NULL,
`name` varchar(127) DEFAULT NULL,
`studentno` varchar(127) DEFAULT NULL,
`phone` varchar(127) DEFAULT NULL,
`reason` text,
`applytime` datetime DEFAULT NULL,
`vpnpassword` varchar(127) DEFAULT NULL,
`rejectreason` text,
`banreason` text,
PRIMARY KEY (`id`),
UNIQUE KEY `email` (`email`)
) CHARSET=utf8
""")
cur.execute("""
insert into user
(`id`,`email`,`active`,`admin`,`status`,`name`,`studentno`,`phone`,`reason`,`applytime`,`vpnpassword`,`passwordhash`,`salt`)
select `id`,`email`,`active`,`admin`,`apply`,`name`,`studentno`,`phone`,`reason`,`applytime`,
(select `value` from `radcheck` where username=user_bak.email),'',''
from user_bak
where 1
""")
cur.execute('select id,password from user_bak')
for row in cur.fetchall():
id = row[0]
p = row[1]
salt = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for i in range(10))
s = hashlib.sha256()
s.update(p.encode('utf-8'))
s.update(salt.encode('utf-8'))
passwordhash = s.hexdigest()
cur.execute('update user set passwordhash=%s,salt=%s where id=%s', (passwordhash, salt, id))
db.close()
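# For reference, a sketch of how a login check could reproduce the hashes
# computed above (illustrative only; the web application's verification
# code lives elsewhere, and candidate_password is a placeholder):
#
#     s = hashlib.sha256()
#     s.update(candidate_password.encode('utf-8'))
#     s.update(salt.encode('utf-8'))
#     assert s.hexdigest() == passwordhash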
|
agpl-3.0
| -2,951,162,330,318,354,400
| 28.031746
| 124
| 0.651722
| false
| 3.16436
| false
| false
| false
|
goujonpa/chateaumagondeau
|
website/magondeau/settings.py
|
1
|
2870
|
"""
Django settings for magondeau project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'i)v6vy5a8*c%ndm3)3%0knp-a#tg7iyczh^7muntb-%qbrb(d9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'home',
'news',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'magondeau.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'magondeau.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'magondeau',
'USER': 'Polo',
'PASSWORD': '',
'HOST': '',
'PORT': '5432',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'fr-FR'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
|
mit
| 8,085,411,588,434,776,000
| 24.175439
| 71
| 0.672125
| false
| 3.424821
| false
| false
| false
|
amkusmec/snptools
|
src/filter.py
|
1
|
10883
|
# -*- coding: utf-8 -*-
"""
Created on Wed May 27 14:47:00 2015
@author: aaron
"""
import argparse
import textwrap
import timeit
import os
from snptools import *
#########################################################
#### Need to add retention list filtering for DSF and PED
#########################################################
###############################################################################
def version():
v0 = """
############################################################################
filter V1.1
(c) 2015 Aaron Kusmec
N.B. VCF functionality is experimental. Use at your own risk.
Filter SNPs based on missing rates/minor allele frequencies.
Input modes,
1 = .dsf
2 = .hmp.txt
3 = .ped (PLINK)
4 = .vcf
Usage: python3 filter.py -s example.stat -i example.dsf -o filtered -mi 1 -n 0.6 -f 0.05
NOTE1: Retaining SNPs through a SNP list is currently only supported for HMP
files.
NOTE2: Using a SNP list cannot currently be combined with MAF/miss filtering.
############################################################################
"""
return v0
#############################################################################
def get_parser():
parser = argparse.ArgumentParser(
formatter_class = argparse.RawDescriptionHelpFormatter,
description = textwrap.dedent(version()))
parser.add_argument('-p', '--path', help = 'Path of the input file', \
nargs = '?', default = os.getcwd())
parser.add_argument('-s', '--stat', help = 'Stat file', type = str)
parser.add_argument('-i', '--input', help = 'Input file', type = str)
parser.add_argument('-o', '--output', help = 'Output file (no ext)', type = str)
parser.add_argument('-mi', '--modei', help = 'Input (and output) mode', type = int)
parser.add_argument('-n', '--miss', help = 'Max missing rate', \
type = float, default = 1.0)
parser.add_argument('-f', '--maf', help = 'Minimum minor allele frequency',\
type = float, default = 0.0)
parser.add_argument('-ht', '--het', help = 'Maximum heterozygosity', type = float, default = 1.0)
parser.add_argument('-r', '--retain', help = 'List of SNPs to retain', type = str, default = None)
return parser
###############################################################################
def getStats(filename):
print("Reading [ ", filename, " ].")
stats = {}
with open(filename, 'r') as infile:
header = infile.readline()
for line in infile:
line = line.split()
stats[line[0]] = [float(line[5]), float(line[6]), float(line[7])]
return stats
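# Note: the .stat file is assumed to be whitespace-delimited with the SNP
# name in column 1 and the missing rate, minor allele frequency, and
# heterozygosity in columns 6-8 (the indices used above); this layout is
# inferred from the code, not from a published format specification.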
###############################################################################
def filterDsf(inname, outname, stats, miss, maf, het):
print("Filtering [ ", inname, " ].")
infile = open(inname, 'r')
keepfile = open(outname + ".dsf", 'w')
filtfile = open(outname + "_filtered.dsf", 'w')
header = infile.readline().split()
keepfile.write('\t'.join(header) + '\n')
filtfile.write('\t'.join(header) + '\n')
kept = filt = counter = 0
for snp in infile:
snp = snp.split()
if snp[0] not in stats:
warning(snp[0] + " is not present in .stat file.")
# Filter or keep
if stats[snp[0]][0] <= miss and stats[snp[0]][1] >= maf and stats[snp[0]][2] <= het:
keepfile.write('\t'.join(snp) + '\n')
kept += 1
else:
filtfile.write('\t'.join(snp) + '\n')
filt += 1
counter += 1
if counter % 1e5 == 0:
print("Processed [ ", str(counter), " ] SNPs.", end = '\r')
infile.close()
keepfile.close()
filtfile.close()
print()
print("Kept [ ", str(kept), " ] SNPs in [ ", outname + ".dsf", " ].")
print("Removed [ ", str(filt), " ] SNPs to [ ", outname + "_filtered.dsf", " ].")
###############################################################################
def filterHmp(inname, outname, stats, miss, maf, het, retain):
print("Filtering [ ", inname, " ].")
infile = open(inname, 'r')
keepfile = open(outname + ".hmp.txt", 'w')
filtfile = open(outname + "_filtered.hmp.txt", 'w')
header = infile.readline().split()
keepfile.write('\t'.join(header) + '\n')
filtfile.write('\t'.join(header) + '\n')
kept = filt = counter = 0
for snp in infile:
snp = snp.split()
if snp[0] not in stats:
warning(snp[0] + " is not present in .stat file.")
if retain is not None:
if snp[0] in retain:
keepfile.write('\t'.join(snp) + '\n')
kept += 1
else:
filtfile.write('\t'.join(snp) + '\n')
filt += 1
else:
# Filter or keep
if stats[snp[0]][0] <= miss and stats[snp[0]][1] >= maf and stats[snp[0]][2] <= het:
keepfile.write('\t'.join(snp) + '\n')
kept += 1
else:
filtfile.write('\t'.join(snp) + '\n')
filt += 1
counter += 1
if counter % 1e5 == 0:
print("Processed [ ", str(counter), " ] SNPs.", end = '\r')
infile.close()
keepfile.close()
filtfile.close()
print()
print("Kept [ ", str(kept), " ] SNPs in [ ", outname + ".hmp.txt", " ].")
print("Removed [ ", str(filt), " ] SNPs to [ ", outname + "_filtered.hmp.txt", " ].")
###############################################################################
def filterPed(inname, outname, stats, miss, maf, het):
# Read the .map file and verify that it contains the same SNPs
# as the .stat file.
mapname = inname.split('.')[0] + ".map"
print("Verifying [ ", mapname, " ].")
smap = []
with open(mapname, 'r') as mapfile:
for line in mapfile:
line = line.split()
if line[1] in stats:
smap.append(line)
else:
warning(line[1] + " is not present in .stat file.")
# Read the entire .ped file into memory and transpose
snps = []
print("Reading [ ", inname, " ].")
with open(inname, 'r') as infile:
for line in infile:
snps.append(line.strip().split('\t'))
snps = zip(*snps)
# Setup the output lists and process the metadata
ksnps = []; kmap = []
fsnps = []; fmap = []
for _ in range(6):
m = next(snps)
ksnps.append(m)
fsnps.append(m)
# Filter or keep
kept = filt = counter = 0
for index, value in enumerate(snps):
if stats[smap[index][1]][0] <= miss and stats[smap[index][1]][1] >= maf and stats[smap[index][1]][2] <= het:
ksnps.append(value)
kmap.append(smap[index])
kept += 1
else:
fsnps.append(value)
fmap.append(smap[index])
filt += 1
counter += 1
if counter % 1e5 == 0:
print("Processed [ ", str(counter), " ] SNPs.", end = '\r')
# Report the results and write the output
print()
print("Kept [ ", str(kept), " ] SNPs in [ ", outname + ".ped", " ].")
ksnps = zip(*ksnps)
with open(outname + ".ped", 'w') as outfile:
for k in ksnps:
outfile.write('\t'.join(k) + '\n')
with open(outname + ".map", 'w') as outfile:
for k in kmap:
outfile.write('\t'.join(k) + '\n')
print("Removed [ ", str(filt), " ] SNPs to [ ", outname + "_filtered.ped", " ].")
fsnps = zip(*fsnps)
with open(outname + "_filtered.ped", 'w') as outfile:
for f in fsnps:
outfile.write('\t'.join(f) + '\n')
with open(outname + "_filtered.map", 'w') as outfile:
for f in fmap:
outfile.write('\t'.join(f) + '\n')
###############################################################################
def filterVcf(inname, outname, stats, miss, maf, het):
print("Filtering [ ", inname, " ].")
infile = open(inname, 'r')
keepfile = open(outname + ".vcf", 'w')
filtfile = open(outname + "_filtered.vcf", 'w')
kept = filt = counter = 0
for snp in infile:
snp = snp.strip()
if snp[0] == "#":
keepfile.write(snp + '\n')
filtfile.write(snp + '\n')
continue
# Filter or keep
snp = snp.split()
if snp[2] not in stats:
warning(snp[2] + " is not present in .stat file.")
if stats[snp[2]][0] <= miss and stats[snp[2]][1] >= maf and stats[snp[2]][2] <= het:
keepfile.write('\t'.join(snp) + '\n')
kept += 1
else:
filtfile.write('\t'.join(snp) + '\n')
filt += 1
counter += 1
if counter % 1e5 == 0:
print("Processed [ ", str(counter), " ] SNPs.", end = '\r')
infile.close()
keepfile.close()
filtfile.close()
print()
print("Kept [ ", str(kept), " ] SNPs in [ ", outname + ".vcf", " ].")
print("Removed [ ", str(filt), " ] SNPs to [ ", outname + "_filtered.vcf", " ].")
###############################################################################
def getRetain(filename):
retain = {}
with open(filename, 'r') as infile:
for line in infile:
retain[line.strip()] = True
return retain
###############################################################################
if __name__ == '__main__':
parser = get_parser()
args = vars(parser.parse_args())
# Change the working directory if necessary
if args['path'] is not None:
os.chdir(args['path'])
if args['input'] is None:
warning("No input file.")
if args['output'] is None:
warning("No output file.")
print(version())
st = timeit.default_timer()
# Check input file
checkFile(args['input'], args['modei'])
stats = getStats(args['stat'])
if args['retain'] is not None:
retain = getRetain(args['retain'])
else:
retain = None
if args['modei'] == 1:
filterDsf(args['input'], args['output'], stats, args['miss'], args['maf'], args['het'])
elif args['modei'] == 2:
filterHmp(args['input'], args['output'], stats, args['miss'], args['maf'], args['het'], retain)
elif args['modei'] == 3:
filterPed(args['input'], args['output'], stats, args['miss'], args['maf'], args['het'])
elif args['modei'] == 4:
filterVcf(args['input'], args['output'], stats, args['miss'], args['maf'], args['het'])
else:
warning("Unrecognized input mode.")
et = timeit.default_timer()
print("Filtering finished.")
print("Time: %.2f min." % ((et - st)/60))
|
mit
| -6,806,972,596,291,616,000
| 32.798137
| 116
| 0.473307
| false
| 3.639799
| false
| false
| false
|
dsnopek/anki-sync-server
|
tests/test_rest_app.py
|
1
|
22180
|
# -*- coding: utf-8 -*-
import os
import shutil
import tempfile
import unittest
import logging
import time
from pprint import pprint
import mock
from mock import MagicMock
import AnkiServer
from AnkiServer.collection import CollectionManager
from AnkiServer.apps.rest_app import RestApp, RestHandlerRequest, CollectionHandler, ImportExportHandler, NoteHandler, ModelHandler, DeckHandler, CardHandler
from CollectionTestBase import CollectionTestBase
from webob.exc import *
import anki
import anki.storage
class RestAppTest(unittest.TestCase):
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
self.collection_manager = CollectionManager()
self.rest_app = RestApp(self.temp_dir, collection_manager=self.collection_manager)
# disable all but critical errors!
logging.disable(logging.CRITICAL)
def tearDown(self):
self.collection_manager.shutdown()
self.collection_manager = None
self.rest_app = None
shutil.rmtree(self.temp_dir)
def test_list_collections(self):
os.mkdir(os.path.join(self.temp_dir, 'test1'))
os.mkdir(os.path.join(self.temp_dir, 'test2'))
with open(os.path.join(self.temp_dir, 'test1', 'collection.anki2'), 'wt') as fd:
fd.write('Testing!')
self.assertEqual(self.rest_app.list_collections(), ['test1'])
def test_parsePath(self):
tests = [
('collection/user', ('collection', 'index', ['user'])),
('collection/user/handler', ('collection', 'handler', ['user'])),
('collection/user/note/123', ('note', 'index', ['user', '123'])),
('collection/user/note/123/handler', ('note', 'handler', ['user', '123'])),
('collection/user/deck/name', ('deck', 'index', ['user', 'name'])),
('collection/user/deck/name/handler', ('deck', 'handler', ['user', 'name'])),
#('collection/user/deck/name/card/123', ('card', 'index', ['user', 'name', '123'])),
#('collection/user/deck/name/card/123/handler', ('card', 'handler', ['user', 'name', '123'])),
('collection/user/card/123', ('card', 'index', ['user', '123'])),
('collection/user/card/123/handler', ('card', 'handler', ['user', '123'])),
# the leading slash should make no difference!
('/collection/user', ('collection', 'index', ['user'])),
]
for path, result in tests:
self.assertEqual(self.rest_app._parsePath(path), result)
def test_parsePath_not_found(self):
tests = [
'bad',
'bad/oaeu',
'collection',
'collection/user/handler/bad',
'',
'/',
]
for path in tests:
self.assertRaises(HTTPNotFound, self.rest_app._parsePath, path)
def test_getCollectionPath(self):
def fullpath(collection_id):
return os.path.normpath(os.path.join(self.temp_dir, collection_id, 'collection.anki2'))
# This is simple and straight forward!
self.assertEqual(self.rest_app._getCollectionPath('user'), fullpath('user'))
# These are dangerous - the user is trying to hack us!
dangerous = ['../user', '/etc/passwd', '/tmp/aBaBaB', '/root/.ssh/id_rsa']
for collection_id in dangerous:
self.assertRaises(HTTPBadRequest, self.rest_app._getCollectionPath, collection_id)
def test_getHandler(self):
def handlerOne():
pass
def handlerTwo():
pass
handlerTwo.hasReturnValue = False
self.rest_app.add_handler('collection', 'handlerOne', handlerOne)
self.rest_app.add_handler('deck', 'handlerTwo', handlerTwo)
(handler, hasReturnValue) = self.rest_app._getHandler('collection', 'handlerOne')
self.assertEqual(handler, handlerOne)
self.assertEqual(hasReturnValue, True)
(handler, hasReturnValue) = self.rest_app._getHandler('deck', 'handlerTwo')
self.assertEqual(handler, handlerTwo)
self.assertEqual(hasReturnValue, False)
# try some bad handler names and types
self.assertRaises(HTTPNotFound, self.rest_app._getHandler, 'collection', 'nonExistantHandler')
self.assertRaises(HTTPNotFound, self.rest_app._getHandler, 'nonExistantType', 'handlerOne')
def test_parseRequestBody(self):
req = MagicMock()
req.body = '{"key":"value"}'
data = self.rest_app._parseRequestBody(req)
self.assertEqual(data, {'key': 'value'})
self.assertEqual(data.keys(), ['key'])
self.assertEqual(type(data.keys()[0]), str)
# test some bad data
req.body = '{aaaaaaa}'
self.assertRaises(HTTPBadRequest, self.rest_app._parseRequestBody, req)
class CollectionHandlerTest(CollectionTestBase):
def setUp(self):
super(CollectionHandlerTest, self).setUp()
self.handler = CollectionHandler()
def execute(self, name, data):
ids = ['collection_name']
func = getattr(self.handler, name)
req = RestHandlerRequest(self.mock_app, data, ids, {})
return func(self.collection, req)
def test_list_decks(self):
data = {}
ret = self.execute('list_decks', data)
# It contains only the 'Default' deck
self.assertEqual(len(ret), 1)
self.assertEqual(ret[0]['name'], 'Default')
def test_select_deck(self):
data = {'deck': 1}
ret = self.execute('select_deck', data)
        self.assertEqual(ret, None)
def test_create_dynamic_deck_simple(self):
self.add_default_note(5)
data = {
'name': 'Dyn deck',
'mode': 'random',
'count': 2,
'query': "deck:\"Default\" (tag:'Tag1' or tag:'Tag2') (-tag:'Tag3')",
}
ret = self.execute('create_dynamic_deck', data)
self.assertEqual(ret['name'], 'Dyn deck')
self.assertEqual(ret['dyn'], True)
cards = self.collection.findCards('deck:"Dyn deck"')
self.assertEqual(len(cards), 2)
def test_list_models(self):
data = {}
ret = self.execute('list_models', data)
# get a sorted name list that we can actually check
names = [model['name'] for model in ret]
names.sort()
# These are the default models created by Anki in a new collection
default_models = [
'Basic',
'Basic (and reversed card)',
'Basic (optional reversed card)',
'Cloze'
]
self.assertEqual(names, default_models)
def test_find_model_by_name(self):
data = {'model': 'Basic'}
ret = self.execute('find_model_by_name', data)
self.assertEqual(ret['name'], 'Basic')
def test_find_notes(self):
ret = self.execute('find_notes', {})
self.assertEqual(ret, [])
# add a note programatically
self.add_default_note()
# get the id for the one note on this collection
note_id = self.collection.findNotes('')[0]
ret = self.execute('find_notes', {})
self.assertEqual(ret, [{'id': note_id}])
ret = self.execute('find_notes', {'query': 'tag:Tag1'})
self.assertEqual(ret, [{'id': note_id}])
ret = self.execute('find_notes', {'query': 'tag:TagX'})
self.assertEqual(ret, [])
ret = self.execute('find_notes', {'preload': True})
self.assertEqual(len(ret), 1)
self.assertEqual(ret[0]['id'], note_id)
self.assertEqual(ret[0]['model']['name'], 'Basic')
def test_add_note(self):
# make sure there are no notes (yet)
self.assertEqual(self.collection.findNotes(''), [])
# add a note programatically
note = {
'model': 'Basic',
'fields': {
'Front': 'The front',
'Back': 'The back',
},
'tags': "Tag1 Tag2",
}
self.execute('add_note', note)
notes = self.collection.findNotes('')
self.assertEqual(len(notes), 1)
note_id = notes[0]
note = self.collection.getNote(note_id)
self.assertEqual(note.model()['name'], 'Basic')
self.assertEqual(note['Front'], 'The front')
self.assertEqual(note['Back'], 'The back')
self.assertEqual(note.tags, ['Tag1', 'Tag2'])
def test_list_tags(self):
ret = self.execute('list_tags', {})
self.assertEqual(ret, [])
self.add_default_note()
ret = self.execute('list_tags', {})
ret.sort()
self.assertEqual(ret, ['Tag1', 'Tag2'])
def test_set_language(self):
import anki.lang
self.assertEqual(anki.lang._('Again'), 'Again')
try:
data = {'code': 'pl'}
self.execute('set_language', data)
self.assertEqual(anki.lang._('Again'), u'Znowu')
finally:
# return everything to normal!
anki.lang.setLang('en')
def test_reset_scheduler(self):
self.add_default_note(3)
ret = self.execute('reset_scheduler', {'deck': 'Default'})
self.assertEqual(ret, {
'new_cards': 3,
'learning_cards': 0,
'review_cards': 0,
})
def test_next_card(self):
ret = self.execute('next_card', {})
self.assertEqual(ret, None)
# add a note programatically
self.add_default_note()
# get the id for the one card and note on this collection
note_id = self.collection.findNotes('')[0]
card_id = self.collection.findCards('')[0]
self.collection.sched.reset()
ret = self.execute('next_card', {})
self.assertEqual(ret['id'], card_id)
self.assertEqual(ret['nid'], note_id)
self.assertEqual(ret['css'], '<style>.card {\n font-family: arial;\n font-size: 20px;\n text-align: center;\n color: black;\n background-color: white;\n}\n</style>')
self.assertEqual(ret['question'], 'The front')
self.assertEqual(ret['answer'], 'The front\n\n<hr id=answer>\n\nThe back')
self.assertEqual(ret['answer_buttons'], [
{'ease': 1,
'label': 'Again',
'string_label': 'Again',
'interval': 60,
'string_interval': '<1 minute'},
{'ease': 2,
'label': 'Good',
'string_label': 'Good',
'interval': 600,
'string_interval': '<10 minutes'},
{'ease': 3,
'label': 'Easy',
'string_label': 'Easy',
'interval': 345600,
'string_interval': '4 days'}])
def test_next_card_translation(self):
# add a note programatically
self.add_default_note()
# get the card in Polish so we can test translation too
anki.lang.setLang('pl')
try:
ret = self.execute('next_card', {})
finally:
anki.lang.setLang('en')
self.assertEqual(ret['answer_buttons'], [
{'ease': 1,
'label': 'Again',
'string_label': u'Znowu',
'interval': 60,
'string_interval': '<1 minuta'},
{'ease': 2,
'label': 'Good',
'string_label': u'Dobra',
'interval': 600,
'string_interval': '<10 minut'},
{'ease': 3,
'label': 'Easy',
'string_label': u'Łatwa',
'interval': 345600,
'string_interval': '4 dni'}])
def test_next_card_five_times(self):
self.add_default_note(5)
for idx in range(0, 5):
ret = self.execute('next_card', {})
self.assertTrue(ret is not None)
def test_answer_card(self):
import time
self.add_default_note()
# instantiate a deck handler to get the card
card = self.execute('next_card', {})
self.assertEqual(card['reps'], 0)
self.execute('answer_card', {'id': card['id'], 'ease': 2, 'timerStarted': time.time()})
# reset the scheduler and try to get the next card again - there should be none!
self.collection.sched.reset()
card = self.execute('next_card', {})
self.assertEqual(card['reps'], 1)
def test_suspend_cards(self):
# add a note programatically
self.add_default_note()
# get the id for the one card on this collection
card_id = self.collection.findCards('')[0]
# suspend it
self.execute('suspend_cards', {'ids': [card_id]})
# test that getting the next card will be None
card = self.collection.sched.getCard()
self.assertEqual(card, None)
# unsuspend it
self.execute('unsuspend_cards', {'ids': [card_id]})
# test that now we're getting the next card!
self.collection.sched.reset()
card = self.collection.sched.getCard()
self.assertEqual(card.id, card_id)
def test_cards_recent_ease(self):
self.add_default_note()
card_id = self.collection.findCards('')[0]
# answer the card
self.collection.reset()
card = self.collection.sched.getCard()
card.startTimer()
# answer multiple times to see that we only get the latest!
self.collection.sched.answerCard(card, 1)
self.collection.sched.answerCard(card, 3)
self.collection.sched.answerCard(card, 2)
# pull the latest revision
ret = self.execute('cards_recent_ease', {})
self.assertEqual(ret[0]['id'], card_id)
self.assertEqual(ret[0]['ease'], 2)
class ImportExportHandlerTest(CollectionTestBase):
export_rows = [
['Card front 1', 'Card back 1', 'Tag1 Tag2'],
['Card front 2', 'Card back 2', 'Tag1 Tag3'],
]
def setUp(self):
super(ImportExportHandlerTest, self).setUp()
self.handler = ImportExportHandler()
def execute(self, name, data):
ids = ['collection_name']
func = getattr(self.handler, name)
req = RestHandlerRequest(self.mock_app, data, ids, {})
return func(self.collection, req)
def generate_text_export(self):
# Create a simple export file
export_data = ''
for row in self.export_rows:
export_data += '\t'.join(row) + '\n'
export_path = os.path.join(self.temp_dir, 'export.txt')
        with open(export_path, 'wt') as fd:
fd.write(export_data)
return (export_data, export_path)
def check_import(self):
note_ids = self.collection.findNotes('')
notes = [self.collection.getNote(note_id) for note_id in note_ids]
self.assertEqual(len(notes), len(self.export_rows))
for index, test_data in enumerate(self.export_rows):
self.assertEqual(notes[index]['Front'], test_data[0])
self.assertEqual(notes[index]['Back'], test_data[1])
self.assertEqual(' '.join(notes[index].tags), test_data[2])
def test_import_text_data(self):
(export_data, export_path) = self.generate_text_export()
data = {
'filetype': 'text',
'data': export_data,
}
ret = self.execute('import_file', data)
self.check_import()
def test_import_text_url(self):
(export_data, export_path) = self.generate_text_export()
data = {
'filetype': 'text',
'url': 'file://' + os.path.realpath(export_path),
}
ret = self.execute('import_file', data)
self.check_import()
class NoteHandlerTest(CollectionTestBase):
def setUp(self):
super(NoteHandlerTest, self).setUp()
self.handler = NoteHandler()
def execute(self, name, data, note_id):
ids = ['collection_name', note_id]
func = getattr(self.handler, name)
req = RestHandlerRequest(self.mock_app, data, ids, {})
return func(self.collection, req)
def test_index(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
ret = self.execute('index', {}, note_id)
self.assertEqual(ret['id'], note_id)
self.assertEqual(len(ret['fields']), 2)
self.assertEqual(ret['flags'], 0)
self.assertEqual(ret['model']['name'], 'Basic')
self.assertEqual(ret['tags'], ['Tag1', 'Tag2'])
self.assertEqual(ret['string_tags'], 'Tag1 Tag2')
self.assertEqual(ret['usn'], -1)
def test_update(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
data = self.execute('index', {}, note_id)
data['fields']['Front'] = 'The new front'
data['fields']['Back'] = 'The new back'
data['tags'] = ['new1', 'new2']
self.execute('update', data, note_id)
note = self.collection.getNote(note_id)
self.assertEqual(note['Front'], data['fields']['Front'])
self.assertEqual(note['Back'], data['fields']['Back'])
self.assertEqual(note.tags, data['tags'])
def test_delete(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
res = self.collection.findNotes('nid:%s' % note_id)
self.assertNotEqual(res, [])
self.execute('delete', {}, note_id)
res = self.collection.findNotes('nid:%s' % note_id)
self.assertEqual(res, [])
def test_add_tags(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
note = self.collection.getNote(note_id)
old_mod = note.mod
self.assertFalse('NT1' in note.tags)
self.assertFalse('NT2' in note.tags)
time.sleep(1)
self.execute('add_tags', {'tags': ['NT1', 'NT2']}, note_id)
note = self.collection.getNote(note_id)
self.assertTrue('NT1' in note.tags)
self.assertTrue('NT2' in note.tags)
self.assertTrue(note.mod > old_mod)
def test_add_tags_no_mod_update(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
note = self.collection.getNote(note_id)
old_mod = note.mod
self.assertFalse('NT1' in note.tags)
self.assertFalse('NT2' in note.tags)
time.sleep(1)
self.execute('add_tags', {'tags': ['NT1', 'NT2'], 'update_mod': False}, note_id)
note = self.collection.getNote(note_id)
self.assertTrue('NT1' in note.tags)
self.assertTrue('NT2' in note.tags)
self.assertEqual(note.mod, old_mod)
def test_remove_tags(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
note = self.collection.getNote(note_id)
old_mod = note.mod
self.assertTrue('Tag1' in note.tags)
self.assertTrue('Tag2' in note.tags)
time.sleep(1)
self.execute('remove_tags', {'tags': ['Tag1', 'Tag2']}, note_id)
note = self.collection.getNote(note_id)
self.assertFalse('Tag1' in note.tags)
self.assertFalse('Tag2' in note.tags)
self.assertTrue(note.mod > old_mod)
def test_remove_tags_no_mod_update(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
note = self.collection.getNote(note_id)
old_mod = note.mod
self.assertTrue('Tag1' in note.tags)
self.assertTrue('Tag2' in note.tags)
time.sleep(1)
self.execute('remove_tags', {'tags': ['Tag1', 'Tag2'], 'update_mod': False}, note_id)
note = self.collection.getNote(note_id)
self.assertFalse('Tag1' in note.tags)
self.assertFalse('Tag2' in note.tags)
self.assertEqual(note.mod, old_mod)
class DeckHandlerTest(CollectionTestBase):
def setUp(self):
super(DeckHandlerTest, self).setUp()
self.handler = DeckHandler()
def execute(self, name, data):
ids = ['collection_name', '1']
func = getattr(self.handler, name)
req = RestHandlerRequest(self.mock_app, data, ids, {})
return func(self.collection, req)
def test_index(self):
ret = self.execute('index', {})
#pprint(ret)
self.assertEqual(ret['name'], 'Default')
self.assertEqual(ret['id'], 1)
self.assertEqual(ret['dyn'], False)
def test_next_card(self):
self.mock_app.execute_handler.return_value = None
ret = self.execute('next_card', {})
self.assertEqual(ret, None)
self.mock_app.execute_handler.assert_called_with('collection', 'next_card', self.collection, RestHandlerRequest(self.mock_app, {'deck': '1'}, ['collection_name'], {}))
def test_get_conf(self):
ret = self.execute('get_conf', {})
#pprint(ret)
self.assertEqual(ret['name'], 'Default')
self.assertEqual(ret['id'], 1)
self.assertEqual(ret['dyn'], False)
class CardHandlerTest(CollectionTestBase):
def setUp(self):
super(CardHandlerTest, self).setUp()
self.handler = CardHandler()
def execute(self, name, data, card_id):
ids = ['collection_name', card_id]
func = getattr(self.handler, name)
req = RestHandlerRequest(self.mock_app, data, ids, {})
return func(self.collection, req)
def test_index_simple(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
card_id = self.collection.findCards('')[0]
ret = self.execute('index', {}, card_id)
self.assertEqual(ret['id'], card_id)
self.assertEqual(ret['nid'], note_id)
self.assertEqual(ret['did'], 1)
        self.assertNotIn('note', ret)
        self.assertNotIn('deck', ret)
def test_index_load(self):
self.add_default_note()
note_id = self.collection.findNotes('')[0]
card_id = self.collection.findCards('')[0]
ret = self.execute('index', {'load_note': 1, 'load_deck': 1}, card_id)
self.assertEqual(ret['id'], card_id)
self.assertEqual(ret['nid'], note_id)
self.assertEqual(ret['did'], 1)
self.assertEqual(ret['note']['id'], note_id)
self.assertEqual(ret['note']['model']['name'], 'Basic')
self.assertEqual(ret['deck']['name'], 'Default')
if __name__ == '__main__':
unittest.main()
|
agpl-3.0
| 4,325,671,205,340,881,400
| 33.546729
| 175
| 0.579061
| false
| 3.711345
| true
| false
| false
|
chrislit/abydos
|
abydos/distance/_damerau_levenshtein.py
|
1
|
7982
|
# Copyright 2014-2020 by Christopher C. Little.
# This file is part of Abydos.
#
# Abydos is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Abydos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Abydos. If not, see <http://www.gnu.org/licenses/>.
"""abydos.distance._damerau_levenshtein.
Damerau-Levenshtein distance
"""
from sys import maxsize
from typing import Any, Callable, List, Tuple, cast
from numpy import int_ as np_int
from numpy import zeros as np_zeros
from ._distance import _Distance
__all__ = [
'DamerauLevenshtein',
]
class DamerauLevenshtein(_Distance):
"""Damerau-Levenshtein distance.
This computes the Damerau-Levenshtein distance :cite:`Damerau:1964`.
Damerau-Levenshtein code is based on Java code by Kevin L. Stern
:cite:`Stern:2014`, under the MIT license:
https://github.com/KevinStern/software-and-algorithms/blob/master/src/main/java/blogspot/software_and_algorithms/stern_library/string/DamerauLevenshteinAlgorithm.java
"""
def __init__(
self,
cost: Tuple[float, float, float, float] = (1, 1, 1, 1),
normalizer: Callable[[List[float]], float] = max,
**kwargs: Any
):
"""Initialize Levenshtein instance.
Parameters
----------
cost : tuple
A 4-tuple representing the cost of the four possible edits:
inserts, deletes, substitutions, and transpositions, respectively
(by default: (1, 1, 1, 1))
normalizer : function
A function that takes an list and computes a normalization term
by which the edit distance is divided (max by default). Another
good option is the sum function.
**kwargs
Arbitrary keyword arguments
.. versionadded:: 0.4.0
"""
super(DamerauLevenshtein, self).__init__(**kwargs)
self._cost = cost
self._normalizer = normalizer
def dist_abs(self, src: str, tar: str) -> float:
"""Return the Damerau-Levenshtein distance between two strings.
Parameters
----------
src : str
Source string for comparison
tar : str
Target string for comparison
Returns
-------
int (may return a float if cost has float values)
The Damerau-Levenshtein distance between src & tar
Raises
------
ValueError
Unsupported cost assignment; the cost of two transpositions must
not be less than the cost of an insert plus a delete.
Examples
--------
>>> cmp = DamerauLevenshtein()
>>> cmp.dist_abs('cat', 'hat')
1
>>> cmp.dist_abs('Niall', 'Neil')
3
>>> cmp.dist_abs('aluminum', 'Catalan')
7
>>> cmp.dist_abs('ATCG', 'TAGC')
2
.. versionadded:: 0.1.0
.. versionchanged:: 0.3.6
Encapsulated in class
"""
ins_cost, del_cost, sub_cost, trans_cost = self._cost
if src == tar:
return 0
if not src:
return len(tar) * ins_cost
if not tar:
return len(src) * del_cost
if 2 * trans_cost < ins_cost + del_cost:
raise ValueError(
'Unsupported cost assignment; the cost of two transpositions '
+ 'must not be less than the cost of an insert plus a delete.'
)
d_mat = np_zeros((len(src), len(tar)), dtype=np_int)
if src[0] != tar[0]:
d_mat[0, 0] = min(sub_cost, ins_cost + del_cost)
src_index_by_character = {src[0]: 0}
for i in range(1, len(src)):
del_distance = d_mat[i - 1, 0] + del_cost
ins_distance = (i + 1) * del_cost + ins_cost
match_distance = i * del_cost + (
0 if src[i] == tar[0] else sub_cost
)
d_mat[i, 0] = min(del_distance, ins_distance, match_distance)
for j in range(1, len(tar)):
del_distance = (j + 1) * ins_cost + del_cost
ins_distance = d_mat[0, j - 1] + ins_cost
match_distance = j * ins_cost + (
0 if src[0] == tar[j] else sub_cost
)
d_mat[0, j] = min(del_distance, ins_distance, match_distance)
for i in range(1, len(src)):
max_src_letter_match_index = 0 if src[i] == tar[0] else -1
for j in range(1, len(tar)):
candidate_swap_index = (
-1
if tar[j] not in src_index_by_character
else src_index_by_character[tar[j]]
)
j_swap = max_src_letter_match_index
del_distance = d_mat[i - 1, j] + del_cost
ins_distance = d_mat[i, j - 1] + ins_cost
match_distance = d_mat[i - 1, j - 1]
if src[i] != tar[j]:
match_distance += sub_cost
else:
max_src_letter_match_index = j
if candidate_swap_index != -1 and j_swap != -1:
i_swap = candidate_swap_index
if i_swap == 0 and j_swap == 0:
pre_swap_cost = 0
else:
pre_swap_cost = d_mat[
max(0, i_swap - 1), max(0, j_swap - 1)
]
swap_distance = (
pre_swap_cost
+ (i - i_swap - 1) * del_cost
+ (j - j_swap - 1) * ins_cost
+ trans_cost
)
else:
swap_distance = maxsize
d_mat[i, j] = min(
del_distance, ins_distance, match_distance, swap_distance
)
src_index_by_character[src[i]] = i
return cast(float, d_mat[len(src) - 1, len(tar) - 1])
def dist(self, src: str, tar: str) -> float:
"""Return the Damerau-Levenshtein similarity of two strings.
Damerau-Levenshtein distance normalized to the interval [0, 1].
The Damerau-Levenshtein distance is normalized by dividing the
Damerau-Levenshtein distance by the greater of
the number of characters in src times the cost of a delete and
the number of characters in tar times the cost of an insert.
For the case in which all operations have :math:`cost = 1`, this is
equivalent to the greater of the length of the two strings src & tar.
Parameters
----------
src : str
Source string for comparison
tar : str
Target string for comparison
Returns
-------
float
The normalized Damerau-Levenshtein distance
Examples
--------
>>> cmp = DamerauLevenshtein()
>>> round(cmp.dist('cat', 'hat'), 12)
0.333333333333
>>> round(cmp.dist('Niall', 'Neil'), 12)
0.6
>>> cmp.dist('aluminum', 'Catalan')
0.875
>>> cmp.dist('ATCG', 'TAGC')
0.5
.. versionadded:: 0.1.0
.. versionchanged:: 0.3.6
Encapsulated in class
"""
if src == tar:
return 0.0
ins_cost, del_cost = self._cost[:2]
return self.dist_abs(src, tar) / (
self._normalizer([len(src) * del_cost, len(tar) * ins_cost])
)
if __name__ == '__main__':
import doctest
doctest.testmod()
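    # Illustrative extra check (not part of the original module): the sum
    # normalizer suggested in the __init__ docstring divides by the total
    # weighted length of both strings, so 'cat'/'hat' yields 1 / (3 + 3).
    _sum_cmp = DamerauLevenshtein(normalizer=sum)
    assert abs(_sum_cmp.dist('cat', 'hat') - 1 / 6) < 1e-12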
|
gpl-3.0
| -4,152,594,237,314,092,000
| 31.713115
| 170
| 0.534578
| false
| 3.924287
| false
| false
| false
|
yeming233/rally
|
rally/task/trigger.py
|
1
|
2283
|
# Copyright 2016: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from rally.common.i18n import _
from rally.common import logging
from rally.common.plugin import plugin
from rally.common import validation
configure = plugin.configure
LOG = logging.getLogger(__name__)
@validation.add_default("jsonschema")
@plugin.base()
@six.add_metaclass(abc.ABCMeta)
class Trigger(plugin.Plugin, validation.ValidatablePluginMixin):
"""Factory for trigger classes."""
CONFIG_SCHEMA = {"type": "null"}
def __init__(self, context, task, hook_cls):
self.context = context
self.config = self.context["trigger"]["args"]
self.task = task
self.hook_cls = hook_cls
self._runs = []
@abc.abstractmethod
def get_listening_event(self):
"""Returns event type to listen."""
def on_event(self, event_type, value=None):
"""Launch hook on specified event."""
LOG.info(_("Hook %s is triggered for Task %s by %s=%s")
% (self.hook_cls.__name__, self.task["uuid"],
event_type, value))
hook = self.hook_cls(self.task, self.context.get("args", {}),
{"event_type": event_type, "value": value})
hook.run_async()
self._runs.append(hook)
def get_results(self):
results = {"config": self.context,
"results": [],
"summary": {}}
for hook in self._runs:
hook_result = hook.result()
results["results"].append(hook_result)
results["summary"].setdefault(hook_result["status"], 0)
results["summary"][hook_result["status"]] += 1
return results
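# A minimal illustrative subclass (hypothetical; the plugin name and event
# type below are examples only, not part of Rally):
#
#   @configure(name="example-per-unit")
#   class PerUnitTrigger(Trigger):
#       """Launches the hook once for every observed "unit" event."""
#       CONFIG_SCHEMA = {"type": "object"}
#
#       def get_listening_event(self):
#           return "unit"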
|
apache-2.0
| 4,417,696,180,981,200,400
| 32.573529
| 78
| 0.624617
| false
| 3.998249
| false
| false
| false
|
Ghini/ghini.desktop
|
bauble/error.py
|
1
|
2187
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2005,2006,2007,2008,2009 Brett Adams <brett@belizebotanic.org>
# Copyright (c) 2012-2015 Mario Frasca <mario@anche.no>
#
# This file is part of ghini.desktop.
#
# ghini.desktop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ghini.desktop is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ghini.desktop. If not, see <http://www.gnu.org/licenses/>.
#
# all bauble exceptions and errors
#
class BaubleError(Exception):
def __init__(self, msg=None):
self.msg = msg
def __str__(self):
if self.msg is None:
return str(type(self).__name__)
else:
return '%s: %s' % (type(self).__name__, self.msg)
class CommitException(Exception):
def __init__(self, exc, row):
self.row = row # the model we were trying to commit
self.exc = exc # the exception thrown while committing
def __str__(self):
return str(self.exc)
class NoResultException(BaubleError):
## use this exception if the caller should return None
pass
class DatabaseError(BaubleError):
pass
class EmptyDatabaseError(DatabaseError):
pass
class MetaTableError(DatabaseError):
pass
class TimestampError(DatabaseError):
pass
class RegistryError(DatabaseError):
pass
class VersionError(DatabaseError):
def __init__(self, version):
super().__init__()
self.version = version
class SQLAlchemyVersionError(BaubleError):
pass
class CheckConditionError(BaubleError):
pass
def check(condition, msg=None):
"""
Check that condition is true. If not then raise
CheckConditionError(msg)
"""
if not condition:
raise CheckConditionError(msg)
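if __name__ == '__main__':
    # Illustrative only (not part of the original module): check() raises
    # CheckConditionError, whose str() includes the class name.
    try:
        check(2 + 2 == 5, 'arithmetic is broken')
    except CheckConditionError as exc:
        print(exc)  # -> CheckConditionError: arithmetic is broken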
|
gpl-2.0
| -2,004,390,259,977,985,500
| 22.516129
| 78
| 0.683128
| false
| 3.850352
| false
| false
| false
|
point97/hapifis
|
server/apps/survey/migrations/0058_auto__add_field_question_skip_condition.py
|
1
|
10477
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Question.skip_condition'
db.add_column(u'survey_question', 'skip_condition',
self.gf('django.db.models.fields.CharField')(max_length=254, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Question.skip_condition'
db.delete_column(u'survey_question', 'skip_condition')
models = {
u'survey.location': {
'Meta': {'object_name': 'Location'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lat': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '7'}),
'lng': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '7'}),
'respondant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Respondant']", 'null': 'True', 'blank': 'True'}),
'response': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Response']"})
},
u'survey.locationanswer': {
'Meta': {'object_name': 'LocationAnswer'},
'answer': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Location']"})
},
u'survey.multianswer': {
'Meta': {'object_name': 'MultiAnswer'},
'answer_label': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'answer_text': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'response': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Response']"})
},
u'survey.option': {
'Meta': {'object_name': 'Option'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.SlugField', [], {'max_length': '64'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rows': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'integer'", 'max_length': '20'})
},
u'survey.page': {
'Meta': {'ordering': "['survey', 'question__order']", 'object_name': 'Page'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Question']", 'null': 'True', 'blank': 'True'}),
'survey': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Survey']", 'null': 'True', 'blank': 'True'})
},
u'survey.question': {
'Meta': {'ordering': "['order']", 'object_name': 'Question'},
'allow_other': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'cols': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'filterBy': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'filter_questions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'filter_questions_rel_+'", 'null': 'True', 'to': u"orm['survey.Question']"}),
'foreach_question': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'foreach'", 'null': 'True', 'to': u"orm['survey.Question']"}),
'grid_cols': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'grid_cols'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['survey.Option']"}),
'hoist_answers': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'hoisted'", 'null': 'True', 'to': u"orm['survey.Question']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.CharField', [], {'max_length': '254', 'null': 'True', 'blank': 'True'}),
'integer_max': ('django.db.models.fields.IntegerField', [], {'default': '365', 'null': 'True', 'blank': 'True'}),
'integer_min': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
'lat': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'lng': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'min_zoom': ('django.db.models.fields.IntegerField', [], {'default': '10', 'null': 'True', 'blank': 'True'}),
'modalQuestion': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'modal_question'", 'null': 'True', 'to': u"orm['survey.Question']"}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['survey.Option']", 'null': 'True', 'blank': 'True'}),
'options_from_previous_answer': ('django.db.models.fields.CharField', [], {'max_length': '254', 'null': 'True', 'blank': 'True'}),
'options_json': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'randomize_groups': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'report_type': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '20', 'null': 'True'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rows': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'skip_condition': ('django.db.models.fields.CharField', [], {'max_length': '254', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '64'}),
'term_condition': ('django.db.models.fields.CharField', [], {'max_length': '254', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.TextField', [], {}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'}),
'visualize': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'zoom': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'survey.respondant': {
'Meta': {'object_name': 'Respondant'},
'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'county': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'default': 'None', 'max_length': '254', 'null': 'True', 'blank': 'True'}),
'last_question': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'locations': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'responses': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'responses'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['survey.Response']"}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '240', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
'survey': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Survey']"}),
'ts': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 2, 0, 0)'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'01496547-962e-4773-a38d-bd6dacdc25ca'", 'max_length': '36', 'primary_key': 'True'})
},
u'survey.response': {
'Meta': {'object_name': 'Response'},
'answer': ('django.db.models.fields.TextField', [], {}),
'answer_raw': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Question']"}),
'respondant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['survey.Respondant']", 'null': 'True', 'blank': 'True'}),
'ts': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 7, 2, 0, 0)'})
},
u'survey.survey': {
'Meta': {'object_name': 'Survey'},
'anon': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
'offline': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'questions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['survey.Question']", 'null': 'True', 'through': u"orm['survey.Page']", 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '254'}),
'states': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['survey']
|
gpl-3.0
| -4,969,207,650,817,675,000
| 80.224806
| 207
| 0.549203
| false
| 3.574548
| false
| false
| false
|
MadeiraCloud/salt
|
sources/salt/modules/pkgutil.py
|
1
|
9536
|
# -*- coding: utf-8 -*-
'''
Pkgutil support for Solaris
'''
# Import python libs
import copy
# Import salt libs
import salt.utils
from salt.exceptions import CommandExecutionError, MinionError
def __virtual__():
'''
Set the virtual pkg module if the os is Solaris
'''
if 'os' in __grains__ and __grains__['os'] == 'Solaris':
return 'pkgutil'
return False
def refresh_db():
'''
Updates the pkgutil repo database (pkgutil -U)
CLI Example:
.. code-block:: bash
salt '*' pkgutil.refresh_db
'''
return __salt__['cmd.retcode']('/opt/csw/bin/pkgutil -U > /dev/null 2>&1') == 0
def upgrade_available(name):
'''
Check if there is an upgrade available for a certain package
CLI Example:
.. code-block:: bash
salt '*' pkgutil.upgrade_available CSWpython
'''
version_num = None
cmd = '/opt/csw/bin/pkgutil -c --parse --single {0} 2>/dev/null'.format(
name)
out = __salt__['cmd.run_stdout'](cmd)
if out:
version_num = out.split()[2].strip()
if version_num:
if version_num == "SAME":
return ''
else:
return version_num
return ''
def list_upgrades(refresh=True):
'''
List all available package upgrades on this system
CLI Example:
.. code-block:: bash
salt '*' pkgutil.list_upgrades
'''
if salt.utils.is_true(refresh):
refresh_db()
upgrades = {}
lines = __salt__['cmd.run_stdout'](
'/opt/csw/bin/pkgutil -A --parse').splitlines()
for line in lines:
comps = line.split('\t')
if comps[2] == "SAME":
continue
if comps[2] == "not installed":
continue
upgrades[comps[0]] = comps[1]
return upgrades
def upgrade(refresh=True, **kwargs):
'''
Upgrade all of the packages to the latest available version.
Returns a dict containing the changes::
{'<package>': {'old': '<old-version>',
'new': '<new-version>'}}
CLI Example:
.. code-block:: bash
salt '*' pkgutil.upgrade
'''
if salt.utils.is_true(refresh):
refresh_db()
old = list_pkgs()
# Install or upgrade the package
# If package is already installed
cmd = '/opt/csw/bin/pkgutil -yu'
__salt__['cmd.run_all'](cmd)
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
return salt.utils.compare_dicts(old, new)
def list_pkgs(versions_as_list=False, **kwargs):
'''
List the packages currently installed as a dict::
{'<package_name>': '<version>'}
CLI Example:
.. code-block:: bash
salt '*' pkg.list_pkgs
salt '*' pkg.list_pkgs versions_as_list=True
'''
versions_as_list = salt.utils.is_true(versions_as_list)
# 'removed' not yet implemented or not applicable
if salt.utils.is_true(kwargs.get('removed')):
return {}
if 'pkg.list_pkgs' in __context__:
if versions_as_list:
return __context__['pkg.list_pkgs']
else:
ret = copy.deepcopy(__context__['pkg.list_pkgs'])
__salt__['pkg_resource.stringify'](ret)
return ret
ret = {}
cmd = '/usr/bin/pkginfo -x'
    # Package information is returned as two lines per package. On even-offset
# lines, the package name is in the first column. On odd-offset lines, the
# package version is in the second column.
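    # For example (illustrative output only; exact spacing varies):
    #   CSWpython              python - A high-level scripting language
    #                          (i386) 2.6.9,REV=2014.03.15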
lines = __salt__['cmd.run'](cmd).splitlines()
for index, line in enumerate(lines):
if index % 2 == 0:
name = line.split()[0].strip()
if index % 2 == 1:
version_num = line.split()[1].strip()
__salt__['pkg_resource.add_pkg'](ret, name, version_num)
__salt__['pkg_resource.sort_pkglist'](ret)
__context__['pkg.list_pkgs'] = copy.deepcopy(ret)
if not versions_as_list:
__salt__['pkg_resource.stringify'](ret)
return ret
def version(*names, **kwargs):
'''
Returns a version if the package is installed, else returns an empty string
CLI Example:
.. code-block:: bash
salt '*' pkgutil.version CSWpython
'''
return __salt__['pkg_resource.version'](*names, **kwargs)
def latest_version(*names, **kwargs):
'''
Return the latest version of the named package available for upgrade or
installation. If more than one package name is specified, a dict of
name/version pairs is returned.
If the latest version of a given package is already installed, an empty
string will be returned for that package.
CLI Example:
.. code-block:: bash
salt '*' pkgutil.latest_version CSWpython
salt '*' pkgutil.latest_version <package1> <package2> <package3> ...
'''
refresh = salt.utils.is_true(kwargs.pop('refresh', True))
if not names:
return ''
ret = {}
# Initialize the dict with empty strings
for name in names:
ret[name] = ''
# Refresh before looking for the latest version available
if refresh:
refresh_db()
pkgs = list_pkgs()
cmd = '/opt/csw/bin/pkgutil -a --parse {0}'.format(' '.join(names))
output = __salt__['cmd.run_all'](cmd).get('stdout', '').splitlines()
for line in output:
try:
name, version_rev = line.split()[1:3]
except ValueError:
continue
if name in names:
cver = pkgs.get(name, '')
nver = version_rev.split(',')[0]
if not cver or salt.utils.compare_versions(ver1=cver,
oper='<',
ver2=nver):
# Remove revision for version comparison
ret[name] = version_rev
# Return a string if only one package name passed
if len(names) == 1:
return ret[names[0]]
return ret
# available_version is being deprecated
available_version = latest_version
def install(name=None, refresh=False, version=None, pkgs=None, **kwargs):
'''
Install packages using the pkgutil tool.
CLI Example:
.. code-block:: bash
salt '*' pkg.install <package_name>
salt '*' pkg.install SMClgcc346
Multiple Package Installation Options:
pkgs
A list of packages to install from OpenCSW. Must be passed as a python
list.
CLI Example:
.. code-block:: bash
salt '*' pkg.install pkgs='["foo", "bar"]'
salt '*' pkg.install pkgs='["foo", {"bar": "1.2.3"}]'
Returns a dict containing the new package names and versions::
{'<package>': {'old': '<old-version>',
'new': '<new-version>'}}
'''
if refresh:
refresh_db()
try:
# Ignore 'sources' argument
pkg_params = __salt__['pkg_resource.parse_targets'](name,
pkgs,
**kwargs)[0]
except MinionError as exc:
raise CommandExecutionError(exc)
if pkg_params is None or len(pkg_params) == 0:
return {}
if pkgs is None and version and len(pkg_params) == 1:
pkg_params = {name: version}
targets = []
for param, pkgver in pkg_params.iteritems():
if pkgver is None:
targets.append(param)
else:
targets.append('{0}-{1}'.format(param, pkgver))
cmd = '/opt/csw/bin/pkgutil -yu {0}'.format(' '.join(targets))
old = list_pkgs()
__salt__['cmd.run_all'](cmd)
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
return salt.utils.compare_dicts(old, new)
def remove(name=None, pkgs=None, **kwargs):
'''
Remove a package and all its dependencies which are not in use by other
packages.
name
The name of the package to be deleted.
Multiple Package Options:
pkgs
A list of packages to delete. Must be passed as a python list. The
``name`` parameter will be ignored if this option is passed.
.. versionadded:: 0.16.0
Returns a dict containing the changes.
CLI Example:
.. code-block:: bash
salt '*' pkg.remove <package name>
salt '*' pkg.remove <package1>,<package2>,<package3>
salt '*' pkg.remove pkgs='["foo", "bar"]'
'''
try:
pkg_params = __salt__['pkg_resource.parse_targets'](name, pkgs)[0]
except MinionError as exc:
raise CommandExecutionError(exc)
old = list_pkgs()
targets = [x for x in pkg_params if x in old]
if not targets:
return {}
cmd = '/opt/csw/bin/pkgutil -yr {0}'.format(' '.join(targets))
__salt__['cmd.run_all'](cmd)
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
return salt.utils.compare_dicts(old, new)
def purge(name=None, pkgs=None, **kwargs):
'''
Package purges are not supported, this function is identical to
``remove()``.
name
The name of the package to be deleted.
Multiple Package Options:
pkgs
A list of packages to delete. Must be passed as a python list. The
``name`` parameter will be ignored if this option is passed.
.. versionadded:: 0.16.0
Returns a dict containing the changes.
CLI Example:
.. code-block:: bash
salt '*' pkg.purge <package name>
salt '*' pkg.purge <package1>,<package2>,<package3>
salt '*' pkg.purge pkgs='["foo", "bar"]'
'''
return remove(name=name, pkgs=pkgs)
|
apache-2.0
| 7,205,687,913,548,600,000
| 25.197802
| 83
| 0.570889
| false
| 3.97499
| true
| false
| false
|
tensorflow/federated
|
tensorflow_federated/python/common_libs/golden_test.py
|
1
|
2657
|
# Copyright 2020, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for `golden` library."""
from absl.testing import absltest
from absl.testing import flagsaver
from tensorflow_federated.python.common_libs import golden
class GoldenTest(absltest.TestCase):
def test_check_string_succeeds(self):
golden.check_string('test_check_string_succeeds.expected',
'foo\nbar\nbaz\nfizzbuzz')
def test_check_string_fails(self):
with self.assertRaises(golden.MismatchedGoldenError):
golden.check_string('test_check_string_fails.expected',
'not\nwhat\nyou\nexpected')
def test_check_string_updates(self):
filename = 'test_check_string_updates.expected'
golden_path = golden._filename_to_golden_path(filename)
old_contents = 'old\ndata\n'
new_contents = 'new\ndata\n'
# Attempt to reset the contents of the file to their checked-in state.
try:
with open(golden_path, 'w') as f:
f.write(old_contents)
except (OSError, PermissionError):
# We're running without `--test_strategy=local`, and so can't test
# updates properly because these files are read-only.
return
# Check for a mismatch when `--update_goldens` isn't passed.
with self.assertRaises(golden.MismatchedGoldenError):
golden.check_string(filename, new_contents)
# Rerun with `--update_goldens`.
with flagsaver.flagsaver(update_goldens=True):
golden.check_string(filename, new_contents)
# Check again without `--update_goldens` now that they have been updated.
try:
golden.check_string(filename, new_contents)
except golden.MismatchedGoldenError as e:
self.fail(f'Unexpected mismatch after update: {e}')
# Reset the contents of the file to their checked-in state.
with open(golden_path, 'w') as f:
f.write(old_contents)
def test_check_raises_traceback(self):
with golden.check_raises_traceback('test_check_raises_traceback.expected',
RuntimeError):
raise RuntimeError()
if __name__ == '__main__':
absltest.main()
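# Illustrative invocation (target name inferred from this file's path; the
# flags mirror the update flow exercised in test_check_string_updates):
#   bazel test //tensorflow_federated/python/common_libs:golden_test \
#     --test_strategy=local --test_arg=--update_goldens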
|
apache-2.0
| -5,081,523,066,329,102,000
| 38.073529
| 78
| 0.697779
| false
| 3.779516
| true
| false
| false
|
cgpotts/pypragmods
|
embeddedscalars/fragment.py
|
1
|
7029
|
#!/usr/bin/env python
"""
The logical grammar (base lexicon) used throughout the paper. The
code in grammar.py messes with the namespace that it establishes, in
order to implement lexical uncertainty in an intuitive way.
"""
__author__ = "Christopher Potts"
__version__ = "2.0"
__license__ = "GNU general public license, version 3"
__maintainer__ = "Christopher Potts"
__email__ = "See the author's website"
from itertools import product
import sys
from pypragmods.embeddedscalars.settings import a, b, c, s1, s2
from pypragmods.utils import powerset
######################################################################
def define_lexicon(player=[], shot=[], worlds=[]):
D_et = powerset(player+shot)
relational_hit = [[w, x, y] for w, x, y in product(worlds, player, shot) if y in shot[: w[player.index(x)]]]
lex = {
# Concessions to tractability -- these are defined extensionally (invariant across worlds):
"some": [[X, Y] for X, Y in product(D_et, repeat=2) if len(set(X) & set(Y)) > 0],
"exactly_one": [[X, Y] for X, Y in product(D_et, repeat=2) if len(set(X) & set(Y)) == 1],
"every": [[X, Y] for X, Y in product(D_et, repeat=2) if set(X) <= set(Y)],
"no": [[X, Y] for X, Y in product(D_et, repeat=2) if len(set(X) & set(Y)) == 0],
"PlayerA": [X for X in powerset(player) if a in X],
"PlayerB": [X for X in powerset(player) if b in X],
"PlayerC": [X for X in powerset(player) if c in X],
# Tempting to intensionalize these, but that means using intensional quantifiers,
# which are intractable on this set-theoretic formulation. Our goal is to understand
# refinement and lexical uncertainty, which we can study using verbs and extensional
# quantifiers, so this limitation seems well worth it.
"player": player,
"shot": shot,
# Intensional predicates:
"scored": [[w, x] for w, x in product(worlds, player) if len(shot[: w[player.index(x)]]) > 0],
"aced": [[w, x] for w, x in product(worlds, player) if len(shot[: w[player.index(x)]]) > 1],
"missed": [[w, x] for w, x in product(worlds, player) if len(shot[: w[player.index(x)]]) == 0],
"hit" : [[w, x, y] for w, x, y in product(worlds, player, shot) if y in shot[: w[player.index(x)]]],
# More concessions to tractability -- we'll refine these rather than the determiners;
# this should have no effect because of the limited class of predicates -- no predicate
# is true of both players and shots, and player and shot have the same extensions in all
# worlds.
"some_player": [Y for Y in powerset(player) if len(set(player) & set(Y)) > 0],
"some_shot": [Y for Y in powerset(shot) if len(set(shot) & set(Y)) > 0],
"exactly_one_player": [Y for Y in powerset(player) if len(set(player) & set(Y)) == 1],
"exactly_one_shot": [Y for Y in D_et if len(set(shot) & set(Y)) == 1],
"every_player": [Y for Y in D_et if set(player) <= set(Y)],
"every_shot": [Y for Y in D_et if set(shot) <= set(Y)],
"no_player": [Y for Y in D_et if len(set(player) & set(Y)) == 0],
"no_shot": [Y for Y in D_et if len(set(shot) & set(Y)) == 0],
# Mainly for specifying refinements:
"not_every_player": [Y for Y in D_et if not(set(player) <= set(Y))],
"not_every_shot": [Y for Y in D_et if not(set(shot) <= set(Y))],
"scored_not_aced": [[w, x] for w, x in product(worlds, player) if len(shot[: w[player.index(x)]]) == 1],
"only_PlayerA": [X for X in powerset(player) if a in X and len(X) == 1],
"only_PlayerB": [X for X in powerset(player) if b in X and len(X) == 1],
"only_PlayerC": [X for X in powerset(player) if c in X and len(X) == 1],
# For disjunctive examples (limited compositionality to keep the examples tractable):
"hit_shot1": [[w, x] for w, x in product(worlds, player) if w[player.index(x)] in (1, 3)],
"hit_shot2": [[w, x] for w, x in product(worlds, player) if w[player.index(x)] in (2, 3)],
"hit_shot1_or_shot2": [[w, x] for w, x in product(worlds, player) if w[player.index(x)] != 0],
"hit_shot1_and_shot2": [[w, x] for w, x in product(worlds, player) if w[player.index(x)] == 3]
}
return lex
def fa(A, b):
"""Muskens-like function application -- in a list [(x,y), ...], we get
back the second projection limited to the pairs where the first is b."""
return [y for x, y in A if x == b]
def iv(Q, X):
"""Returns a proposition as function true of a world w iff the set of
entities X-at-w is a member of the quantifier (set of sets) Q."""
return (lambda w : fa(X, w) in Q)
def tv(V, Q, worlds, subjects):
"""Funcion composition taking the intensional relation on entities V
and combining it with the set of sets Q to return an intensional
property. The dependence on worlds and subjects is unfortunate but
I don't see how to avoid it."""
return [[w,x] for w, x in product(worlds, subjects)
if [y for w_prime, x_prime, y in V if w_prime == w and x_prime == x] in Q]
def coord(f, X, Y):
for x, y, z in f:
if x==X and y==Y:
return z
return []
######################################################################
def get_worlds(basic_states=(0,1,2), length=3, increasing=False):
worlds = list(product(basic_states, repeat=length))
    # Remove sequences in which the elements don't appear in
    # increasing order. We don't care about order, so this is just one
    # way of removing conceptual duplicates.
if increasing:
worlds = [w for w in worlds if tuple(sorted(w)) == w]
return worlds
def worldname(w):
return "".join(["NSA"[i] for i in w])
######################################################################
if __name__ == '__main__':
# Domain set up:
player = [a, b, c]
shot = [s1, s2]
worlds = get_worlds((0,1,2), length=len(player), increasing=True)
lex = define_lexicon(player=player, shot=shot, worlds=worlds)
# Import the lexicon into this namespace:
for word, sem in list(lex.items()):
setattr(sys.modules[__name__], word, sem)
# Examples:
for d1, d2 in product(("some", "exactly_one", "every", "no"), repeat=2):
msg = "%s(player)(hit(%s(shot)))" % (d1, d2)
formula = "iv(fa(%s, player), tv(hit, fa(%s, shot), worlds, player))" % (d1, d2)
print(msg, [worldname(w) for w in worlds if eval(formula)(w)])
# Examples:
for pn, pred in product(('PlayerA', 'PlayerB', 'PlayerC'), ("missed", "scored", "aced")):
msg = "%s(%s)" % (pn, pred)
formula = "iv(%s, %s)" % (pn, pred)
print(msg, [worldname(w) for w in worlds if eval(formula)(w)])
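    # Illustrative single check (not in the original script): compose fa and
    # iv directly for the universal reading of "every player scored".
    prop = iv(fa(every, player), scored)
    print("every(player)(scored)", [worldname(w) for w in worlds if prop(w)])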
|
gpl-3.0
| -2,607,856,894,522,690,000
| 49.568345
| 115
| 0.565372
| false
| 3.133749
| false
| false
| false
|
ageneau/fishnet
|
test.py
|
1
|
3617
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of the lichess.org fishnet client.
# Copyright (C) 2016-2017 Niklas Fiekas <niklas.fiekas@backscattering.de>
# See LICENSE.txt for licensing information.
import fishnet
import argparse
import unittest
import logging
import sys
import multiprocessing
try:
import configparser
except ImportError:
import ConfigParser as configparser
STARTPOS = "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1"
class WorkerTest(unittest.TestCase):
def setUp(self):
conf = configparser.ConfigParser()
conf.add_section("Fishnet")
conf.set("Fishnet", "Key", "testkey")
fishnet.get_stockfish_command(conf, update=True)
self.worker = fishnet.Worker(conf,
threads=multiprocessing.cpu_count(),
memory=32,
progress_reporter=None)
self.worker.start_stockfish()
def tearDown(self):
self.worker.stop()
def test_bestmove(self):
job = {
"work": {
"type": "move",
"id": "abcdefgh",
"level": 8,
},
"game_id": "hgfedcba",
"variant": "standard",
"position": STARTPOS,
"moves": "f2f3 e7e6 g2g4",
}
response = self.worker.bestmove(job)
self.assertEqual(response["move"]["bestmove"], "d8h4")
def test_zh_bestmove(self):
job = {
"work": {
"type": "move",
"id": "hihihihi",
"level": 1,
},
"game_id": "ihihihih",
"variant": "crazyhouse",
"position": "rnbqk1nr/ppp2ppp/3b4/3N4/4p1PP/5P2/PPPPP3/R1BQKBNR/P b KQkq - 9 5",
"moves": "d6g3",
}
response = self.worker.bestmove(job)
self.assertEqual(response["move"]["bestmove"], "P@f2") # only move
def test_3check_bestmove(self):
job = {
"work": {
"type": "move",
"id": "3c3c3c3c",
"level": 8,
},
"game_id": "c3c3c3c3",
"variant": "threecheck",
"position": "r1b1kbnr/pppp1ppp/2n2q2/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 4 4 +2+0",
"moves": "f1c4 d7d6",
}
response = self.worker.bestmove(job)
self.assertEqual(response["move"]["bestmove"], "c4f7")
def test_analysis(self):
job = {
"work": {
"type": "analysis",
"id": "12345678",
},
"game_id": "87654321",
"variant": "standard",
"position": STARTPOS,
"moves": "f2f3 e7e6 g2g4 d8h4",
"skipPositions": [1],
}
response = self.worker.analysis(job)
result = response["analysis"]
self.assertTrue(0 <= result[0]["score"]["cp"] <= 90)
self.assertTrue(result[1]["skipped"])
self.assertEqual(result[3]["score"]["mate"], 1)
self.assertTrue(result[3]["pv"].startswith("d8h4"))
self.assertEqual(result[4]["score"]["mate"], 0)
class UnitTests(unittest.TestCase):
def test_parse_bool(self):
self.assertEqual(fishnet.parse_bool("yes"), True)
self.assertEqual(fishnet.parse_bool("no"), False)
self.assertEqual(fishnet.parse_bool(""), False)
self.assertEqual(fishnet.parse_bool("", default=True), True)
if __name__ == "__main__":
if "-v" in sys.argv or "--verbose" in sys.argv:
fishnet.setup_logging(3)
else:
fishnet.setup_logging(0)
unittest.main()
|
gpl-3.0
| 584,502,897,233,279,600
| 26.610687
| 96
| 0.535803
| false
| 3.380374
| true
| false
| false
|
joaormatos/anaconda
|
Chowdren/chowdren/shaderheader.py
|
1
|
3599
|
import sys
sys.path.append('..')
from chowdren.shaders import SHADERS
from mmfparser.gperf import get_hash_function
from chowdren.common import get_method_name, get_base_path
from chowdren.codewriter import CodeWriter
import os
def write_shader_param():
header = CodeWriter(os.path.join(get_base_path(), 'shaderparam.h'))
code = CodeWriter(os.path.join(get_base_path(), 'shaderparam.cpp'))
parameters = []
for shader in SHADERS:
for param in shader.uniforms:
parameters.append(param[0])
if shader.tex_param:
parameters.append(shader.tex_param)
parameters = list(set(parameters))
hash_data = get_hash_function('hash_shader_parameter', parameters,
False)
code.putln(hash_data.code.replace('inline ', ''))
header.start_guard('CHOWDREN_SHADERPARAM_H')
header.putln('unsigned int hash_shader_parameter(const char * str, '
'unsigned int len);')
header.putln('')
for k, v in hash_data.strings.iteritems():
name = 'SHADER_PARAM_%s' % get_method_name(k).upper()
header.putdefine(name, v)
header.close_guard('CHOWDREN_SHADERPARAM_H')
header.close()
code.close()
def write_shaders():
code = CodeWriter(os.path.join(get_base_path(), 'shaders.cpp'))
for shader in SHADERS:
shader_name = '%sShader' % shader.name
code.putlnc('class %s : public BaseShader', shader_name)
code.start_brace()
code.put_access('public')
for uniform in shader.uniforms:
code.putlnc('static int %s;', uniform[0])
if shader.uniforms:
code.putln('')
asset_name = 'SHADER_%s' % shader.asset_name.upper()
args = [asset_name]
options = []
if shader.has_back:
options.append('SHADER_HAS_BACK')
if shader.has_tex_size:
options.append('SHADER_HAS_TEX_SIZE')
if not options:
if shader.tex_param:
args.append('0')
else:
args.append(' | '.join(options))
if shader.tex_param:
args.append('"%s"' % shader.tex_param)
code.putlnc('%s()', shader_name)
code.putlnc(': BaseShader(%s)', ', '.join(args))
code.start_brace()
code.end_brace()
code.putln('')
code.putmeth('void initialize_parameters')
for uniform in shader.uniforms:
code.putlnc('%s = get_uniform(%r);', uniform[0], uniform[0],
cpp=False)
code.end_brace()
code.putln('')
code.putmeth('static void set_parameters', 'FrameObject * instance')
for uniform in shader.uniforms:
param = 'SHADER_PARAM_%s' % uniform[0].upper()
code.putlnc('BaseShader::set_%s(instance, %s, %s);', uniform[1],
param, uniform[0])
if shader.tex_param:
param = 'SHADER_PARAM_%s' % shader.tex_param.upper()
code.putlnc('BaseShader::set_image(instance, %s);', param)
code.end_brace()
code.end_brace(True)
for uniform in shader.uniforms:
code.putlnc('int %s::%s;', shader_name, uniform[0])
code.putln('')
# write static init code
for shader in SHADERS:
shader_type = '%sShader' % shader.name
shader_name = '%s_shader' % shader.name.lower()
code.putlnc('%s %s;', shader_type, shader_name)
code.close()
def main():
write_shader_param()
write_shaders()
if __name__ == '__main__':
main()
|
gpl-3.0
| -1,006,155,459,375,083,800
| 29.508475
| 76
| 0.572103
| false
| 3.653807
| false
| false
| false
|
dhocker/athomepowerlineserver
|
CommandHandler.py
|
1
|
6289
|
#
# AtHomePowerlineServer - networked server for CM11/CM11A/XTB-232 X10 controllers
# Copyright © 2014, 2021 Dave Hocker
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# See the LICENSE file for more details.
#
import json
import datetime
import logging
import commands.ServerCommand
import commands.StatusRequest
import commands.DeviceOn
import commands.DeviceOff
import commands.DeviceDim
import commands.DeviceBright
import commands.DeviceAllUnitsOff
import commands.DeviceAllLightsOff
import commands.DeviceAllLightsOn
import commands.GetTime
import commands.SetTime
import commands.GetSunData
import commands.define_device
import commands.query_devices
import commands.update_device
import commands.define_program
import commands.update_program
import commands.query_device_programs
import commands.query_device_program
import commands.delete_device
import commands.delete_device_program
import commands.all_devices_on
import commands.all_devices_off
import commands.query_available_devices
import commands.discover_devices
import commands.query_available_programs
import commands.query_programs
import commands.assign_program
import commands.delete_program
import commands.query_action_groups
import commands.query_action_group
import commands.query_action_group_devices
import commands.update_action_group
import commands.define_group
import commands.group_on
import commands.group_off
import commands.delete_group
import commands.query_available_group_devices
import commands.assign_device
import commands.delete_group_device
import commands.assign_program_to_group
logger = logging.getLogger("server")
class CommandHandler:
call_sequence = 1
# Error codes
NotImplemented = 404
UnhandledException = 405
COMMAND_HANDLER_LIST = {
"deviceon": commands.DeviceOn.DeviceOn,
"on": commands.DeviceOn.DeviceOn,
"deviceoff": commands.DeviceOff.DeviceOff,
"off": commands.DeviceOff.DeviceOff,
"dim": commands.DeviceDim.DeviceDim,
"bright": commands.DeviceBright.DeviceBright,
"statusrequest": commands.StatusRequest.StatusRequest,
"gettime": commands.GetTime.GetTime,
"settime": commands.SetTime.SetTime,
"getsundata": commands.GetSunData.GetSunData,
"definedevice": commands.define_device.DefineDevice,
"querydevices": commands.query_devices.QueryDevices,
"queryavailabledevices": commands.query_available_devices.QueryAvailableDevices,
"discoverdevices": commands.discover_devices.DiscoverDevices,
"queryavailableprograms": commands.query_available_programs.QueryAvailablePrograms,
"updatedevice": commands.update_device.UpdateDevice,
"deletedevice": commands.delete_device.DeleteDevice,
"queryprograms": commands.query_programs.QueryPrograms,
"defineprogram": commands.define_program.DefineProgram,
"updateprogram": commands.update_program.UpdateProgram,
"deleteprogram": commands.delete_program.DeleteProgram,
"deletedeviceprogram": commands.delete_device_program.DeleteDeviceProgram,
"querydeviceprograms": commands.query_device_programs.QueryDevicePrograms,
"querydeviceprogram": commands.query_device_program.QueryDeviceProgram,
"assignprogram": commands.assign_program.AssignProgram,
"assignprogramtogroup": commands.assign_program_to_group.AssignProgramToGroup,
"defineactiongroup": commands.define_group.DefineGroup,
"deleteactiongroup": commands.delete_group.DeleteGroup,
"queryactiongroups": commands.query_action_groups.QueryActionGroups,
"queryactiongroup": commands.query_action_group.QueryActionGroup,
"updateactiongroup": commands.update_action_group.UpdateActionGroup,
"queryactiongroupdevices": commands.query_action_group_devices.QueryActionGroupDevices,
"queryavailablegroupdevices": commands.query_available_group_devices.QueryAvailableGroupDevices,
"assigndevice": commands.assign_device.AssignDevice,
"deleteactiongroupdevice": commands.delete_group_device.DeleteActionGroupDevice,
"groupon": commands.group_on.GroupOn,
"groupoff": commands.group_off.GroupOff,
"alldeviceson": commands.all_devices_on.AllDevicesOn,
"alldevicesoff": commands.all_devices_off.AllDevicesOff
}
def GetHandler(self, command):
"""
Return an instance of the handler for a given command
:param command: API command as a string
:return: Instance of class that executes the command
"""
logger.info("GetHandler for command: %s", command)
ci_command = command.lower()
if ci_command in self.COMMAND_HANDLER_LIST.keys():
handler = self.COMMAND_HANDLER_LIST[ci_command]()
else:
handler = None
return handler
#######################################################################
# Execute the command specified by the incoming request
def Execute(self, request):
handler = self.GetHandler(request["request"])
if handler is not None:
response = handler.Execute(request)
response['call-sequence'] = CommandHandler.call_sequence
else:
logger.error("No handler for command: %s", request["request"])
response = CommandHandler.CreateErrorResponse(request["request"], CommandHandler.NotImplemented,
"Command is not recognized or implemented", "")
CommandHandler.call_sequence += 1
return response
@classmethod
def CreateErrorResponse(cls, request_command, result_code, error_msg, extra_data):
r = commands.ServerCommand.ServerCommand.CreateResponse(request_command)
r['result-code'] = result_code
r['error'] = error_msg
r['call-sequence'] = cls.call_sequence
r['data'] = extra_data
return r
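# Illustrative dispatch (the request shape follows Execute() above; any extra
# keys a given handler reads, such as a device identifier, are examples only):
#
#   handler = CommandHandler()
#   response = handler.Execute({"request": "On", "device-id": 1})
#   # response includes 'call-sequence' plus handler-specific fields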
|
gpl-3.0
| -6,893,993,220,547,688,000
| 39.92
| 108
| 0.708651
| false
| 4.263051
| false
| false
| false
|
Nolski/airmozilla
|
airmozilla/roku/tests/test_views.py
|
1
|
8166
|
import datetime
from django.conf import settings
from django.utils import timezone
from django.core.files import File
from funfactory.urlresolvers import reverse
from nose.tools import eq_, ok_
from airmozilla.main.models import (
Event,
Channel,
Template,
Picture,
EventHitStats,
Approval,
)
from airmozilla.base.tests.testbase import DjangoTestCase
class TestRoku(DjangoTestCase):
"""These tests are deliberately very UN-thorough.
That's because this whole app is very much an experiment.
"""
fixtures = ['airmozilla/manage/tests/main_testdata.json']
main_image = 'airmozilla/manage/tests/firefox.png'
def test_categories_feed(self):
url = reverse('roku:categories_feed')
main_channel = Channel.objects.get(slug=settings.DEFAULT_CHANNEL_SLUG)
main_url = reverse('roku:channel_feed', args=(main_channel.slug,))
trending_url = reverse('roku:trending_feed')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(main_url in response.content)
ok_(trending_url in response.content)
def test_categories_feed_live_events(self):
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
url = reverse('roku:categories_feed')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.title not in response.content)
now = timezone.now()
event.start_time = now - datetime.timedelta(seconds=3600)
event.archive_time = None
event.save()
assert not event.archive_time
assert event in Event.objects.live()
edgecast_hls = Template.objects.create(
content='something {{ file }}',
name='EdgeCast hls'
)
event.template = edgecast_hls
event.template_environment = {'file': 'abc123'}
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
# but it really has to have that 'file' attribute
event.template_environment = {'something': 'else'}
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.title not in response.content)
def test_channel_feed(self):
main_channel = Channel.objects.get(slug=settings.DEFAULT_CHANNEL_SLUG)
main_url = reverse('roku:channel_feed', args=(main_channel.slug,))
response = self.client.get(main_url)
eq_(response.status_code, 200)
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
ok_(event.title not in response.content)
vidly = Template.objects.create(
name="Vid.ly Test",
content="test"
)
event.template = vidly
event.template_environment = {'tag': 'xyz123'}
event.save()
response = self.client.get(main_url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
# if the *needs* approval, it shouldn't appear
app = Approval.objects.create(event=event)
response = self.client.get(main_url)
eq_(response.status_code, 200)
ok_(event.title not in response.content)
app.processed = True
app.save()
response = self.client.get(main_url)
eq_(response.status_code, 200)
ok_(event.title not in response.content)
app.approved = True
app.save()
response = self.client.get(main_url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
def test_channel_feed_with_no_placeholder(self):
main_channel = Channel.objects.get(slug=settings.DEFAULT_CHANNEL_SLUG)
main_url = reverse('roku:channel_feed', args=(main_channel.slug,))
event = Event.objects.get(title='Test event')
with open(self.main_image) as fp:
picture = Picture.objects.create(file=File(fp))
vidly = Template.objects.create(
name="Vid.ly Test",
content="test"
)
event.picture = picture
event.placeholder_img = None
event.template = vidly
event.template_environment = {'tag': 'xyz123'}
event.save()
response = self.client.get(main_url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
def test_event_feed(self):
event = Event.objects.get(title='Test event')
start_time = event.start_time
start_time = start_time.replace(year=2014)
start_time = start_time.replace(month=9)
start_time = start_time.replace(day=13)
event.start_time = start_time
event.save()
self._attach_file(event, self.main_image)
url = reverse('roku:event_feed', args=(event.id,))
response = self.client.get(url)
eq_(response.status_code, 200)
event = Event.objects.get(title='Test event')
ok_(event.title not in response.content)
vidly = Template.objects.create(
name="Vid.ly Test",
content="test"
)
event.template = vidly
event.template_environment = {'tag': 'xyz123'}
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('%s - Sep 13 2014' % event.title in response.content)
def test_event_feed_escape_description(self):
event = Event.objects.get(title='Test event')
event.description = (
'Check out <a href="http://peterbe.com">peterbe</a> '
"and <script>alert('xss')</script> this."
)
vidly = Template.objects.create(
name="Vid.ly Test",
content="test"
)
event.template = vidly
event.template_environment = {'tag': 'xyz123'}
event.save()
self._attach_file(event, self.main_image)
url = reverse('roku:event_feed', args=(event.id,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('Check out peterbe and' in response.content)
ok_('alert('xss') this' in response.content)
def test_event_duration(self):
event = Event.objects.get(title='Test event')
vidly = Template.objects.create(
name="Vid.ly Test",
content="test"
)
event.template = vidly
event.template_environment = {'tag': 'xyz123'}
event.save()
self._attach_file(event, self.main_image)
url = reverse('roku:event_feed', args=(event.id,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('<runtime>3600</runtime>' in response.content)
event.duration = 12
event.save()
self._attach_file(event, self.main_image)
url = reverse('roku:event_feed', args=(event.id,))
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('<runtime>12</runtime>' in response.content)
def test_trending_feed(self):
url = reverse('roku:trending_feed')
response = self.client.get(url)
eq_(response.status_code, 200)
event = Event.objects.get(title='Test event')
self._attach_file(event, self.main_image)
ok_(event.title not in response.content)
vidly = Template.objects.create(
name="Vid.ly Test",
content="test"
)
event.template = vidly
event.template_environment = {'tag': 'xyz123'}
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
# because it's not trending
ok_(event.title not in response.content)
EventHitStats.objects.create(
event=event,
total_hits=1000,
)
# This save will trigger to disrupt the cache used inside
# get_featured_events() since it'll change the modified time.
event.save()
response = self.client.get(url)
eq_(response.status_code, 200)
ok_(event.title in response.content)
|
bsd-3-clause
| 1,651,720,650,497,076,500
| 34.350649
| 78
| 0.605927
| false
| 3.791086
| true
| false
| false
|
RedhawkSDR/integration-gnuhawk
|
components/file_sink_c/tests/test_file_sink_c.py
|
1
|
4065
|
#!/usr/bin/env python
#
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of GNUHAWK.
#
# GNUHAWK is free software: you can redistribute it and/or modify is under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# GNUHAWK is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see http://www.gnu.org/licenses/.
#
import unittest
import ossie.utils.testing
import os
from omniORB import any
class ComponentTests(ossie.utils.testing.ScaComponentTestCase):
"""Test for all component implementations in file_sink_c"""
def testScaBasicBehavior(self):
#######################################################################
# Launch the component with the default execparams
execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False)
execparams = dict([(x.id, any.from_any(x.value)) for x in execparams])
self.launch(execparams)
#######################################################################
# Verify the basic state of the component
self.assertNotEqual(self.comp, None)
self.assertEqual(self.comp.ref._non_existent(), False)
self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True)
#######################################################################
# Validate that query returns all expected parameters
# Query of '[]' should return the following set of properties
expectedProps = []
expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True))
expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True))
props = self.comp.query([])
props = dict((x.id, any.from_any(x.value)) for x in props)
# Query may return more than expected, but not less
for expectedProp in expectedProps:
self.assertEquals(props.has_key(expectedProp.id), True)
#######################################################################
# Verify that all expected ports are available
for port in self.scd.get_componentfeatures().get_ports().get_uses():
port_obj = self.comp.getPort(str(port.get_usesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True)
for port in self.scd.get_componentfeatures().get_ports().get_provides():
port_obj = self.comp.getPort(str(port.get_providesname()))
self.assertNotEqual(port_obj, None)
self.assertEqual(port_obj._non_existent(), False)
self.assertEqual(port_obj._is_a(port.get_repid()), True)
#######################################################################
# Make sure start and stop can be called without throwing exceptions
self.comp.start()
self.comp.stop()
#######################################################################
# Simulate regular component shutdown
self.comp.releaseObject()
# TODO Add additional tests here
#
# See:
# ossie.utils.bulkio.bulkio_helpers,
# ossie.utils.bluefile.bluefile_helpers
# for modules that will assist with testing components with BULKIO ports
if __name__ == "__main__":
ossie.utils.testing.main("../file_sink_c.spd.xml") # By default tests all implementations
|
gpl-3.0
| -1,006,469,763,631,082,900
| 46.823529
| 133
| 0.591636
| false
| 4.428105
| true
| false
| false
|
sergey-dryabzhinsky/dedupsqlfs
|
dedupsqlfs/db/mysql/table/subvolume.py
|
1
|
7299
|
# -*- coding: utf8 -*-
__author__ = 'sergey'
import hashlib
from time import time
from dedupsqlfs.db.mysql.table import Table
class TableSubvolume( Table ):
_table_name = "subvolume"
def create( self ):
c = self.getCursor()
# Create table
c.execute(
"CREATE TABLE IF NOT EXISTS `%s` (" % self.getName()+
"`id` INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, "+
"`hash` BINARY(16) NOT NULL, "+
"`name` BLOB NOT NULL, "+
"`stats` TEXT, "+
"`root_diff` TEXT, "+
"`readonly` TINYINT UNSIGNED NOT NULL DEFAULT 0, "+
"`stats_at` INT UNSIGNED, "+
"`root_diff_at` INT UNSIGNED, "+
"`created_at` INT UNSIGNED NOT NULL, "+
"`mounted_at` INT UNSIGNED, "+
"`updated_at` INT UNSIGNED"+
")"+
self._getCreationAppendString()
)
self.createIndexIfNotExists('hash', ('hash',), True)
return
def insert( self, name, created_at, mounted_at=None, updated_at=None, stats_at=None, stats=None, root_diff_at=None, root_diff=None ):
"""
:param name: str - name for subvolume/snapshot
:param created_at: int - creation time
:param mounted_at: int|None - subvolume mounted
:param updated_at: int|None - subvolume updated
:return: int
"""
self.startTimer()
cur = self.getCursor()
digest = hashlib.new('md5', name).digest()
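        # The MD5 digest of the raw name feeds the unique `hash` index, so
        # find() can match on a fixed-width key instead of comparing the
        # variable-length name blob.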
cur.execute(
"INSERT INTO `%s` " % self.getName()+
" (`hash`,`name`,`created_at`, `mounted_at`, `updated_at`, `stats_at`, `stats`, `root_diff_at`, `root_diff`) "+
"VALUES (X%(hash)s, X%(name)s, %(created)s, %(mounted)s, %(updated)s, %(statsed)s, %(stats)s, %(diffed)s, %(root_diff)s)",
{
"hash": digest.hex(),
"name": name.hex(),
"created": int(created_at),
"mounted": mounted_at,
"updated": updated_at,
"statsed": stats_at,
"stats": stats,
"diffed": root_diff_at,
"root_diff": root_diff
}
)
item = cur.lastrowid
self.stopTimer('insert')
return item
def get_count(self):
self.startTimer()
cur = self.getCursor()
cur.execute("SELECT COUNT(1) as `cnt` FROM `%s`" % self.getName())
item = cur.fetchone()
if item:
item = item["cnt"]
else:
item = 0
self.stopTimer('get_count')
return item
def readonly(self, subvol_id, flag=True):
self.startTimer()
if flag:
flag = 1
else:
flag = 0
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `readonly`=%(readonly)s WHERE `id`=%(id)s",
{
"readonly": flag,
"id": subvol_id
}
)
self.stopTimer('readonly')
return cur.rowcount
def mount_time(self, subvol_id, mtime=None):
self.startTimer()
if mtime is None:
mtime = time()
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `mounted_at`=%(mounted)s WHERE `id`=%(id)s",
{
"mounted": int(mtime),
"id": subvol_id
}
)
self.stopTimer('mount_time')
return cur.rowcount
def update_time(self, subvol_id, utime=None):
self.startTimer()
if utime is None:
utime = time()
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `updated_at`=%(updated)s WHERE `id`=%(id)s",
{
"updated": int(utime),
"id": subvol_id
}
)
self.stopTimer('update_time')
return cur.rowcount
def stats_time(self, subvol_id, stime=None):
self.startTimer()
if stime is None:
stime = time()
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `stats_at`=%(stime)s WHERE `id`=%(id)s",
{
"stime": int(stime),
"id": subvol_id
}
)
self.stopTimer('stats_time')
return cur.rowcount
def set_stats(self, subvol_id, stats):
self.startTimer()
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `stats`=%(stats)s WHERE `id`=%(id)s",
{
"stats": stats,
"id": subvol_id
}
)
self.stopTimer('set_stats')
return cur.rowcount
def root_diff_time(self, subvol_id, rtime=None):
self.startTimer()
if rtime is None:
rtime = time()
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `root_diff_at`=%(rtime)s WHERE `id`=%(id)s",
{
"rtime": int(rtime),
"id": subvol_id
}
)
        self.stopTimer('root_diff_time')
return cur.rowcount
def set_root_diff(self, subvol_id, root_diff):
self.startTimer()
cur = self.getCursor()
cur.execute(
"UPDATE `%s` " % self.getName()+
" SET `root_diff`=%(rdiff)s WHERE `id`=%(id)s",
{
"rdiff": root_diff,
"id": subvol_id
}
)
        self.stopTimer('set_root_diff')
return cur.rowcount
def delete(self, subvol_id):
self.startTimer()
cur = self.getCursor()
cur.execute(
"DELETE FROM `%s` " % self.getName()+
" WHERE `id`=%(id)s",
{
"id": subvol_id
}
)
item = cur.rowcount
self.stopTimer('delete')
return item
def get(self, subvol_id):
self.startTimer()
cur = self.getCursor()
cur.execute(
"SELECT * FROM `%s` " % self.getName()+
" WHERE `id`=%(id)s",
{
"id": int(subvol_id)
}
)
item = cur.fetchone()
self.stopTimer('get')
return item
def find(self, name):
self.startTimer()
cur = self.getCursor()
digest = hashlib.new('md5', name).digest()
cur.execute(
"SELECT * FROM `%s` " % self.getName()+
" WHERE `hash`=X%(hash)s",
{
"hash": digest.hex()
}
)
item = cur.fetchone()
self.stopTimer('find')
return item
def get_ids(self, order_by=None, order_dir="ASC"):
self.startTimer()
cur = self.getCursor()
sql = "SELECT id FROM `%s`" % self.getName()
if order_by:
sql += " ORDER BY `%s` %s" % (order_by, order_dir,)
cur.execute(sql)
items = (item["id"] for item in cur.fetchall())
self.stopTimer('get_ids')
return items
pass
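# Illustrative usage sketch (hypothetical: `manager` stands for whatever
# initialized dedupsqlfs MySQL manager object this Table class expects):
#
#   from time import time
#   table = TableSubvolume(manager)
#   table.create()
#   subvol_id = table.insert(b"@root", created_at=int(time()))
#   table.readonly(subvol_id, True)
#   row = table.find(b"@root")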
|
mit
| 8,267,020,845,199,836,000
| 28.550607
| 137
| 0.461981
| false
| 3.835523
| false
| false
| false
|
luwei0917/awsemmd_script
|
script/CalcRg.py
|
1
|
2726
|
#!/usr/bin/python
# ----------------------------------------------------------------------
# Copyright (2010) Aram Davtyan and Garegin Papoian
# Papoian's Group, University of Maryland at Collage Park
# http://papoian.chem.umd.edu/
# Last Update: 03/04/2011
# ----------------------------------------------------------------------
import sys
from VectorAlgebra import *
atom_type = {'1' : 'C', '2' : 'N', '3' : 'O', '4' : 'C', '5' : 'H', '6' : 'C'}
atom_desc = {'1' : 'C-Alpha', '2' : 'N', '3' : 'O', '4' : 'C-Beta', '5' : 'H-Beta', '6' : 'C-Prime'}
PDB_type = {'1' : 'CA', '2' : 'N', '3' : 'O', '4' : 'CB', '5' : 'HB', '6' : 'C' }
class Atom:
No = 0
ty = ''
x = 0.0
y = 0.0
z = 0.0
desc = ''
def __init__(self, No, ty, No_m, x, y, z, desc=''):
self.No = No
self.ty = ty
self.No_m = No_m
self.x = x
self.y = y
self.z = z
self.desc = desc
def write_(self, f):
f.write(str(self.No))
f.write(' ')
f.write(PDB_type[self.No_m])
f.write(' ')
f.write(str(round(self.x,8)))
f.write(' ')
f.write(str(round(self.y,8)))
f.write(' ')
f.write(str(round(self.z,8)))
f.write(' ')
f.write(self.desc)
f.write('\n')
if len(sys.argv)!=3:
print "\nCalcQValue.py Input_file Output_file\n"
sys.exit()
input_file = sys.argv[1]
output_file = ""
if len(sys.argv)>2: output_file = sys.argv[2]
n_atoms = 0
i_atom = 0
item = ''
step = 0
ca_atoms = []
box = []
A = []
out = open(output_file, 'w')
def computeRg():
if len(ca_atoms)==0:
print "Error. Empty snapshot"
exit()
N = len(ca_atoms)
Rg = 0.0
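    # Radius of gyration from pairwise distances:
    # Rg^2 = (1/N^2) * sum_{i<j} |r_i - r_j|^2, which equals the mean-square
    # deviation from the centroid without ever computing the centroid.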
for ia in range(0, N):
for ja in range(ia+1, N):
rv = vector(ca_atoms[ia], ca_atoms[ja])
rsq = pow(rv[0],2)+pow(rv[1],2)+pow(rv[2],2)
Rg = Rg + rsq
Rg = sqrt(Rg/N/N)
return Rg
lfile = open(input_file)
for l in lfile:
l = l.strip()
if l[:5]=="ITEM:":
item = l[6:]
else:
if item == "TIMESTEP":
if len(ca_atoms)>0:
rg = computeRg()
out.write(str(round(rg,5)))
out.write(' ')
n_atoms = len(ca_atoms)
step = int(l)
ca_atoms = []
box = []
A = []
elif item == "NUMBER OF ATOMS":
n_atoms = int(l)
elif item[:10] == "BOX BOUNDS":
box.append(l)
l = l.split()
A.append([float(l[0]), float(l[1])])
elif item[:5] == "ATOMS":
l = l.split()
i_atom = l[0]
x = float(l[2])
y = float(l[3])
z = float(l[4])
x = (A[0][1] - A[0][0])*x + A[0][0]
y = (A[1][1] - A[1][0])*y + A[1][0]
z = (A[2][1] - A[2][0])*z + A[2][0]
desc = atom_desc[l[1]]
if desc=='C-Alpha':
# atom = Atom(i_atom, atom_type[l[1]], l[1], x, y, z, desc)
atom = [x,y,z]
ca_atoms.append(atom)
lfile.close()
if len(ca_atoms)>0:
rg = computeRg()
out.write(str(round(rg,5)))
out.write(' ')
n_atoms = len(ca_atoms)
out.close()
|
mit
| -197,910,826,636,064,500
| 20.296875
| 100
| 0.495231
| false
| 2.172112
| false
| false
| false
|
Aloomaio/googleads-python-lib
|
examples/adwords/v201802/extensions/add_site_links.py
|
1
|
5277
|
#!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Adds sitelinks to a campaign using the CampaignExtensionSettingService.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
from datetime import datetime
from googleads import adwords
from googleads import errors
from pytz import timezone
CAMPAIGN_ID = 'INSERT_CAMPAIGN_ID_HERE'
def main(client, campaign_id):
# Initialize appropriate services.
campaign_extension_setting_service = client.GetService(
'CampaignExtensionSettingService', version='v201802')
customer_service = client.GetService('CustomerService', version='v201802')
# Find the matching customer and its time zone. The getCustomers method will
# return a single Customer object corresponding to the configured
# clientCustomerId.
customer = customer_service.getCustomers()[0]
customer_tz = timezone(customer['dateTimeZone'])
time_fmt = '%s %s' % ('%Y%m%d %H%M%S', customer_tz)
print ('Found customer ID %d with time zone "%s".'
% (customer['customerId'], customer['dateTimeZone']))
# Create the sitelinks
sitelink1 = {
'xsi_type': 'SitelinkFeedItem',
'sitelinkText': 'Store Hours',
'sitelinkFinalUrls': {'urls': ['http://www.example.com/storehours']}
}
# Show the Thanksgiving specials link only from 20 - 27 Nov.
sitelink2 = {
'xsi_type': 'SitelinkFeedItem',
'sitelinkText': 'Thanksgiving Specials',
'sitelinkFinalUrls': {'urls': ['http://www.example.com/thanksgiving']},
# The time zone of the start and end date/times must match the time zone
# of the customer.
'startTime': datetime(datetime.now().year, 11, 20, 0, 0, 0, 0,
customer_tz).strftime(time_fmt),
'endTime': datetime(datetime.now().year, 11, 27, 23, 59, 59, 59,
customer_tz).strftime(time_fmt),
# Target this sitelink for United States only. For valid geolocation
# codes, see:
# https://developers.google.com/adwords/api/docs/appendix/geotargeting
'geoTargeting': {'id': 2840},
# Restrict targeting only to people physically within the United States.
# Otherwise, this could also show to people interested in the United
# States, but not physically located there.
'geoTargetingRestriction': {
'geoRestriction': 'LOCATION_OF_PRESENCE'
}
}
# Show the wifi details primarily for high end mobile users.
sitelink3 = {
'xsi_type': 'SitelinkFeedItem',
'sitelinkText': 'Wifi Available',
'sitelinkFinalUrls': {'urls': ['http://www.example.com/mobile/wifi']},
# See https://developers.google.com/adwords/api/docs/appendix/platforms
# for device criteria IDs.
'devicePreference': {'devicePreference': '30001'},
# Target this sitelink only when the ad is triggered by the keyword
# "free wifi."
'keywordTargeting': {
'text': 'free wifi',
'matchType': 'BROAD'
}
}
# Show the happy hours link only during Mon - Fri 6PM to 9PM.
sitelink4 = {
'xsi_type': 'SitelinkFeedItem',
'sitelinkText': 'Happy hours',
'sitelinkFinalUrls': {'urls': ['http://www.example.com/happyhours']},
'scheduling': {
'feedItemSchedules': [
{
'dayOfWeek': day,
'startHour': '18',
'startMinute': 'ZERO',
'endHour': '21',
'endMinute': 'ZERO'
} for day in ['MONDAY', 'TUESDAY', 'WEDNESDAY', 'THURSDAY',
'FRIDAY']
]
}
}
# Create your Campaign Extension Settings. This associates the sitelinks
# to your campaign.
campaign_extension_setting = {
'campaignId': campaign_id,
'extensionType': 'SITELINK',
'extensionSetting': {
'extensions': [sitelink1, sitelink2, sitelink3, sitelink4]
}
}
operation = {
'operator': 'ADD',
'operand': campaign_extension_setting
}
# Add the extensions.
response = campaign_extension_setting_service.mutate([operation])
if 'value' in response:
print ('Extension setting with type "%s" was added to campaignId "%d".' %
(response['value'][0]['extensionType'],
response['value'][0]['campaignId']))
else:
raise errors.GoogleAdsError('No extension settings were added.')
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, CAMPAIGN_ID)
|
apache-2.0
| 1,125,957,785,789,636,000
| 35.143836
| 78
| 0.653781
| false
| 3.823913
| false
| false
| false
|
izolight/django-seoultransport
|
busgokr/models.py
|
1
|
2624
|
from django.db import models
class RouteType(models.Model):
id = models.IntegerField(primary_key=True)
name = models.CharField(max_length=20)
color = models.CharField(max_length=10)
def __str__(self):
return self.name
class Location(models.Model):
name = models.CharField(max_length=30)
def __str__(self):
return self.name
class BusStation(models.Model):
id = models.IntegerField(primary_key=True)
arsid = models.IntegerField(unique=True, null=True)
name = models.ForeignKey(Location, null=True)
latitude = models.DecimalField(max_digits=18, decimal_places=15, null=True)
longitude = models.DecimalField(max_digits=18, decimal_places=15, null=True)
def __str__(self):
if self.name:
return str(self.name)
return str(self.id)
class Corporation(models.Model):
name = models.CharField(max_length=30)
def __str__(self):
return self.name
class BusRoute(models.Model):
id = models.IntegerField(primary_key=True)
name = models.CharField(max_length=20)
length = models.DecimalField(max_digits=4, decimal_places=1)
route_type = models.ForeignKey(RouteType)
first_time = models.DateTimeField()
last_time = models.DateTimeField()
first_station = models.ForeignKey('BusStation', related_name='first_station', null=True)
last_station = models.ForeignKey('BusStation', related_name='last_station', null=True)
interval = models.IntegerField()
first_low_time = models.DateTimeField(null=True)
last_low_time = models.DateTimeField(null=True)
corporation = models.ForeignKey('Corporation')
def __str__(self):
return self.name
class SearchedLive(models.Model):
busroute = models.ForeignKey(BusRoute)
def __str__(self):
return str(self.busroute)
class Section(models.Model):
id = models.IntegerField(primary_key=True)
distance = models.DecimalField(max_digits=8, decimal_places=3)
speed = models.IntegerField()
def __str__(self):
return str(self.id)
class Sequence(models.Model):
number = models.IntegerField()
section = models.ForeignKey('Section', null=True)
turnstation = models.ForeignKey('BusStation', related_name='turnstation')
station = models.ForeignKey('BusStation')
is_turnstation = models.BooleanField(default=False)
route = models.ForeignKey('BusRoute')
direction = models.ForeignKey(Location, null=True)
first_time = models.TimeField(null=True)
last_time = models.TimeField(null=True)
def __str__(self):
return str(self.route) + '-' + str(self.number)
|
bsd-2-clause
| 1,073,920,046,047,536,800
| 29.523256
| 92
| 0.6875
| false
| 3.748571
| false
| false
| false
|
serein7/openag_brain
|
src/openag_brain/software_modules/video_writer.py
|
1
|
6194
|
#!/usr/bin/env python
"""
Stitches together the images from a recipe run and stores them as a video on
the recipe_start data point
"""
import os
import time
import rospy
import tempfile
import subprocess
from openag.cli.config import config as cli_config
from openag.couch import Server
from openag.db_names import ENVIRONMENTAL_DATA_POINT
from openag.var_types import RECIPE_START, RECIPE_END, AERIAL_IMAGE
class VideoWriter(object):
def __init__(self, server, environment, variable):
self.image_dir = tempfile.mkdtemp()
self.data_db = server[ENVIRONMENTAL_DATA_POINT]
self.environment = environment
self.variable = variable
self.start_doc = None
self.end_doc = None
# Initialize change feeds
self.last_seq_by_var = {}
last_db_seq = self.data_db.changes(
limit=1, descending=True
)['last_seq']
for var in [RECIPE_START, RECIPE_END, self.variable]:
self.last_seq_by_var[var] = last_db_seq
# Figure out when the most recent recipe started
start_view = self.data_db.view("openag/by_variable", startkey=[
self.environment, "desired", RECIPE_START.name
], endkey=[
self.environment, "desired", RECIPE_START.name, {}
], group_level=3)
if len(start_view) == 0:
# No recipe has ever been run on this machine
return
self.start_doc = start_view.rows[0].value
# Make sure the recipe hasn't ended yet
end_view = self.data_db.view("openag/by_variable", startkey=[
self.environment, "desired", RECIPE_END.name
], endkey=[
self.environment, "desired", RECIPE_END.name, {}
], group_level=3)
if len(end_view):
self.end_doc = end_view.rows[0].value
if (self.end_doc["timestamp"] > self.start_doc["timestamp"]):
return
# Download all of the images from the recipe run so far
image_view = self.data_db.view("openag/by_variable", startkey=[
self.environment, "measured", AERIAL_IMAGE.name,
self.start_doc["timestamp"]
], endkey=[
self.environment, "measured", AERIAL_IMAGE.name, {}
], group_level=4)
for row in image_view:
self.download_image(row.value)
self.update_video()
def __del__(self):
import shutil
shutil.rmtree(self.image_dir)
def run(self):
while True:
time.sleep(5)
if rospy.is_shutdown():
break
if self.start_doc and (not self.end_doc or self.start_doc["timestamp"] > self.end_doc["timestamp"]):
# A recipe is running
# Check if it has ended
end_docs = self.get_variable_changes(RECIPE_END)
for end_doc in end_docs:
if end_doc["timestamp"] > self.end_doc["timestamp"]:
self.end_doc = end_doc
# Update the timelapse
res = self.get_variable_changes(self.variable)
should_update_video = False
for img in res:
if img["timestamp"] > self.start_doc["timestamp"]:
self.download_image(img)
should_update_video = True
if should_update_video:
self.update_video()
else:
# No recipe is running
# Check if a recipe has started
res = self.get_variable_changes(RECIPE_START)
if len(res):
self.start_doc = res[-1]
def get_variable_changes(self, variable):
"""
Get a list of all new environmental data points of the given variable
since the last time this function was called with that variable
"""
res = self.data_db.changes(
since=self.last_seq_by_var.get(variable, 0),
filter="openag/by_variable", variables=[variable],
include_docs=True
)
self.last_seq_by_var[variable] = res["last_seq"]
return [x["doc"] for x in res["results"]]
def download_image(self, doc):
"""
        Downloads the image stored as an attachment on the given document and
stores it in the folder with the rest of the images for the current
recipe run
"""
image = self.data_db.get_attachment(doc, "image")
if image is None:
# We might see the document before the attachment is uploaded. Wait
# a little while and try again
time.sleep(1)
image = self.data_db.get_attachment(doc, "image")
file_name = str(int(doc["timestamp"])) + ".png"
file_path = os.path.join(self.image_dir, file_name)
with open(file_path, "w+") as f:
f.write(image.read())
def update_video(self):
"""
Constructs a video from the images already downloaded and stores it in
the RECIPE_START document for the current recipe run
"""
out_file = os.path.join(self.image_dir, "out.mp4")
if os.path.isfile(out_file):
os.remove(out_file)
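        # Encode the timestamp-named PNGs at one input frame per second into
        # an H.264 MP4; running ffmpeg with cwd=self.image_dir lets the glob
        # pattern pick up every frame downloaded so far.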
if subprocess.call([
"ffmpeg", "-framerate", "1", "-pattern_type", "glob", "-i",
"*.png", "-c:v", "libx264", "out.mp4"
], cwd=self.image_dir):
raise RuntimeError("Failed to update video")
with open(out_file) as f:
print self.data_db.put_attachment(
self.start_doc, f, "timelapse", "video/mp4"
)
if __name__ == '__main__':
db_server = cli_config["local_server"]["url"]
if not db_server:
raise RuntimeError("No local database specified")
server = Server(db_server)
rospy.init_node("video_writer")
namespace = rospy.get_namespace()
if namespace == '/':
raise RuntimeError(
"Video writer cannot be run in the global namespace. Please "
"designate an environment for this module."
)
environment = namespace.split('/')[-2]
mod = VideoWriter(server, environment, AERIAL_IMAGE)
mod.run()
|
gpl-3.0
| 2,755,527,196,217,202,700
| 37.7125
| 112
| 0.570552
| false
| 3.957827
| false
| false
| false
|
HybridF5/jacket
|
jacket/compute/conf/vnc.py
|
1
|
7868
|
# Copyright (c) 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
vnc_group = cfg.OptGroup(
'vnc',
title='VNC options',
help="""
Virtual Network Computer (VNC) can be used to provide remote desktop
console access to instances for tenants and/or administrators.""")
enabled = cfg.BoolOpt(
'enabled',
default=True,
deprecated_group='DEFAULT',
deprecated_name='vnc_enabled',
help="""Enable VNC related features.
Guests will get created with graphical devices to support this. Clients
(for example Horizon) can then establish a VNC connection to the guest.
Possible values:
* True: Enables the feature
* False: Disables the feature
Services which consume this:
* ``compute-compute``
Related options:
* None
""")
keymap = cfg.StrOpt(
'keymap',
default='en-us',
deprecated_group='DEFAULT',
deprecated_name='vnc_keymap',
help="""Keymap for VNC.
The keyboard mapping (keymap) determines which keyboard layout a VNC
session should use by default.
Possible values:
* A keyboard layout which is supported by the underlying hypervisor on
this node. This is usually an 'IETF language tag' (for example
'en-us'). If you use QEMU as hypervisor, you should find the list
of supported keyboard layouts at ``/usr/share/qemu/keymaps``.
Services which consume this:
* ``compute-compute``
Related options:
* None
""")
# TODO(sfinucan): This should be an IPOpt
vncserver_listen = cfg.StrOpt(
'vncserver_listen',
default='127.0.0.1',
deprecated_group='DEFAULT',
help="""
The IP address on which an instance should listen to for incoming VNC
connection requests on this node.
Possible values:
* An IP address
Services which consume this:
* ``compute-compute``
Related options:
* None
""")
# TODO(sfinucan): This should be an IPOpt
vncserver_proxyclient_address = cfg.StrOpt(
'vncserver_proxyclient_address',
default='127.0.0.1',
deprecated_group='DEFAULT',
help="""
Private, internal address of VNC console proxy.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients.
This option sets the private address to which proxy clients, such as
``compute-xvpvncproxy``, should connect to.
Possible values:
* An IP address
Services which consume this:
* ``compute-compute``
Related options:
* None
""")
# TODO(sfinucan): This should be an IPOpt
novncproxy_host = cfg.StrOpt(
'novncproxy_host',
default='0.0.0.0',
deprecated_group='DEFAULT',
help="""
IP address that the noVNC console proxy should bind to.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients. noVNC provides
VNC support through a websocket-based client.
This option sets the private address to which the noVNC console proxy
service should bind to.
Possible values:
* An IP address
Services which consume this:
* ``compute-compute``
Related options:
* novncproxy_port
* novncproxy_base_url
""")
# TODO(sfinucan): This should be a PortOpt
novncproxy_port = cfg.IntOpt(
'novncproxy_port',
default=6080,
min=1,
max=65535,
deprecated_group='DEFAULT',
help="""
Port that the noVNC console proxy should bind to.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients. noVNC provides
VNC support through a websocket-based client.
This option sets the private port to which the noVNC console proxy
service should bind to.
Possible values:
* A port number
Services which consume this:
* ``compute-compute``
Related options:
* novncproxy_host
* novncproxy_base_url
""")
novncproxy_base_url = cfg.StrOpt(
'novncproxy_base_url',
default='http://127.0.0.1:6080/vnc_auto.html',
deprecated_group='DEFAULT',
help="""
Public address of noVNC VNC console proxy.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients. noVNC provides
VNC support through a websocket-based client.
This option sets the public base URL to which client systems will
connect. noVNC clients can use this address to connect to the noVNC
instance and, by extension, the VNC sessions.
Possible values:
* A URL
Services which consume this:
* ``compute-compute``
Related options:
* novncproxy_host
* novncproxy_port
""")
# TODO(sfinucan): This should be an IPOpt
xvpvncproxy_host = cfg.StrOpt(
'xvpvncproxy_host',
default='0.0.0.0',
deprecated_group='DEFAULT',
help="""
IP address that the XVP VNC console proxy should bind to.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients. Xen provides
the Xenserver VNC Proxy, or XVP, as an alternative to the
websocket-based noVNC proxy used by Libvirt. In contrast to noVNC,
XVP clients are Java-based.
This option sets the private address to which the XVP VNC console proxy
service should bind to.
Possible values:
* An IP address
Services which consume this:
* ``compute-compute``
Related options:
* xvpvncproxy_port
* xvpvncproxy_base_url
""")
# TODO(sfinucan): This should be a PortOpt
xvpvncproxy_port = cfg.IntOpt(
'xvpvncproxy_port',
default=6081,
min=1,
max=65535,
deprecated_group='DEFAULT',
help="""
Port that the XVP VNC console proxy should bind to.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients. Xen provides
the Xenserver VNC Proxy, or XVP, as an alternative to the
websocket-based noVNC proxy used by Libvirt. In contrast to noVNC,
XVP clients are Java-based.
This option sets the private port to which the XVP VNC console proxy
service should bind to.
Possible values:
* A port number
Services which consume this:
* ``compute-compute``
Related options:
* xvpvncproxy_host
* xvpvncproxy_base_url
""")
xvpvncproxy_base_url = cfg.StrOpt(
'xvpvncproxy_base_url',
default='http://127.0.0.1:6081/console',
deprecated_group='DEFAULT',
help="""
Public address of XVP VNC console proxy.
The VNC proxy is an OpenStack component that enables compute service
users to access their instances through VNC clients. Xen provides
the Xenserver VNC Proxy, or XVP, as an alternative to the
websocket-based noVNC proxy used by Libvirt. In contrast to noVNC,
XVP clients are Java-based.
This option sets the public base URL to which client systems will
connect. XVP clients can use this address to connect to the XVP
instance and, by extension, the VNC sessions.
Possible values:
* A URL
Services which consume this:
* ``compute-compute``
Related options:
* xvpvncproxy_host
* xvpvncproxy_port
""")
ALL_OPTS = [
enabled,
keymap,
vncserver_listen,
vncserver_proxyclient_address,
novncproxy_host,
novncproxy_port,
novncproxy_base_url,
xvpvncproxy_host,
xvpvncproxy_port,
xvpvncproxy_base_url]
CLI_OPTS = [
novncproxy_host,
novncproxy_port]
def register_opts(conf):
conf.register_group(vnc_group)
conf.register_opts(ALL_OPTS, group=vnc_group)
def register_cli_opts(conf):
conf.register_cli_opts(CLI_OPTS, group=vnc_group)
def list_opts():
return {vnc_group: ALL_OPTS}
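# Illustrative usage sketch (standard oslo_config flow; `conf` here is a fresh
# ConfigOpts instance rather than a project-wide CONF object):
#
#   from oslo_config import cfg as _cfg
#   conf = _cfg.ConfigOpts()
#   register_opts(conf)
#   assert conf.vnc.novncproxy_port == 6080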
|
apache-2.0
| 3,083,081,730,921,231,400
| 22.486567
| 78
| 0.734875
| false
| 3.528251
| false
| false
| false
|
thpmacedo/granjaSucker
|
granjaUpdateStatistics.py
|
1
|
11815
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os
import logging
import time
import sqlite3
from os.path import basename
################################################################################
# STATIC DEF
################################################################################
PATH_GRANJA_DB = 'sqlite/granjaResult.sqlite'
################################################################################
# GLOBAL DEF
################################################################################
################################################################################
################################################################################
def updateStatistics():
func_name = sys._getframe().f_code.co_name
logger = logging.getLogger(func_name)
logger.debug(PATH_GRANJA_DB)
dbConnection = sqlite3.connect(PATH_GRANJA_DB)
dbCursor = dbConnection.cursor()
####################
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "KGV RACE TRACKS", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "KGV RACE TRANCKS", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "KVG RACE TRACKS", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "KGV RACE TRANKS", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "CIRUITO", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "CIRCUITO", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "CRICUITO", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = replace(trackConfig, "-", "");''')
dbCursor.execute('''UPDATE RACES SET trackConfig = trim(trackConfig);''')
dbCursor.execute('''UPDATE RACES SET trackConfig = ltrim(trackConfig, '0');''')
dbCursor.execute('''UPDATE RACES SET trackConfig = trim(trackConfig);''')
dbConnection.commit()
####################
dbCursor.execute('''DROP TABLE IF EXISTS LAST_RACES;''')
dbCursor.execute('''CREATE TABLE LAST_RACES AS
SELECT raceId,driverClass,trackConfig,COUNT(kartNumber) AS gridSize
FROM races GROUP BY raceId ORDER BY raceId DESC LIMIT 100;''')
dbCursor.execute('''DROP VIEW IF EXISTS VIEW_LAST_RACES;''')
dbCursor.execute('''CREATE VIEW VIEW_LAST_RACES AS
SELECT driverClass,COUNT(raceId) AS qtRaces,MAX(raceId) as lastRaceId
FROM LAST_RACES GROUP BY driverClass;''')
dbCursor.execute('''DROP VIEW IF EXISTS VIEW_LAST_RACES_PER_TRACK;''')
dbCursor.execute('''CREATE VIEW VIEW_LAST_RACES_PER_TRACK AS
SELECT driverClass,trackConfig,COUNT(raceId) AS qtRaces,MAX(raceId) as lastRaceId
FROM LAST_RACES GROUP BY driverClass,trackConfig;''')
####################
dbCursor.execute('''DROP TABLE IF EXISTS INDOOR_RANKING_LAPTIME_C_MODA;''')
dbCursor.execute('''CREATE TABLE INDOOR_RANKING_LAPTIME_C_MODA AS
SELECT kartNumber, driverName, MIN(bestLapTime) AS 'BEST_LAP', AVG(bestLapTime) AS 'AVG_LAP', COUNT(*) AS LAPS
FROM races
WHERE driverClass = 'INDOOR'
AND trackConfig IN (SELECT trackConfig FROM VIEW_LAST_RACES_PER_TRACK WHERE driverClass = 'INDOOR' ORDER BY qtRaces DESC LIMIT 1)
AND raceId IN (SELECT raceId FROM LAST_RACES)
GROUP BY kartNumber
ORDER BY BEST_LAP;''')
####################
dbCursor.execute('''DROP TABLE IF EXISTS PAROLIN_RANKING_LAPTIME_C_MODA;''')
dbCursor.execute('''CREATE TABLE PAROLIN_RANKING_LAPTIME_C_MODA AS
SELECT kartNumber, driverName, MIN(bestLapTime) AS 'BEST_LAP', AVG(bestLapTime) AS 'AVG_LAP', COUNT(*) AS LAPS
FROM races
WHERE driverClass = 'PAROLIN'
AND trackConfig IN (SELECT trackConfig FROM VIEW_LAST_RACES_PER_TRACK WHERE driverClass = 'PAROLIN' ORDER BY qtRaces DESC LIMIT 1)
AND raceId IN (SELECT raceId FROM LAST_RACES)
GROUP BY kartNumber
ORDER BY BEST_LAP;''')
####################
dbCursor.execute('''DROP TABLE IF EXISTS GERAL_RANKING_LAPTIME_C_MODA;''')
dbCursor.execute('''CREATE TABLE GERAL_RANKING_LAPTIME_C_MODA AS
SELECT driverClass, driverName, MIN(bestLapTime) AS 'BEST_LAP', COUNT(*) AS LAPS
FROM races
WHERE
        trackConfig IN (SELECT trackConfig FROM (SELECT trackConfig,COUNT(*) AS qt FROM RACES GROUP BY trackConfig ORDER BY qt DESC LIMIT 1))
AND raceId IN (SELECT raceId FROM LAST_RACES)
GROUP BY driverClass
ORDER BY BEST_LAP;''')
####################
dbCursor.execute('''DROP TABLE IF EXISTS GERAL_RANKING_LAPTIME;''')
dbCursor.execute('''CREATE TABLE GERAL_RANKING_LAPTIME AS
SELECT trackConfig, driverName, driverClass, MIN(bestLapTime) AS 'BEST_LAP', COUNT(*) AS LAPS
FROM races
WHERE
(driverClass='INDOOR' OR driverClass='PAROLIN')
AND raceId IN (SELECT raceId FROM LAST_RACES)
GROUP BY trackConfig;''')
####################
dbCursor.execute('''DROP TABLE IF EXISTS ALLTIME_RANKING_LAPTIME;''')
dbCursor.execute('''CREATE TABLE ALLTIME_RANKING_LAPTIME AS
SELECT trackConfig, driverName, driverClass, MIN(bestLapTime) AS 'BEST_LAP', COUNT(*) AS LAPS
FROM races
GROUP BY trackConfig;''')
dbCursor.execute('''DROP TABLE IF EXISTS ALLTIME_RANKING_LAPTIME_INDOOR;''')
dbCursor.execute('''CREATE TABLE ALLTIME_RANKING_LAPTIME_INDOOR AS
SELECT trackConfig, driverName, MIN(bestLapTime) AS 'BEST_LAP', COUNT(*) AS LAPS
FROM races
WHERE driverClass='INDOOR'
GROUP BY trackConfig;''')
dbCursor.execute('''DROP TABLE IF EXISTS ALLTIME_RANKING_LAPTIME_PAROLIN;''')
dbCursor.execute('''CREATE TABLE ALLTIME_RANKING_LAPTIME_PAROLIN AS
SELECT trackConfig, driverName, MIN(bestLapTime) AS 'BEST_LAP', COUNT(*) AS LAPS
FROM races
WHERE driverClass='PAROLIN'
GROUP BY trackConfig;''')
dbConnection.commit()
####################
# CKC_BI_INDOOR
####################
dbCursor.execute('''DROP TABLE IF EXISTS INDOOR_KART_POS_FINISH;''')
dbCursor.execute('''CREATE TABLE INDOOR_KART_POS_FINISH AS
SELECT kartNumber, positionFinish, COUNT(*) AS posCount
FROM races
WHERE driverClass='INDOOR' AND raceId IN (SELECT raceId FROM LAST_RACES)
GROUP BY kartNumber, positionFinish;''')
dbCursor.execute('''DROP TABLE IF EXISTS INDOOR_RANKING_PODIUM;''')
dbCursor.execute('''CREATE TABLE INDOOR_RANKING_PODIUM AS
SELECT
*
,(0.40*ifnull(qt1,0) + 0.25*ifnull(qt2,0) + 0.15*ifnull(qt3,0) + 0.10*ifnull(qt4,0) + 0.07*ifnull(qt5,0) + 0.03*ifnull(qt6,0)) / qtRaces AS PODIUM_RATE
,ifnull(1.0*qt1,0) / qtRaces AS p1ratio
,ifnull(1.0*qt2,0) / qtRaces AS p2ratio
,ifnull(1.0*qt3,0) / qtRaces AS p3ratio
,ifnull(1.0*qt4,0) / qtRaces AS p4ratio
,ifnull(1.0*qt5,0) / qtRaces AS p5ratio
,ifnull(1.0*qt6,0) / qtRaces AS p6ratio
FROM (
SELECT kartNumber,
SUM(posCount) AS qtRaces
,(SELECT i.posCount FROM INDOOR_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=1) AS qt1
,(SELECT i.posCount FROM INDOOR_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=2) AS qt2
,(SELECT i.posCount FROM INDOOR_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=3) AS qt3
,(SELECT i.posCount FROM INDOOR_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=4) AS qt4
,(SELECT i.posCount FROM INDOOR_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=5) AS qt5
,(SELECT i.posCount FROM INDOOR_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=6) AS qt6
FROM INDOOR_KART_POS_FINISH e
GROUP BY kartNumber
)
WHERE qtRaces > 30
ORDER BY PODIUM_RATE DESC;''')
dbCursor.execute('''CREATE TEMPORARY TABLE IF NOT EXISTS TEMP_INDOOR_RANKING_PODIUM AS
SELECT * FROM INDOOR_RANKING_PODIUM A ORDER BY A.PODIUM_RATE DESC;''')
dbCursor.execute('''CREATE TEMPORARY TABLE IF NOT EXISTS TEMP_INDOOR_RANKING_LAPTIME_C_MODA AS
SELECT * FROM INDOOR_RANKING_LAPTIME_C_MODA A ORDER BY A.BEST_LAP ASC;''')
dbCursor.execute('''DROP TABLE IF EXISTS CKC_BI_INDOOR;''')
dbCursor.execute('''CREATE TABLE CKC_BI_INDOOR AS
SELECT P.kartNumber
,P.qt1,P.qt2,P.qt3,P.qt4,P.qt5,P.qt6,P.qtRaces
,P.PODIUM_RATE
,P.rowid AS RANK_PODIUM
,T.BEST_LAP
,T.AVG_LAP
,T.rowid AS RANK_LAPTIME
,0.0125 * (P.rowid + T.rowid) AS SCORE
FROM TEMP_INDOOR_RANKING_PODIUM P,TEMP_INDOOR_RANKING_LAPTIME_C_MODA T
WHERE P.kartNumber=T.kartNumber
GROUP BY P.kartNumber
ORDER BY SCORE;''')
#,0.0125 * (P.rowid + T.rowid) AS SCORE
#,0.00625 * (P.rowid + 3 * T.rowid) AS SCORE
# 1/(40+40) = .0125
# 1/(40+3*40) = .00625
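    # SCORE blends the podium rank (P.rowid) with the lap-time rank (T.rowid);
    # the constants above apparently normalize the sum to roughly [0, 1] for a
    # field of about 40 karts, with the second variant weighting lap time 3x.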
dbConnection.commit()
####################
# CKC_BI_PAROLIN
####################
dbCursor.execute('''DROP TABLE IF EXISTS PAROLIN_KART_POS_FINISH;''')
dbCursor.execute('''CREATE TABLE PAROLIN_KART_POS_FINISH AS
SELECT kartNumber, positionFinish, COUNT(*) AS posCount
FROM races
WHERE driverClass='PAROLIN' AND raceId IN (SELECT raceId FROM LAST_RACES)
GROUP BY kartNumber, positionFinish;''')
dbCursor.execute('''DROP TABLE IF EXISTS PAROLIN_RANKING_PODIUM;''')
dbCursor.execute('''CREATE TABLE PAROLIN_RANKING_PODIUM AS
SELECT *,(0.28*ifnull(qt1,0) + 0.20*ifnull(qt2,0) + 0.17*ifnull(qt3,0) + 0.14*ifnull(qt4,0) + 0.11*ifnull(qt5,0) + 0.09*ifnull(qt6,0)) / qtRaces AS PODIUM_RATE
FROM (
SELECT kartNumber,
SUM(posCount) AS qtRaces
,(SELECT i.posCount FROM PAROLIN_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=1) AS qt1
,(SELECT i.posCount FROM PAROLIN_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=2) AS qt2
,(SELECT i.posCount FROM PAROLIN_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=3) AS qt3
,(SELECT i.posCount FROM PAROLIN_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=4) AS qt4
,(SELECT i.posCount FROM PAROLIN_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=5) AS qt5
,(SELECT i.posCount FROM PAROLIN_KART_POS_FINISH i WHERE e.kartNumber=i.kartNumber AND i.positionFinish=6) AS qt6
FROM PAROLIN_KART_POS_FINISH e
GROUP BY kartNumber
)
WHERE qtRaces > 30
ORDER BY PODIUM_RATE DESC;''')
dbCursor.execute('''CREATE TEMPORARY TABLE IF NOT EXISTS TEMP_PAROLIN_RANKING_PODIUM AS
SELECT * FROM PAROLIN_RANKING_PODIUM A ORDER BY A.PODIUM_RATE DESC;''')
dbCursor.execute('''CREATE TEMPORARY TABLE IF NOT EXISTS TEMP_PAROLIN_RANKING_LAPTIME_C_MODA AS
SELECT * FROM PAROLIN_RANKING_LAPTIME_C_MODA A ORDER BY A.BEST_LAP ASC;''')
dbCursor.execute('''DROP TABLE IF EXISTS CKC_BI_PAROLIN;''')
dbCursor.execute('''CREATE TABLE CKC_BI_PAROLIN AS
SELECT P.kartNumber
,P.qt1,P.qt2,P.qt3,P.qt4,P.qt5,P.qt6,P.qtRaces
,P.PODIUM_RATE
,P.rowid AS RANK_PODIUM
,T.BEST_LAP
,T.AVG_LAP
,T.rowid AS RANK_LAPTIME
,0.00625 * (P.rowid + 3 * T.rowid) AS SCORE
FROM TEMP_PAROLIN_RANKING_PODIUM P,TEMP_PAROLIN_RANKING_LAPTIME_C_MODA T
WHERE P.kartNumber=T.kartNumber
GROUP BY P.kartNumber
ORDER BY SCORE;''')
dbConnection.commit()
####################
####################
dbConnection.execute('''VACUUM;''')
dbConnection.commit()
####################
dbConnection.close()
###
logger.debug("DONE")
################################################################################
# MAIN
################################################################################
def main():
appName = sys.argv[0]
logging.basicConfig(
# filename = './log/' + appName + '_' + time.strftime("%Y%m%d_%H%M%S") + '.log',
datefmt = '%Y-%m%d %H:%M:%S',
format = '%(asctime)s | %(levelname)s | %(name)s | %(message)s',
level = logging.INFO
)
func_name = sys._getframe().f_code.co_name
logger = logging.getLogger(func_name)
logger.info('Started')
###
updateStatistics()
###
logger.info('Finished')
################################################################################
################################################################################
if __name__ == '__main__':
main()
|
gpl-3.0
| 7,718,151,535,844,989,000
| 43.417293
| 161
| 0.653915
| false
| 2.912968
| true
| false
| false
|
pantuza/art-gallery
|
src/triangle.py
|
1
|
1522
|
# -*- coding:utf-8 -*-
from point import Point
from side import Side
class Triangle(object):
""" Class representing a Triangle that is composed by
three Point objects
"""
def __init__(self, u, v, w):
if not all(isinstance(point, Point) for point in (u, v, w)):
raise TypeError("u, v, w must be Point objects", (u, v, w))
self.u, self.v, self.w = u, v, w
def __repr__(self):
return "[(%s, %s), (%s, %s), (%s, %s)]" \
% (self.u.x, self.u.y, self.v.x, self.v.y, self.w.x, self.w.y)
def __iter__(self):
yield self.u
yield self.v
yield self.w
def sides(self):
return (Side(self.u, self.v), Side(self.v, self.w), Side(self.w, self.u))
def opposite(self, side):
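        # Return the vertex of this triangle that does not lie on `side`.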
if self.u == side.p0:
if self.v == side.p1:
return self.w
else:
return self.v
elif self.u == side.p1:
if self.v == side.p0:
return self.w
else:
return self.v
return self.u
# Testing class
if __name__ == "__main__":
u = Point(0, 2)
v = Point(2, 0)
w = Point(5, 5)
triangle = Triangle(u, v, w)
print triangle
print "Point u = %s" % str(triangle.u)
print "Point v = %s" % str(triangle.v)
print "Point w = %s" % str(triangle.w)
# Testing class iterability
for point in triangle:
print point
# Testing the exception
Triangle(None, None, None)
|
gpl-2.0
| -6,126,048,377,337,462,000
| 23.95082
| 81
| 0.507884
| false
| 3.252137
| false
| false
| false
|
kidscancode/gamedev
|
tutorials/tilemap/part 18/sprites.py
|
1
|
8372
|
import pygame as pg
from random import uniform, choice, randint, random
from settings import *
from tilemap import collide_hit_rect
import pytweening as tween
vec = pg.math.Vector2
def collide_with_walls(sprite, group, dir):
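    # Resolve collisions one axis at a time: snap the sprite flush against the
    # wall it overlaps and zero its velocity along that axis.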
if dir == 'x':
hits = pg.sprite.spritecollide(sprite, group, False, collide_hit_rect)
if hits:
if hits[0].rect.centerx > sprite.hit_rect.centerx:
sprite.pos.x = hits[0].rect.left - sprite.hit_rect.width / 2
if hits[0].rect.centerx < sprite.hit_rect.centerx:
sprite.pos.x = hits[0].rect.right + sprite.hit_rect.width / 2
sprite.vel.x = 0
sprite.hit_rect.centerx = sprite.pos.x
if dir == 'y':
hits = pg.sprite.spritecollide(sprite, group, False, collide_hit_rect)
if hits:
if hits[0].rect.centery > sprite.hit_rect.centery:
sprite.pos.y = hits[0].rect.top - sprite.hit_rect.height / 2
if hits[0].rect.centery < sprite.hit_rect.centery:
sprite.pos.y = hits[0].rect.bottom + sprite.hit_rect.height / 2
sprite.vel.y = 0
sprite.hit_rect.centery = sprite.pos.y
class Player(pg.sprite.Sprite):
def __init__(self, game, x, y):
self._layer = PLAYER_LAYER
self.groups = game.all_sprites
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.player_img
self.rect = self.image.get_rect()
self.rect.center = (x, y)
self.hit_rect = PLAYER_HIT_RECT
self.hit_rect.center = self.rect.center
self.vel = vec(0, 0)
self.pos = vec(x, y)
self.rot = 0
self.last_shot = 0
self.health = PLAYER_HEALTH
def get_keys(self):
self.rot_speed = 0
self.vel = vec(0, 0)
keys = pg.key.get_pressed()
if keys[pg.K_LEFT] or keys[pg.K_a]:
self.rot_speed = PLAYER_ROT_SPEED
if keys[pg.K_RIGHT] or keys[pg.K_d]:
self.rot_speed = -PLAYER_ROT_SPEED
if keys[pg.K_UP] or keys[pg.K_w]:
self.vel = vec(PLAYER_SPEED, 0).rotate(-self.rot)
if keys[pg.K_DOWN] or keys[pg.K_s]:
self.vel = vec(-PLAYER_SPEED / 2, 0).rotate(-self.rot)
if keys[pg.K_SPACE]:
now = pg.time.get_ticks()
if now - self.last_shot > BULLET_RATE:
self.last_shot = now
dir = vec(1, 0).rotate(-self.rot)
pos = self.pos + BARREL_OFFSET.rotate(-self.rot)
Bullet(self.game, pos, dir)
self.vel = vec(-KICKBACK, 0).rotate(-self.rot)
choice(self.game.weapon_sounds['gun']).play()
MuzzleFlash(self.game, pos)
def update(self):
self.get_keys()
self.rot = (self.rot + self.rot_speed * self.game.dt) % 360
self.image = pg.transform.rotate(self.game.player_img, self.rot)
self.rect = self.image.get_rect()
self.rect.center = self.pos
self.pos += self.vel * self.game.dt
self.hit_rect.centerx = self.pos.x
collide_with_walls(self, self.game.walls, 'x')
self.hit_rect.centery = self.pos.y
collide_with_walls(self, self.game.walls, 'y')
self.rect.center = self.hit_rect.center
def add_health(self, amount):
self.health += amount
if self.health > PLAYER_HEALTH:
self.health = PLAYER_HEALTH
class Mob(pg.sprite.Sprite):
def __init__(self, game, x, y):
self._layer = MOB_LAYER
self.groups = game.all_sprites, game.mobs
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.mob_img.copy()
self.rect = self.image.get_rect()
self.rect.center = (x, y)
self.hit_rect = MOB_HIT_RECT.copy()
self.hit_rect.center = self.rect.center
self.pos = vec(x, y)
self.vel = vec(0, 0)
self.acc = vec(0, 0)
self.rect.center = self.pos
self.rot = 0
self.health = MOB_HEALTH
self.speed = choice(MOB_SPEEDS)
self.target = game.player
def avoid_mobs(self):
for mob in self.game.mobs:
if mob != self:
dist = self.pos - mob.pos
if 0 < dist.length() < AVOID_RADIUS:
self.acc += dist.normalize()
def update(self):
target_dist = self.target.pos - self.pos
if target_dist.length_squared() < DETECT_RADIUS**2:
if random() < 0.002:
choice(self.game.zombie_moan_sounds).play()
self.rot = target_dist.angle_to(vec(1, 0))
self.image = pg.transform.rotate(self.game.mob_img, self.rot)
self.rect.center = self.pos
self.acc = vec(1, 0).rotate(-self.rot)
self.avoid_mobs()
self.acc.scale_to_length(self.speed)
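            # Friction proportional to velocity, then constant-acceleration
            # kinematics: pos += v*dt + 0.5*a*dt^2.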
self.acc += self.vel * -1
self.vel += self.acc * self.game.dt
self.pos += self.vel * self.game.dt + 0.5 * self.acc * self.game.dt ** 2
self.hit_rect.centerx = self.pos.x
collide_with_walls(self, self.game.walls, 'x')
self.hit_rect.centery = self.pos.y
collide_with_walls(self, self.game.walls, 'y')
self.rect.center = self.hit_rect.center
if self.health <= 0:
choice(self.game.zombie_hit_sounds).play()
self.kill()
def draw_health(self):
if self.health > 60:
col = GREEN
elif self.health > 30:
col = YELLOW
else:
col = RED
width = int(self.rect.width * self.health / MOB_HEALTH)
self.health_bar = pg.Rect(0, 0, width, 7)
if self.health < MOB_HEALTH:
pg.draw.rect(self.image, col, self.health_bar)
class Bullet(pg.sprite.Sprite):
def __init__(self, game, pos, dir):
self._layer = BULLET_LAYER
self.groups = game.all_sprites, game.bullets
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.bullet_img
self.rect = self.image.get_rect()
self.hit_rect = self.rect
self.pos = vec(pos)
self.rect.center = pos
spread = uniform(-GUN_SPREAD, GUN_SPREAD)
self.vel = dir.rotate(spread) * BULLET_SPEED
self.spawn_time = pg.time.get_ticks()
def update(self):
self.pos += self.vel * self.game.dt
self.rect.center = self.pos
if pg.sprite.spritecollideany(self, self.game.walls):
self.kill()
if pg.time.get_ticks() - self.spawn_time > BULLET_LIFETIME:
self.kill()
class Obstacle(pg.sprite.Sprite):
def __init__(self, game, x, y, w, h):
self.groups = game.walls
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.rect = pg.Rect(x, y, w, h)
self.hit_rect = self.rect
self.x = x
self.y = y
self.rect.x = x
self.rect.y = y
class MuzzleFlash(pg.sprite.Sprite):
def __init__(self, game, pos):
self._layer = EFFECTS_LAYER
self.groups = game.all_sprites
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
size = randint(20, 50)
self.image = pg.transform.scale(choice(game.gun_flashes), (size, size))
self.rect = self.image.get_rect()
self.pos = pos
self.rect.center = pos
self.spawn_time = pg.time.get_ticks()
def update(self):
if pg.time.get_ticks() - self.spawn_time > FLASH_DURATION:
self.kill()
class Item(pg.sprite.Sprite):
def __init__(self, game, pos, type):
self._layer = ITEMS_LAYER
self.groups = game.all_sprites, game.items
pg.sprite.Sprite.__init__(self, self.groups)
self.game = game
self.image = game.item_images[type]
self.rect = self.image.get_rect()
self.type = type
self.pos = pos
self.rect.center = pos
self.tween = tween.easeInOutSine
self.step = 0
self.dir = 1
def update(self):
# bobbing motion
offset = BOB_RANGE * (self.tween(self.step / BOB_RANGE) - 0.5)
self.rect.centery = self.pos.y + offset * self.dir
self.step += BOB_SPEED
if self.step > BOB_RANGE:
self.step = 0
self.dir *= -1
|
mit
| 4,963,443,543,521,539,000
| 36.711712
| 84
| 0.556976
| false
| 3.232432
| false
| false
| false
|
asya-bergal/led-matrix-server
|
CPW.py
|
1
|
8468
|
import datetime
import urllib
import json
import leds
import time
from PIL import ImageFont
from PIL import Image
from PIL import ImageDraw
eventMessage = ""
eventMessage2 = ""
eventMessage3 = ""
def CPW (day,hour,minute):
intro = "Next Event In Random Hall:"
intro2 =""
global eventMessage
global eventMessage2
global eventMessage3
eventMessage = ""
eventMessage2 = ""
eventMessage3 = ""
if(day == 16):
if(hour < 16 or (hour == 16 and minute <=17)):
eventMessage = "Knitting Circle Taqueria 14:17 - 16:17 BMF "
print('hoi;')
elif(hour <19 or (hour == 19 and minute <=17)):
eventMessage = "Clam Olympics 18:17 - 19:17 Clam "
print('dfs')
elif (hour <20 or (hour == 20 and minute <=17)):
eventMessage = "Chemistry and Cake w/ LN2 ice cream continued 19:17 - 20:17 Pecker "
elif(hour <23 or (hour == 23 and minute <=59)):
eventMessage = "BBQ and Spinning on the Roofdeck w/ Giga Curry 21:47 - 24:47 Black Hole+Roofdeck "
eventMessage2 = "Five SCPs at Freddy's 22:47 - 24:47 Destiny "
eventMessage3 = "Crazy Cat Lady Make-A-Thon 23:47 - 24:47 Loop "
if(day == 17):
if (hour == 0 and minute <=47):
eventMessage = "BBQ and Spinning on the Roofdeck w/ Giga Curry 21:47 - 24:47 Black Hole+Roofdeck"
eventMessage2 = "Five SCPs at Freddy's 22:47 - 24:47 Destiny"
eventMessage3 = "Crazy Cat Lady Make-A-Thon 23:47 - 24:47 Loop"
elif (hour<12 or (hour == 12 and minute <=17)):
eventMessage = "Nerf Chess 11:17 - 12:17 Foo"
elif (hour<14 or (hour == 14 and minute <=47)):
eventMessage = "Physics and Coffee 2:17 PM - 16:17 Pecker"
eventMessage2 = "Dumpling Hylomorphisms 12:17 PM - 2:47 PM Black Hole"
elif (hour<14 or (hour == 14 and minute <=17)):
eventMessage = "Physics and Coffee 2:17 PM - 16:17 Pecker"
eventMessage2 = "Rocky Horrible's Nerdy Singalong Blog w/ LN2 ice cream 3:47 PM - 17:47 AIW "
elif (hour<17 or (hour == 17 and minute <=47)):
eventMessage = "Rocky Horrible's Nerdy Singalong Blog w/ LN2 ice cream 3:47 PM - 17:47 AIW "
eventMessage2 = "mitBEEF 17:00 - 18:00 Foo "
elif (hour<18 or (hour == 18 and minute <=1)):
eventMessage = "mitBEEF 17:00 - 18:00 Foo"
eventMessage2 = "Math and Tea 17:47 - 20:47 Pecker "
elif (hour<20 or (hour == 20 and minute <=47)):
eventMessage = "Math and Tea 17:47 - 20:47 Pecker "
elif (hour<22 or (hour == 22 and minute <=17)):
eventMessage = "Star Trek on Infinite Loop 20:47 - 22:47 Loop"
eventMessage2 = "Duct Tape Construction w/ Cookies by Committee 21:47 - 11:47 PM Black Hole"
elif (hour<23 or (hour == 23 and minute <=47)):
eventMessage = "Duct Tape Construction w/ Cookies by Committee 21:47 - 11:47 PM Black Hole"
eventMessage2 = "PowerPoint Karaoke + Latte Art 10:47 PM - 12:47 PM Foo "
elif (hour<23 or (hour == 23 and minute <=59)):
eventMessage = "PowerPoint Karaoke + Latte Art 22:47 - 24:47 Foo "
if(day == 18):
if (hour == 0 and minute <= 47):
eventMessage = "PowerPoint Karaoke + Latte Art 10:47 - 24:47 Foo"
elif (hour< 11 or (hour == 11 and minute <= 47)):
eventMessage = "Saturday Morning Breakfast Cartoons w/ Ceiling Tile Painting 9:47 AM - 11:47 AM Loop"
elif (hour< 13 or (hour == 13 and minute <= 17)):
eventMessage = "Epic Mealtime of Destiny 11:47 - 13:17 Destiny"
elif (hour< 15 or (hour == 15 and minute <= 47)):
eventMessage = "Carbonated Fruit! 13:47 - 15:47 Black Hole"
elif (hour< 17 or (hour == 17 and minute <= 17)):
eventMessage = "Storytime with Cruft w/ Liquid Nitrogen Ice Cream and Truffles 15:17 - 17:17 Foo "
eventMessage2 = "Random Plays Randomly + Smash! 16:17 - 17:47 AIW"
elif (hour< 17 or (hour == 17 and minute <= 47)):
eventMessage = "Random Plays Randomly + Smash! 16:17 - 17:47 AIW"
elif (hour< 21 or (hour == 21 and minute <= 47)):
eventMessage = "InterDorm Potluck Event 19:30 - 21:47 Foo "
eventMessage2 = "Chainmail w/ Experimental Smoothies 20:47 - 21:47 Destiny"
elif (hour< 23 or (hour == 23 and minute <= 59)):
eventMessage = "Pecker Board Game Night + Teach You Tichu 21:47 - 24:47 Pecker "
eventMessage2 = "(Almost) Life-Sized Settlers of Catan 21:47 - 24:47 Foo "
if (day == 19):
if (hour == 0 and minute <= 47):
eventMessage = "Pecker Board Game Night + Teach You Tichu 21:47 - 24:47 Pecker"
eventMessage2 = "(Almost) Life-Sized Settlers of Catan 21:47 - 24:47 Foo"
else:
eventMessage = "Tea Time with Teddy 11:47 - 12:47 BMF"
print("1" + eventMessage + "\n 2" + eventMessage2 + "\n 3" + eventMessage3)
font = ImageFont.truetype("/usr/share/fonts/pixelmix.ttf", 8)
widthIntro, ignore = font.getsize(intro)
widthMessage, ignore = font.getsize(intro2 + eventMessage+eventMessage2+eventMessage3)
currentEvents = Image.new("RGB", (widthMessage + 10, 16), "black")
introText = [("Next event in ",(127,63,0)), (eventMessage, (118,13,13)) ]
text = [("R",(127,0,0)), ("a",(127,63,0)),("n",(127,127,0)),("d",(14,86,60)),("o",(10,81,102)),("m",(79,0,127)), (" Hall: ",(127,63,0)), (eventMessage2,(53,45,103)),(eventMessage3,(0,101,44))]
x = 0
for element in introText:
drawIntro = ImageDraw.Draw(currentEvents).text((x,0),element[0], element[1], font=font)
x = x + font.getsize(element[0])[0]
x = 0
count = 0
for element in text:
count += 1
drawCurrentEvents = ImageDraw.Draw(currentEvents).text((x,8),element[0],element[1],font=font)
x = x + font.getsize(element[0])[0]
if count == 7:
x = x + font.getsize("Next event in ")[0] - font.getsize("Random Hall: ")[0]
currentEvents = currentEvents.rotate(180)
currentEvents.save("currentEvents.ppm")
leds.uploadPPM("currentEvents.ppm")
def Weather ():
try:
weatherURL = urllib.URLopener().retrieve("http://api.wunderground.com/api/efb7f164a8ddf6f5/conditions/forecast/q/pws:KMACAMBR9.json","weather.json")
with open("weather.json",'r') as weatherData:
weather = json.load(weatherData)
except:
return None
current = "Currently: " + str(weather["current_observation"]["temp_c"]) + u'\N{DEGREE SIGN}' + "C | " + str(weather["current_observation"]["weather"])
current1 = current.split("|")[0] + "|"
current2 = current.split("|")[1]
forecastFor = "Forecast for " + str(weather["forecast"]["simpleforecast"]["forecastday"][0]["date"]["weekday_short"]) + ": "
forecastHi = "Hi:" + str(weather["forecast"]["simpleforecast"]["forecastday"][0]["high"]["celsius"]) + u'\N{DEGREE SIGN}' + "C "
forecastLo = "Lo:" + str(weather["forecast"]["simpleforecast"]["forecastday"][0]["low"]["celsius"]) + u'\N{DEGREE SIGN}' + "C "
forecastConditions = str(weather["forecast"]["simpleforecast"]["forecastday"][0]["conditions"])
text = [(forecastFor ,(127,63,0)),(forecastHi,(100,4,10)),(forecastLo,(35,82,90)),(forecastConditions,(101,80,80))]
forecast = forecastFor + forecastHi + forecastLo + forecastConditions
font = ImageFont.truetype("/usr/share/fonts/pixelmix.ttf", 8)
widthCurrent, ignore = font.getsize(current)
widthForecast, ignore = font.getsize(forecast)
currentWeather = Image.new("RGB", (widthForecast + 10, 16), "black")
drawCurrentWeather = ImageDraw.Draw(currentWeather).text((0,0),current1, (127,63,0), font=font)
drawCurrentWeather = ImageDraw.Draw(currentWeather).text((font.getsize(current1)[0],0),current2, (101,80,80), font=font)
x = 0
for element in text:
drawCurrentForecast = ImageDraw.Draw(currentWeather).text((x,8),element[0],element[1],font=font)
x = x + font.getsize(element[0])[0]
currentWeather = currentWeather.rotate(180)
currentWeather.save("currentWeather.ppm")
'''
widthHello, ignore = font.getsize('Welcome to Random Hall')
welcome = Image.new('RGB',(widthHello + 32,16),'black')
drawWelcome = ImageDraw.Draw(welcome).text((0,0),'Welcome to Random Hall',(256,126,0),font=font)
welcome = welcome.rotate(180)
welcome.save('welcome.ppm')
leds.uploadPPM("welcome.ppm")
'''
leds.uploadPPM("currentWeather.ppm")
print(current + "\n" + forecastFor + forecastHi + forecastLo + forecastConditions )
def updateWeather():
leds.uploadPPM("currentWeather.ppm")
if __name__ =='__main__':
while (True):
Weather()
for i in range(50):
now = datetime.datetime.now()
CPW(now.day,now.hour + 1,now.minute)
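            # Rough scroll-timing heuristic (apparently ~5 characters of
            # marquee text per second, plus a fixed 6-second margin):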
sleepTime = (len("Random Hall: ") + len(eventMessage2) + len(eventMessage3))/5 +6
time.sleep(sleepTime)
updateWeather()
time.sleep(10)
|
gpl-2.0
| -1,864,003,059,169,031,000
| 47.113636
| 193
| 0.658243
| false
| 2.846387
| false
| false
| false
|
sindhus/hasjob
|
hasjob/views/location.py
|
2
|
2094
|
# -*- coding: utf-8 -*-
from collections import OrderedDict
from datetime import datetime
from flask import redirect, abort
from baseframe.forms import render_form
from ..models import db, agelimit, Location, JobLocation, JobPost, POSTSTATUS
from ..forms import NewLocationForm, EditLocationForm
from .. import app, lastuser
from .helper import location_geodata
@app.route('/in/new', methods=['GET', 'POST'])
@lastuser.requires_permission('siteadmin')
def location_new():
now = datetime.utcnow()
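    # Collect the 100 geonames most used by currently listed posts within the
    # age limit that do not yet have a Location record, ordered by popularity.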
geonames = OrderedDict([(r.geonameid, None) for r in
db.session.query(JobLocation.geonameid, db.func.count(JobLocation.geonameid).label('count')).join(
JobPost).filter(JobPost.status.in_(POSTSTATUS.LISTED), JobPost.datetime > now - agelimit,
~JobLocation.geonameid.in_(db.session.query(Location.id))
).group_by(JobLocation.geonameid).order_by(db.text('count DESC')).limit(100)])
data = location_geodata(geonames.keys())
for row in data.values():
geonames[row['geonameid']] = row
choices = [('%s/%s' % (row['geonameid'], row['name']), row['picker_title']) for row in geonames.values()]
form = NewLocationForm()
form.geoname.choices = choices
if form.validate_on_submit():
geonameid, name = form.geoname.data.split('/', 1)
geonameid = int(geonameid)
title = geonames[geonameid]['use_title']
location = Location(id=geonameid, name=name, title=title)
db.session.add(location)
db.session.commit()
return redirect(location.url_for('edit'), code=303)
return render_form(form=form, title="Add a location")
@app.route('/in/<name>/edit', methods=['GET', 'POST'])
@lastuser.requires_permission('siteadmin')
def location_edit(name):
location = Location.get(name)
if not location:
abort(404)
form = EditLocationForm(obj=location)
if form.validate_on_submit():
form.populate_obj(location)
db.session.commit()
return redirect(location.url_for(), code=303)
return render_form(form=form, title="Edit location")
|
agpl-3.0
| -9,207,969,869,731,752,000
| 38.509434
| 109
| 0.677173
| false
| 3.537162
| false
| false
| false
|
emeric254/gala-stri-website
|
Handlers/ListingHandler.py
|
1
|
1504
|
# -*- coding: utf-8 -*-
import json
import logging
from tornado.web import authenticated
from Handlers.BaseHandler import BaseHandler
from Tools import PostgreSQL
logger = logging.getLogger(__name__)
class ListingHandler(BaseHandler):
"""Listing Handler which require a connected user"""
@authenticated
def get(self, path_request):
if path_request == 'inscrits':
self.write(json.dumps(PostgreSQL.get_all_inscrit()))
return
elif path_request == 'accompagnants':
self.write(json.dumps(PostgreSQL.get_all_accompagnants()))
return
elif path_request.startswith('inscrits') and '/' in path_request:
(_, id) = path_request.rsplit('/', 1)
try:
id = int(id)
if id < 0:
raise ValueError
except ValueError:
self.send_error(status_code=400)
return
self.write(json.dumps(PostgreSQL.get_all_accompagnants_inscrit(id)))
return
self.send_error(status_code=400)
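# Illustration (hypothetical request, not part of the original handler):
# GET .../inscrits/7 answers with the accompagnants of inscrit 7 as JSON,
# while a negative or non-numeric id such as .../inscrits/-1 yields HTTP 400.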
@authenticated
def delete(self, path_request):
if path_request.startswith('inscrits/'):
PostgreSQL.supprimer_inscrit(path_request[9:])
self.write({})
return
elif path_request.startswith('accompagnants/'):
PostgreSQL.supprimer_accompagnant(path_request[14:])
self.write({})
return
self.send_error(status_code=400)
|
mit
| -2,530,221,862,486,792,000
| 31.695652
| 80
| 0.59109
| false
| 4.143251
| false
| false
| false
|
BlakeTeam/VHDLCodeGenerator
|
lib/System.py
|
1
|
7406
|
#-------------------------------------------------------------------------------
# PROJECT: VHDL Code Generator
# NAME: System
#
# LICENSE: GNU-GPL V3
#-------------------------------------------------------------------------------
__author__ = "BlakeTeam"
import lib.signature
from lib import *
from .Block import Block as _Block
from lib.Connection import Connection as _Connection
IN = 1
OUT = 0
class System:
def __init__(self,name,input_info,output_info):
""" Structure that handles an abstract system
:String name: Name of the system (Name of the project)
:List[(String, Int)] input_info: List of (name, size) pairs for the input ports of the system
:List[(String, Int)] output_info: List of (name, size) pairs for the output ports of the system
"""
self.name = name # The name of the system
self.block_name = set() # The name of all blocks on the system
self.conn_name = set() # The name of all connections on the system
self.block = [] # Block list of the system
self.connections = {} # Connection dictionary of the system <Abstract Connection: QGraphicsLineItem>
self.system_input = _Block((),[size for name,size in input_info],self)
# Setting names to input ports
for i in range(len(input_info)):
self.system_input.output_ports[i].name = input_info[i][0]
self.system_input.screenPos = (-50,0)
self.system_input.setName("SystemInput")
self.system_output = _Block([size for name,size in output_info],(),self)
# Setting names to input ports
for i in range(len(output_info)):
self.system_output.input_ports[i].name = output_info[i][0]
self.system_output.screenPos = (50,0)
self.system_output.setName("SystemOutput")
self.input_info = input_info
self.output_info = output_info
self.input_names = [name for name,size in input_info]
self.output_names = [name for name,size in output_info]
self.includedLibrary = ["ieee.std_logic_1164.all"] #TODO: Revisar esto, hay que modificarlo
def buildVHDLCode(self):
""" Building the code that will be generated.
"""
fileText = lib.signature.signature()
# Including libraries
fileText += "-- Including libraries\nLIBRARY ieee;\n"
for i in self.includedLibrary:
fileText += "USE %s;\n"%i
fileText += "\n"
fileText += "ENTITY %s IS\n"%self.name
fileText += "-- Generating ports\n"
fileText += "PORT (\n"
# Generating input ports
for i in self.system_input.output_ports:
fileText += "%s: IN std_logic%s;\n"%(i.name,"" if i.size == 1 else "_vector(%d downto 0)"%(i.size - 1)) #TODO: Aqui cambie
# Generating output ports
for i in self.system_output.input_ports:
fileText += "%s: OUT std_logic%s;\n"%(i.name,"" if i.size == 1 else "_vector(%d downto 0)"%(i.size - 1)) #TODO: Aqui cambie
fileText = fileText[:-2]
fileText += ");\n"
fileText += "END %s;\n"%self.name
# Architecture Implementation
fileText += "\n-- Architecture Implementation\n"
fileText += "ARCHITECTURE Arq_%s OF %s IS\n"%(self.name,self.name)
fileText += "BEGIN\n"
# Port declaration
fileText += "-- Port declaration\n"
# TODO: Overrated RAM
for i in self.block:
signals = i.getSignals()
inputSig = []
outputSig = []
tempSig = []
for name,size,mode in signals:
if mode == IN:
inputSig.append((name,size))
elif mode == OUT:
outputSig.append((name,size))
else:
tempSig.append((name,size))
fileText += "\n-- Declaring %s's ports%s\n"%(i.name," & temporary signals" if len(tempSig) != 0 else "") #TODO: Aqui cambie y moví la linea de lugar
fileText += "-- Input ports\n"
for name,size in inputSig:
fileText += "signal %s__%s: std_logic%s;\n"%(i.name,name,"" if size == 1 else "_vector(%d downto 0)"%(size - 1)) #TODO: Aqui cambie
fileText += "\n-- Output ports\n"
for name,size in outputSig:
fileText += "signal %s__%s: std_logic%s;\n"%(i.name,name,"" if size == 1 else "_vector(%d downto 0)"%(size - 1)) #TODO: Aqui cambie
if len(tempSig) != 0: #TODO: Aqui cambie
fileText += "\n-- Temporary signals\n"
for name,size in tempSig:
fileText += "signal %s__%s: std_logic%s;\n"%(i.name,name,"" if size == 1 else "_vector(%d downto 0)"%(size - 1)) #TODO: Aqui cambie
# Defining connections
fileText += "\n-- Defining connections\n"
for i in self.block:
for port_inp in i.input_ports:
receiver = i.name + "__" + port_inp.name
if self.system_input == port_inp.connection.out_block:
sender = port_inp.connection.out_block.output_ports[port_inp.connection.ind_output].name
else:
sender = port_inp.connection.out_block.name + "__" + port_inp.connection.out_block.output_ports[port_inp.connection.ind_output].name
fileText += "%s <= %s;\n"%(receiver, sender)
fileText += "\n"
# Block implementations
fileText += "\n-- Blocks implementation\n"
for i in self.block:
fileText += "-- Implementation of %s block\n"%i.name
fileText += i.generate()
fileText += "\n"
# Connecting outputs
fileText += "-- Connecting outputs\n"
for i in self.system_output.input_ports:
fileText += "%s <= %s__%s;\n"%(i.name,i.connection.out_block.name,i.connection.out_block.output_ports[i.connection.ind_output].name)
fileText += "END Arq_%s;\n"%self.name
# print("\nGENERATED CODE\n")
# print(fileText)
return fileText
def __getitem__(self, name):
""" Find a port for his name.
This function starts for input ports.
If the port exist it returns the reference to the port & mode(IN/OUT)
Else it returns -1
:String name: The name of the wanted port/
"""
try:
pos = self.input_names.index(name)
return pos,IN
except ValueError:
try:
pos = self.output_names.index(name)
return pos,OUT
except ValueError:
return -1
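# Minimal sketch of the lookup above (hypothetical port names): with
# input_names == ['clk'] and output_names == ['q'],
# system['clk'] -> (0, IN), system['q'] -> (0, OUT) and system['nope'] -> -1.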
def connect(self,output_block,ind_output,input_block,ind_input,visualConnection = None):
"""
:param output_block:
:param ind_output:
:param input_block:
:param ind_input:
"""
conn = _Connection(output_block,ind_output,input_block,ind_input,self) # Creating the connection between 2 blocks
output_block.output_ports[ind_output].connection.append(conn) # Linking the connection with the output block
input_block.input_ports[ind_input].connection = conn # Linking the connection with the input block
self.connections.update({conn:visualConnection}) # Adding the connection to the connection list (on the system)
return conn
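# Rough usage sketch (hypothetical names and sizes, not from the original
# project): describe an 8-bit adder entity, then add blocks to system.block
# and wire them with system.connect(...) before emitting the VHDL.
# system = System('adder8', [('a', 8), ('b', 8)], [('sum', 8)])
# vhdl = system.buildVHDLCode()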
|
gpl-3.0
| 2,981,545,971,807,642,600
| 39.692308
| 160
| 0.559352
| false
| 3.840768
| false
| false
| false
|
zmughal/pygments-mirror
|
tests/test_latex_formatter.py
|
1
|
1504
|
# -*- coding: utf-8 -*-
"""
Pygments LaTeX formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import os
import unittest
import tempfile
from pygments.formatters import LatexFormatter
from pygments.lexers import PythonLexer
import support
TESTFILE, TESTDIR = support.location(__file__)
class LatexFormatterTest(unittest.TestCase):
def test_valid_output(self):
with open(TESTFILE) as fp:
tokensource = list(PythonLexer().get_tokens(fp.read()))
fmt = LatexFormatter(full=True, encoding='latin1')
handle, pathname = tempfile.mkstemp('.tex')
# place all output files in /tmp too
old_wd = os.getcwd()
os.chdir(os.path.dirname(pathname))
tfile = os.fdopen(handle, 'wb')
fmt.format(tokensource, tfile)
tfile.close()
try:
import subprocess
po = subprocess.Popen(['latex', '-interaction=nonstopmode',
pathname], stdout=subprocess.PIPE)
ret = po.wait()
output = po.stdout.read()
po.stdout.close()
except OSError:
# latex not available
pass
else:
if ret:
print(output)
self.assertFalse(ret, 'latex run reported errors')
os.unlink(pathname)
os.chdir(old_wd)
|
bsd-2-clause
| -1,551,528,746,510,471,000
| 26.851852
| 71
| 0.584441
| false
| 4.309456
| true
| false
| false
|
jelly/calibre
|
src/calibre/ebooks/docx/to_html.py
|
1
|
32913
|
#!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import sys, os, re, math, errno, uuid
from collections import OrderedDict, defaultdict
from lxml import html
from lxml.html.builder import (
HTML, HEAD, TITLE, BODY, LINK, META, P, SPAN, BR, DIV, SUP, A, DT, DL, DD, H1)
from calibre import guess_type
from calibre.ebooks.docx.container import DOCX, fromstring
from calibre.ebooks.docx.names import XML, generate_anchor
from calibre.ebooks.docx.styles import Styles, inherit, PageProperties
from calibre.ebooks.docx.numbering import Numbering
from calibre.ebooks.docx.fonts import Fonts
from calibre.ebooks.docx.images import Images
from calibre.ebooks.docx.tables import Tables
from calibre.ebooks.docx.footnotes import Footnotes
from calibre.ebooks.docx.cleanup import cleanup_markup
from calibre.ebooks.docx.theme import Theme
from calibre.ebooks.docx.toc import create_toc
from calibre.ebooks.docx.fields import Fields
from calibre.ebooks.docx.settings import Settings
from calibre.ebooks.metadata.opf2 import OPFCreator
from calibre.utils.localization import canonicalize_lang, lang_as_iso639_1
NBSP = '\xa0'
class Text:
def __init__(self, elem, attr, buf):
self.elem, self.attr, self.buf = elem, attr, buf
def add_elem(self, elem):
setattr(self.elem, self.attr, ''.join(self.buf))
self.elem, self.attr, self.buf = elem, 'tail', []
def html_lang(docx_lang):
lang = canonicalize_lang(docx_lang)
if lang and lang != 'und':
lang = lang_as_iso639_1(lang)
if lang:
return lang
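# Rough illustration (assuming calibre's helpers canonicalize e.g. 'en-US'
# to 'eng' and map that to ISO 639-1): html_lang('en-US') -> 'en'.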
class Convert(object):
def __init__(self, path_or_stream, dest_dir=None, log=None, detect_cover=True, notes_text=None, notes_nopb=False, nosupsub=False):
self.docx = DOCX(path_or_stream, log=log)
self.namespace = self.docx.namespace
self.ms_pat = re.compile(r'\s{2,}')
self.ws_pat = re.compile(r'[\n\r\t]')
self.log = self.docx.log
self.detect_cover = detect_cover
self.notes_text = notes_text or _('Notes')
self.notes_nopb = notes_nopb
self.nosupsub = nosupsub
self.dest_dir = dest_dir or os.getcwdu()
self.mi = self.docx.metadata
self.body = BODY()
self.theme = Theme(self.namespace)
self.settings = Settings(self.namespace)
self.tables = Tables(self.namespace)
self.fields = Fields(self.namespace)
self.styles = Styles(self.namespace, self.tables)
self.images = Images(self.namespace, self.log)
self.object_map = OrderedDict()
self.html = HTML(
HEAD(
META(charset='utf-8'),
TITLE(self.mi.title or _('Unknown')),
LINK(rel='stylesheet', type='text/css', href='docx.css'),
),
self.body
)
self.html.text='\n\t'
self.html[0].text='\n\t\t'
self.html[0].tail='\n'
for child in self.html[0]:
child.tail = '\n\t\t'
self.html[0][-1].tail = '\n\t'
self.html[1].text = self.html[1].tail = '\n'
lang = html_lang(self.mi.language)
if lang:
self.html.set('lang', lang)
self.doc_lang = lang
else:
self.doc_lang = None
def __call__(self):
doc = self.docx.document
relationships_by_id, relationships_by_type = self.docx.document_relationships
self.fields(doc, self.log)
self.read_styles(relationships_by_type)
self.images(relationships_by_id)
self.layers = OrderedDict()
self.framed = [[]]
self.frame_map = {}
self.framed_map = {}
self.anchor_map = {}
self.link_map = defaultdict(list)
self.link_source_map = {}
self.toc_anchor = None
self.block_runs = []
paras = []
self.log.debug('Converting Word markup to HTML')
self.read_page_properties(doc)
self.current_rels = relationships_by_id
for wp, page_properties in self.page_map.iteritems():
self.current_page = page_properties
if wp.tag.endswith('}p'):
p = self.convert_p(wp)
self.body.append(p)
paras.append(wp)
self.read_block_anchors(doc)
self.styles.apply_contextual_spacing(paras)
self.mark_block_runs(paras)
# Apply page breaks at the start of every section, except the first
# section (since that will be the start of the file)
self.styles.apply_section_page_breaks(self.section_starts[1:])
notes_header = None
orig_rid_map = self.images.rid_map
if self.footnotes.has_notes:
self.body.append(H1(self.notes_text))
notes_header = self.body[-1]
notes_header.set('class', 'notes-header')
for anchor, text, note in self.footnotes:
dl = DL(id=anchor)
dl.set('class', 'footnote')
self.body.append(dl)
dl.append(DT('[', A('←' + text, href='#back_%s' % anchor, title=text)))
dl[-1][0].tail = ']'
dl.append(DD())
paras = []
self.images.rid_map = self.current_rels = note.rels[0]
for wp in note:
if wp.tag.endswith('}tbl'):
self.tables.register(wp, self.styles)
self.page_map[wp] = self.current_page
else:
p = self.convert_p(wp)
dl[-1].append(p)
paras.append(wp)
self.styles.apply_contextual_spacing(paras)
self.mark_block_runs(paras)
for p, wp in self.object_map.iteritems():
if len(p) > 0 and not p.text and len(p[0]) > 0 and not p[0].text and p[0][0].get('class', None) == 'tab':
# Paragraph uses tabs for indentation, convert to text-indent
parent = p[0]
tabs = []
for child in parent:
if child.get('class', None) == 'tab':
tabs.append(child)
if child.tail:
break
else:
break
indent = len(tabs) * self.settings.default_tab_stop
style = self.styles.resolve(wp)
if style.text_indent is inherit or (hasattr(style.text_indent, 'endswith') and style.text_indent.endswith('pt')):
if style.text_indent is not inherit:
indent = float(style.text_indent[:-2]) + indent
style.text_indent = '%.3gpt' % indent
parent.text = tabs[-1].tail or ''
map(parent.remove, tabs)
self.images.rid_map = orig_rid_map
self.resolve_links()
self.styles.cascade(self.layers)
self.tables.apply_markup(self.object_map, self.page_map)
numbered = []
for html_obj, obj in self.object_map.iteritems():
raw = obj.get('calibre_num_id', None)
if raw is not None:
lvl, num_id = raw.partition(':')[0::2]
try:
lvl = int(lvl)
except (TypeError, ValueError):
lvl = 0
numbered.append((html_obj, num_id, lvl))
self.numbering.apply_markup(numbered, self.body, self.styles, self.object_map, self.images)
self.apply_frames()
if len(self.body) > 0:
self.body.text = '\n\t'
for child in self.body:
child.tail = '\n\t'
self.body[-1].tail = '\n'
self.log.debug('Converting styles to CSS')
self.styles.generate_classes()
for html_obj, obj in self.object_map.iteritems():
style = self.styles.resolve(obj)
if style is not None:
css = style.css
if css:
cls = self.styles.class_name(css)
if cls:
html_obj.set('class', cls)
for html_obj, css in self.framed_map.iteritems():
cls = self.styles.class_name(css)
if cls:
html_obj.set('class', cls)
if notes_header is not None:
for h in self.namespace.children(self.body, 'h1', 'h2', 'h3'):
notes_header.tag = h.tag
cls = h.get('class', None)
if cls and cls != 'notes-header':
notes_header.set('class', '%s notes-header' % cls)
break
self.fields.polish_markup(self.object_map)
self.log.debug('Cleaning up redundant markup generated by Word')
self.cover_image = cleanup_markup(self.log, self.html, self.styles, self.dest_dir, self.detect_cover, self.namespace.XPath)
return self.write(doc)
def read_page_properties(self, doc):
current = []
self.page_map = OrderedDict()
self.section_starts = []
for p in self.namespace.descendants(doc, 'w:p', 'w:tbl'):
if p.tag.endswith('}tbl'):
self.tables.register(p, self.styles)
current.append(p)
continue
sect = tuple(self.namespace.descendants(p, 'w:sectPr'))
if sect:
pr = PageProperties(self.namespace, sect)
paras = current + [p]
for x in paras:
self.page_map[x] = pr
self.section_starts.append(paras[0])
current = []
else:
current.append(p)
if current:
self.section_starts.append(current[0])
last = self.namespace.XPath('./w:body/w:sectPr')(doc)
pr = PageProperties(self.namespace, last)
for x in current:
self.page_map[x] = pr
def read_styles(self, relationships_by_type):
def get_name(rtype, defname):
name = relationships_by_type.get(rtype, None)
if name is None:
cname = self.docx.document_name.split('/')
cname[-1] = defname
if self.docx.exists('/'.join(cname)):
name = '/'.join(cname)
if name and name.startswith('word/word') and not self.docx.exists(name):
name = name.partition('/')[2]
return name
nname = get_name(self.namespace.names['NUMBERING'], 'numbering.xml')
sname = get_name(self.namespace.names['STYLES'], 'styles.xml')
sename = get_name(self.namespace.names['SETTINGS'], 'settings.xml')
fname = get_name(self.namespace.names['FONTS'], 'fontTable.xml')
tname = get_name(self.namespace.names['THEMES'], 'theme1.xml')
foname = get_name(self.namespace.names['FOOTNOTES'], 'footnotes.xml')
enname = get_name(self.namespace.names['ENDNOTES'], 'endnotes.xml')
numbering = self.numbering = Numbering(self.namespace)
footnotes = self.footnotes = Footnotes(self.namespace)
fonts = self.fonts = Fonts(self.namespace)
foraw = enraw = None
forel, enrel = ({}, {}), ({}, {})
if sename is not None:
try:
seraw = self.docx.read(sename)
except KeyError:
self.log.warn('Settings %s do not exist' % sename)
except EnvironmentError as e:
if e.errno != errno.ENOENT:
raise
self.log.warn('Settings %s file missing' % sename)
else:
self.settings(fromstring(seraw))
if foname is not None:
try:
foraw = self.docx.read(foname)
except KeyError:
self.log.warn('Footnotes %s do not exist' % foname)
else:
forel = self.docx.get_relationships(foname)
if enname is not None:
try:
enraw = self.docx.read(enname)
except KeyError:
self.log.warn('Endnotes %s do not exist' % enname)
else:
enrel = self.docx.get_relationships(enname)
footnotes(fromstring(foraw) if foraw else None, forel, fromstring(enraw) if enraw else None, enrel)
if fname is not None:
embed_relationships = self.docx.get_relationships(fname)[0]
try:
raw = self.docx.read(fname)
except KeyError:
self.log.warn('Fonts table %s does not exist' % fname)
else:
fonts(fromstring(raw), embed_relationships, self.docx, self.dest_dir)
if tname is not None:
try:
raw = self.docx.read(tname)
except KeyError:
self.log.warn('Theme %s does not exist' % tname)
else:
self.theme(fromstring(raw))
styles_loaded = False
if sname is not None:
try:
raw = self.docx.read(sname)
except KeyError:
self.log.warn('Styles %s do not exist' % sname)
else:
self.styles(fromstring(raw), fonts, self.theme)
styles_loaded = True
if not styles_loaded:
self.styles(None, fonts, self.theme)
if nname is not None:
try:
raw = self.docx.read(nname)
except KeyError:
self.log.warn('Numbering styles %s do not exist' % nname)
else:
numbering(fromstring(raw), self.styles, self.docx.get_relationships(nname)[0])
self.styles.resolve_numbering(numbering)
def write(self, doc):
toc = create_toc(doc, self.body, self.resolved_link_map, self.styles, self.object_map, self.log, self.namespace)
raw = html.tostring(self.html, encoding='utf-8', doctype='<!DOCTYPE html>')
with lopen(os.path.join(self.dest_dir, 'index.html'), 'wb') as f:
f.write(raw)
css = self.styles.generate_css(self.dest_dir, self.docx, self.notes_nopb, self.nosupsub)
if css:
with lopen(os.path.join(self.dest_dir, 'docx.css'), 'wb') as f:
f.write(css.encode('utf-8'))
opf = OPFCreator(self.dest_dir, self.mi)
opf.toc = toc
opf.create_manifest_from_files_in([self.dest_dir])
for item in opf.manifest:
if item.media_type == 'text/html':
item.media_type = guess_type('a.xhtml')[0]
opf.create_spine(['index.html'])
if self.cover_image is not None:
opf.guide.set_cover(self.cover_image)
def process_guide(E, guide):
if self.toc_anchor is not None:
guide.append(E.reference(
href='index.html#' + self.toc_anchor, title=_('Table of Contents'), type='toc'))
toc_file = os.path.join(self.dest_dir, 'toc.ncx')
with lopen(os.path.join(self.dest_dir, 'metadata.opf'), 'wb') as of, open(toc_file, 'wb') as ncx:
opf.render(of, ncx, 'toc.ncx', process_guide=process_guide)
if os.path.getsize(toc_file) == 0:
os.remove(toc_file)
return os.path.join(self.dest_dir, 'metadata.opf')
def read_block_anchors(self, doc):
doc_anchors = frozenset(self.namespace.XPath('./w:body/w:bookmarkStart[@w:name]')(doc))
if doc_anchors:
current_bm = set()
rmap = {v:k for k, v in self.object_map.iteritems()}
for p in self.namespace.descendants(doc, 'w:p', 'w:bookmarkStart[@w:name]'):
if p.tag.endswith('}p'):
if current_bm and p in rmap:
para = rmap[p]
if 'id' not in para.attrib:
para.set('id', generate_anchor(next(iter(current_bm)), frozenset(self.anchor_map.itervalues())))
for name in current_bm:
self.anchor_map[name] = para.get('id')
current_bm = set()
elif p in doc_anchors:
anchor = self.namespace.get(p, 'w:name')
if anchor:
current_bm.add(anchor)
def convert_p(self, p):
dest = P()
self.object_map[dest] = p
style = self.styles.resolve_paragraph(p)
self.layers[p] = []
self.frame_map[p] = style.frame
self.add_frame(dest, style.frame)
current_anchor = None
current_hyperlink = None
hl_xpath = self.namespace.XPath('ancestor::w:hyperlink[1]')
def p_parent(x):
# Ensure that nested <w:p> tags are handled. These can occur if a
# textbox is present inside a paragraph.
while True:
x = x.getparent()
try:
if x.tag.endswith('}p'):
return x
except AttributeError:
break
for x in self.namespace.descendants(p, 'w:r', 'w:bookmarkStart', 'w:hyperlink', 'w:instrText'):
if p_parent(x) is not p:
continue
if x.tag.endswith('}r'):
span = self.convert_run(x)
if current_anchor is not None:
(dest if len(dest) == 0 else span).set('id', current_anchor)
current_anchor = None
if current_hyperlink is not None:
try:
hl = hl_xpath(x)[0]
self.link_map[hl].append(span)
self.link_source_map[hl] = self.current_rels
x.set('is-link', '1')
except IndexError:
current_hyperlink = None
dest.append(span)
self.layers[p].append(x)
elif x.tag.endswith('}bookmarkStart'):
anchor = self.namespace.get(x, 'w:name')
if anchor and anchor not in self.anchor_map and anchor != '_GoBack':
# _GoBack is a special bookmark inserted by Word 2010 for
# the return to previous edit feature, we ignore it
old_anchor = current_anchor
self.anchor_map[anchor] = current_anchor = generate_anchor(anchor, frozenset(self.anchor_map.itervalues()))
if old_anchor is not None:
# The previous anchor was not applied to any element
for a, t in tuple(self.anchor_map.iteritems()):
if t == old_anchor:
self.anchor_map[a] = current_anchor
elif x.tag.endswith('}hyperlink'):
current_hyperlink = x
elif x.tag.endswith('}instrText') and x.text and x.text.strip().startswith('TOC '):
old_anchor = current_anchor
anchor = str(uuid.uuid4())
self.anchor_map[anchor] = current_anchor = generate_anchor('toc', frozenset(self.anchor_map.itervalues()))
self.toc_anchor = current_anchor
if old_anchor is not None:
# The previous anchor was not applied to any element
for a, t in tuple(self.anchor_map.iteritems()):
if t == old_anchor:
self.anchor_map[a] = current_anchor
if current_anchor is not None:
# This paragraph had no <w:r> descendants
dest.set('id', current_anchor)
current_anchor = None
m = re.match(r'heading\s+(\d+)$', style.style_name or '', re.IGNORECASE)
if m is not None:
n = min(6, max(1, int(m.group(1))))
dest.tag = 'h%d' % n
if style.bidi is True:
dest.set('dir', 'rtl')
border_runs = []
common_borders = []
for span in dest:
run = self.object_map[span]
style = self.styles.resolve_run(run)
if not border_runs or border_runs[-1][1].same_border(style):
border_runs.append((span, style))
elif border_runs:
if len(border_runs) > 1:
common_borders.append(border_runs)
border_runs = []
for border_run in common_borders:
spans = []
bs = {}
for span, style in border_run:
style.get_border_css(bs)
style.clear_border_css()
spans.append(span)
if bs:
cls = self.styles.register(bs, 'text_border')
wrapper = self.wrap_elems(spans, SPAN())
wrapper.set('class', cls)
if not dest.text and len(dest) == 0 and not style.has_visible_border():
# Empty paragraph; add a non-breaking space so that it is rendered
# by WebKit
dest.text = NBSP
# If the last element in a block is a <br> the <br> is not rendered in
# HTML, unless it is followed by a trailing space. Word, on the other
# hand inserts a blank line for trailing <br>s.
if len(dest) > 0 and not dest[-1].tail:
if dest[-1].tag == 'br':
dest[-1].tail = NBSP
elif len(dest[-1]) > 0 and dest[-1][-1].tag == 'br' and not dest[-1][-1].tail:
dest[-1][-1].tail = NBSP
return dest
def wrap_elems(self, elems, wrapper):
p = elems[0].getparent()
idx = p.index(elems[0])
p.insert(idx, wrapper)
wrapper.tail = elems[-1].tail
elems[-1].tail = None
for elem in elems:
try:
p.remove(elem)
except ValueError:
# Probably a hyperlink that spans multiple
# paragraphs, theoretically we should break this up into
# multiple hyperlinks, but I can't be bothered.
elem.getparent().remove(elem)
wrapper.append(elem)
return wrapper
def resolve_links(self):
self.resolved_link_map = {}
for hyperlink, spans in self.link_map.iteritems():
relationships_by_id = self.link_source_map[hyperlink]
span = spans[0]
if len(spans) > 1:
span = self.wrap_elems(spans, SPAN())
span.tag = 'a'
self.resolved_link_map[hyperlink] = span
tgt = self.namespace.get(hyperlink, 'w:tgtFrame')
if tgt:
span.set('target', tgt)
tt = self.namespace.get(hyperlink, 'w:tooltip')
if tt:
span.set('title', tt)
rid = self.namespace.get(hyperlink, 'r:id')
if rid and rid in relationships_by_id:
span.set('href', relationships_by_id[rid])
continue
anchor = self.namespace.get(hyperlink, 'w:anchor')
if anchor and anchor in self.anchor_map:
span.set('href', '#' + self.anchor_map[anchor])
continue
self.log.warn('Hyperlink with unknown target (rid=%s, anchor=%s), ignoring' %
(rid, anchor))
# hrefs that point nowhere give epubcheck a hernia. The element
# should be styled explicitly by Word anyway.
# span.set('href', '#')
rmap = {v:k for k, v in self.object_map.iteritems()}
for hyperlink, runs in self.fields.hyperlink_fields:
spans = [rmap[r] for r in runs if r in rmap]
if not spans:
continue
span = spans[0]
if len(spans) > 1:
span = self.wrap_elems(spans, SPAN())
span.tag = 'a'
tgt = hyperlink.get('target', None)
if tgt:
span.set('target', tgt)
tt = hyperlink.get('title', None)
if tt:
span.set('title', tt)
url = hyperlink.get('url', None)
if url is None:
anchor = hyperlink.get('anchor', None)
if anchor in self.anchor_map:
span.set('href', '#' + self.anchor_map[anchor])
continue
self.log.warn('Hyperlink field with unknown anchor: %s' % anchor)
else:
if url in self.anchor_map:
span.set('href', '#' + self.anchor_map[url])
continue
span.set('href', url)
for img, link, relationships_by_id in self.images.links:
parent = img.getparent()
idx = parent.index(img)
a = A(img)
a.tail, img.tail = img.tail, None
parent.insert(idx, a)
tgt = link.get('target', None)
if tgt:
a.set('target', tgt)
tt = link.get('title', None)
if tt:
a.set('title', tt)
rid = link['id']
if rid in relationships_by_id:
dest = relationships_by_id[rid]
if dest.startswith('#'):
if dest[1:] in self.anchor_map:
a.set('href', '#' + self.anchor_map[dest[1:]])
else:
a.set('href', dest)
def convert_run(self, run):
ans = SPAN()
self.object_map[ans] = run
text = Text(ans, 'text', [])
for child in run:
if self.namespace.is_tag(child, 'w:t'):
if not child.text:
continue
space = child.get(XML('space'), None)
preserve = False
ctext = child.text
if space != 'preserve':
# Remove leading and trailing whitespace. Word ignores
# leading and trailing whitespace without preserve
ctext = ctext.strip(' \n\r\t')
# Only use a <span> with white-space:pre-wrap if this element
# actually needs it, i.e. if it has more than one
# consecutive space or it has newlines or tabs.
multi_spaces = self.ms_pat.search(ctext) is not None
preserve = multi_spaces or self.ws_pat.search(ctext) is not None
if preserve:
text.add_elem(SPAN(ctext, style="white-space:pre-wrap"))
ans.append(text.elem)
else:
text.buf.append(ctext)
elif self.namespace.is_tag(child, 'w:cr'):
text.add_elem(BR())
ans.append(text.elem)
elif self.namespace.is_tag(child, 'w:br'):
typ = self.namespace.get(child, 'w:type')
if typ in {'column', 'page'}:
br = BR(style='page-break-after:always')
else:
clear = child.get('clear', None)
if clear in {'all', 'left', 'right'}:
br = BR(style='clear:%s'%('both' if clear == 'all' else clear))
else:
br = BR()
text.add_elem(br)
ans.append(text.elem)
elif self.namespace.is_tag(child, 'w:drawing') or self.namespace.is_tag(child, 'w:pict'):
for img in self.images.to_html(child, self.current_page, self.docx, self.dest_dir):
text.add_elem(img)
ans.append(text.elem)
elif self.namespace.is_tag(child, 'w:footnoteReference') or self.namespace.is_tag(child, 'w:endnoteReference'):
anchor, name = self.footnotes.get_ref(child)
if anchor and name:
l = A(SUP(name, id='back_%s' % anchor), href='#' + anchor, title=name)
l.set('class', 'noteref')
text.add_elem(l)
ans.append(text.elem)
elif self.namespace.is_tag(child, 'w:tab'):
spaces = int(math.ceil((self.settings.default_tab_stop / 36) * 6))
text.add_elem(SPAN(NBSP * spaces))
ans.append(text.elem)
ans[-1].set('class', 'tab')
elif self.namespace.is_tag(child, 'w:noBreakHyphen'):
text.buf.append(u'\u2011')
elif self.namespace.is_tag(child, 'w:softHyphen'):
text.buf.append(u'\u00ad')
if text.buf:
setattr(text.elem, text.attr, ''.join(text.buf))
style = self.styles.resolve_run(run)
if style.vert_align in {'superscript', 'subscript'}:
ans.tag = 'sub' if style.vert_align == 'subscript' else 'sup'
if style.lang is not inherit:
lang = html_lang(style.lang)
if lang is not None and lang != self.doc_lang:
ans.set('lang', lang)
if style.rtl is True:
ans.set('dir', 'rtl')
return ans
def add_frame(self, html_obj, style):
last_run = self.framed[-1]
if style is inherit:
if last_run:
self.framed.append([])
return
if last_run:
if last_run[-1][1] == style:
last_run.append((html_obj, style))
else:
self.framed[-1].append((html_obj, style))
else:
last_run.append((html_obj, style))
def apply_frames(self):
for run in filter(None, self.framed):
style = run[0][1]
paras = tuple(x[0] for x in run)
parent = paras[0].getparent()
idx = parent.index(paras[0])
frame = DIV(*paras)
parent.insert(idx, frame)
self.framed_map[frame] = css = style.css(self.page_map[self.object_map[paras[0]]])
self.styles.register(css, 'frame')
if not self.block_runs:
return
rmap = {v:k for k, v in self.object_map.iteritems()}
for border_style, blocks in self.block_runs:
paras = tuple(rmap[p] for p in blocks)
parent = paras[0].getparent()
if parent.tag in ('ul', 'ol'):
ul = parent
parent = ul.getparent()
idx = parent.index(ul)
frame = DIV(ul)
else:
idx = parent.index(paras[0])
frame = DIV(*paras)
parent.insert(idx, frame)
self.framed_map[frame] = css = border_style.css
self.styles.register(css, 'frame')
def mark_block_runs(self, paras):
def process_run(run):
max_left = max_right = 0
has_visible_border = None
for p in run:
style = self.styles.resolve_paragraph(p)
if has_visible_border is None:
has_visible_border = style.has_visible_border()
max_left, max_right = max(style.margin_left, max_left), max(style.margin_right, max_right)
if has_visible_border:
style.margin_left = style.margin_right = inherit
if p is not run[0]:
style.padding_top = 0
else:
border_style = style.clone_border_styles()
if has_visible_border:
border_style.margin_top, style.margin_top = style.margin_top, inherit
if p is not run[-1]:
style.padding_bottom = 0
else:
if has_visible_border:
border_style.margin_bottom, style.margin_bottom = style.margin_bottom, inherit
style.clear_borders()
if p is not run[-1]:
style.apply_between_border()
if has_visible_border:
border_style.margin_left, border_style.margin_right = max_left,max_right
self.block_runs.append((border_style, run))
run = []
for p in paras:
if run and self.frame_map.get(p) == self.frame_map.get(run[-1]):
style = self.styles.resolve_paragraph(p)
last_style = self.styles.resolve_paragraph(run[-1])
if style.has_identical_borders(last_style):
run.append(p)
continue
if len(run) > 1:
process_run(run)
run = [p]
if len(run) > 1:
process_run(run)
if __name__ == '__main__':
import shutil
from calibre.utils.logging import default_log
default_log.filter_level = default_log.DEBUG
dest_dir = os.path.join(os.getcwdu(), 'docx_input')
if os.path.exists(dest_dir):
shutil.rmtree(dest_dir)
os.mkdir(dest_dir)
Convert(sys.argv[-1], dest_dir=dest_dir, log=default_log)()
|
gpl-3.0
| -1,772,661,482,108,892,200
| 40.554293
| 134
| 0.520009
| false
| 3.87827
| false
| false
| false
|
ltilve/chromium
|
tools/telemetry/telemetry/core/browser_options.py
|
1
|
15478
|
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import logging
import optparse
import os
import shlex
import socket
import sys
from telemetry.core import browser_finder
from telemetry.core import browser_finder_exceptions
from telemetry.core import device_finder
from telemetry.core import platform
from telemetry.core.platform.profiler import profiler_finder
from telemetry.core import profile_types
from telemetry.core import util
from telemetry.core import wpr_modes
util.AddDirToPythonPath(
util.GetChromiumSrcDir(), 'third_party', 'webpagereplay')
import net_configs # pylint: disable=F0401
class BrowserFinderOptions(optparse.Values):
"""Options to be used for discovering a browser."""
def __init__(self, browser_type=None):
optparse.Values.__init__(self)
self.browser_type = browser_type
self.browser_executable = None
self.chrome_root = None
self.device = None
self.cros_ssh_identity = None
self.extensions_to_load = []
# If set, copy the generated profile to this path on exit.
self.output_profile_path = None
self.cros_remote = None
self.profiler = None
self.verbosity = 0
self.browser_options = BrowserOptions()
self.output_file = None
self.android_rndis = False
self.no_performance_mode = False
def __repr__(self):
return str(sorted(self.__dict__.items()))
def Copy(self):
return copy.deepcopy(self)
def CreateParser(self, *args, **kwargs):
parser = optparse.OptionParser(*args, **kwargs)
# Selection group
group = optparse.OptionGroup(parser, 'Which browser to use')
group.add_option('--browser',
dest='browser_type',
default=None,
help='Browser type to run, '
'in order of priority. Supported values: list,%s' %
','.join(browser_finder.FindAllBrowserTypes(self)))
group.add_option('--browser-executable',
dest='browser_executable',
help='The exact browser to run.')
group.add_option('--chrome-root',
dest='chrome_root',
help='Where to look for chrome builds. '
'Defaults to searching parent dirs.')
group.add_option('--device',
dest='device',
help='The device ID to use. '
'If not specified, only 0 or 1 connected devices are supported. If '
'specified as "android", all available Android devices are used.')
group.add_option('--target-arch',
dest='target_arch',
help='The target architecture of the browser. Options available are: '
'x64, x86_64, arm, arm64 and mips. '
'Defaults to the default architecture of the platform if omitted.')
group.add_option(
'--remote',
dest='cros_remote',
help='The hostname of a remote ChromeOS device to use.')
group.add_option(
'--remote-ssh-port',
type=int,
default=socket.getservbyname('ssh'),
dest='cros_remote_ssh_port',
help='The SSH port of the remote ChromeOS device (requires --remote).')
identity = None
testing_rsa = os.path.join(
util.GetChromiumSrcDir(),
'third_party', 'chromite', 'ssh_keys', 'testing_rsa')
if os.path.exists(testing_rsa):
identity = testing_rsa
group.add_option('--identity',
dest='cros_ssh_identity',
default=identity,
help='The identity file to use when ssh\'ing into the ChromeOS device')
parser.add_option_group(group)
# Debugging options
group = optparse.OptionGroup(parser, 'When things go wrong')
profiler_choices = profiler_finder.GetAllAvailableProfilers()
group.add_option(
'--profiler', default=None, type='choice',
choices=profiler_choices,
help='Record profiling data using this tool. Supported values: ' +
', '.join(profiler_choices))
group.add_option(
'-v', '--verbose', action='count', dest='verbosity',
help='Increase verbosity level (repeat as needed)')
group.add_option('--print-bootstrap-deps',
action='store_true',
help='Output bootstrap deps list.')
parser.add_option_group(group)
# Platform options
group = optparse.OptionGroup(parser, 'Platform options')
group.add_option('--no-performance-mode', action='store_true',
help='Some platforms run on "full performance mode" where the '
'test is executed at maximum CPU speed in order to minimize noise '
'(specially important for dashboards / continuous builds). '
'This option prevents Telemetry from tweaking such platform settings.')
group.add_option('--android-rndis', dest='android_rndis', default=False,
action='store_true', help='Use RNDIS forwarding on Android.')
group.add_option('--no-android-rndis', dest='android_rndis',
action='store_false', help='Do not use RNDIS forwarding on Android.'
' [default]')
parser.add_option_group(group)
# Browser options.
self.browser_options.AddCommandLineArgs(parser)
real_parse = parser.parse_args
def ParseArgs(args=None):
defaults = parser.get_default_values()
for k, v in defaults.__dict__.items():
if k in self.__dict__ and self.__dict__[k] is not None:
continue
self.__dict__[k] = v
ret = real_parse(args, self) # pylint: disable=E1121
if self.verbosity >= 2:
logging.getLogger().setLevel(logging.DEBUG)
elif self.verbosity:
logging.getLogger().setLevel(logging.INFO)
else:
logging.getLogger().setLevel(logging.WARNING)
if self.device == 'list':
devices = device_finder.GetDevicesMatchingOptions(self)
print 'Available devices:'
for device in devices:
print ' ', device.name
sys.exit(0)
if self.browser_executable and not self.browser_type:
self.browser_type = 'exact'
if self.browser_type == 'list':
devices = device_finder.GetDevicesMatchingOptions(self)
if not devices:
sys.exit(0)
browser_types = {}
for device in devices:
try:
possible_browsers = browser_finder.GetAllAvailableBrowsers(self,
device)
browser_types[device.name] = sorted(
[browser.browser_type for browser in possible_browsers])
except browser_finder_exceptions.BrowserFinderException as ex:
print >> sys.stderr, 'ERROR: ', ex
sys.exit(1)
print 'Available browsers:'
if len(browser_types) == 0:
print ' No devices were found.'
for device_name in sorted(browser_types.keys()):
print ' ', device_name
for browser_type in browser_types[device_name]:
print ' ', browser_type
sys.exit(0)
# Parse browser options.
self.browser_options.UpdateFromParseResults(self)
return ret
parser.parse_args = ParseArgs
return parser
def AppendExtraBrowserArgs(self, args):
self.browser_options.AppendExtraBrowserArgs(args)
def MergeDefaultValues(self, defaults):
for k, v in defaults.__dict__.items():
self.ensure_value(k, v)
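# Minimal usage sketch (hypothetical arguments, not part of this module):
# options = BrowserFinderOptions()
# parser = options.CreateParser(usage='%prog [options]')
# parser.parse_args(['--browser', 'system', '-v'])
# after which options.browser_type == 'system' and options.verbosity == 1.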
class BrowserOptions(object):
"""Options to be used for launching a browser."""
def __init__(self):
self.browser_type = None
self.show_stdout = False
# When set to True, the browser will use the default profile. Telemetry
# will not provide an alternate profile directory.
self.dont_override_profile = False
self.profile_dir = None
self.profile_type = None
self._extra_browser_args = set()
self.extra_wpr_args = []
self.wpr_mode = wpr_modes.WPR_OFF
self.netsim = None
# The amount of time Telemetry should wait for the browser to start.
# This property is not exposed as a command line option.
self._browser_startup_timeout = 30
self.disable_background_networking = True
self.no_proxy_server = False
self.browser_user_agent_type = None
self.clear_sytem_cache_for_browser_and_profile_on_start = False
self.startup_url = 'about:blank'
# Background pages of built-in component extensions can interfere with
# performance measurements.
self.disable_component_extensions_with_background_pages = True
# Disable default apps.
self.disable_default_apps = True
# Whether to use the new code path for choosing an ephemeral port for
# DevTools. The bots set this to true. When Chrome 37 reaches stable,
# remove this setting and the old code path. http://crbug.com/379980
self.use_devtools_active_port = False
def __repr__(self):
return str(sorted(self.__dict__.items()))
def IsCrosBrowserOptions(self):
return False
@classmethod
def AddCommandLineArgs(cls, parser):
############################################################################
# Please do not add any more options here without first discussing with #
# a telemetry owner. This is not the right place for platform-specific #
# options. #
############################################################################
group = optparse.OptionGroup(parser, 'Browser options')
profile_choices = profile_types.GetProfileTypes()
group.add_option('--profile-type',
dest='profile_type',
type='choice',
default='clean',
choices=profile_choices,
help=('The user profile to use. A clean profile is used by default. '
'Supported values: ' + ', '.join(profile_choices)))
group.add_option('--profile-dir',
dest='profile_dir',
help='Profile directory to launch the browser with. '
'A clean profile is used by default')
group.add_option('--extra-browser-args',
dest='extra_browser_args_as_string',
help='Additional arguments to pass to the browser when it starts')
group.add_option('--extra-wpr-args',
dest='extra_wpr_args_as_string',
help=('Additional arguments to pass to Web Page Replay. '
'See third_party/webpagereplay/replay.py for usage.'))
group.add_option('--netsim', default=None, type='choice',
choices=net_configs.NET_CONFIG_NAMES,
help=('Run benchmark under simulated network conditions. '
'Will prompt for sudo. Supported values: ' +
', '.join(net_configs.NET_CONFIG_NAMES)))
group.add_option('--show-stdout',
action='store_true',
help='When possible, will display the stdout of the process')
# This hidden option is to be removed, and the older code path deleted,
# once Chrome 37 reaches Stable. http://crbug.com/379980
group.add_option('--use-devtools-active-port',
action='store_true',
help=optparse.SUPPRESS_HELP)
parser.add_option_group(group)
group = optparse.OptionGroup(parser, 'Compatibility options')
group.add_option('--gtest_output',
help='Ignored argument for compatibility with runtest.py harness')
parser.add_option_group(group)
group = optparse.OptionGroup(parser, 'Synthetic gesture options')
synthetic_gesture_source_type_choices = ['default', 'mouse', 'touch']
group.add_option('--synthetic-gesture-source-type',
dest='synthetic_gesture_source_type',
default='default', type='choice',
choices=synthetic_gesture_source_type_choices,
help='Specify the source type for synthetic gestures. Note that some ' +
'actions only support a specific source type. ' +
'Supported values: ' +
', '.join(synthetic_gesture_source_type_choices))
parser.add_option_group(group)
def UpdateFromParseResults(self, finder_options):
"""Copies our options from finder_options"""
browser_options_list = [
'extra_browser_args_as_string',
'extra_wpr_args_as_string',
'netsim',
'profile_dir',
'profile_type',
'show_stdout',
'synthetic_gesture_source_type',
'use_devtools_active_port',
]
for o in browser_options_list:
a = getattr(finder_options, o, None)
if a is not None:
setattr(self, o, a)
delattr(finder_options, o)
self.browser_type = finder_options.browser_type
if hasattr(self, 'extra_browser_args_as_string'): # pylint: disable=E1101
tmp = shlex.split(
self.extra_browser_args_as_string) # pylint: disable=E1101
self.AppendExtraBrowserArgs(tmp)
delattr(self, 'extra_browser_args_as_string')
if hasattr(self, 'extra_wpr_args_as_string'): # pylint: disable=E1101
tmp = shlex.split(
self.extra_wpr_args_as_string) # pylint: disable=E1101
self.extra_wpr_args.extend(tmp)
delattr(self, 'extra_wpr_args_as_string')
if self.profile_type == 'default':
self.dont_override_profile = True
if self.profile_dir and self.profile_type != 'clean':
logging.critical(
"It's illegal to specify both --profile-type and --profile-dir.\n"
"For more information see: http://goo.gl/ngdGD5")
sys.exit(1)
if self.profile_dir and not os.path.isdir(self.profile_dir):
logging.critical(
"Directory specified by --profile-dir (%s) doesn't exist "
"or isn't a directory.\n"
"For more information see: http://goo.gl/ngdGD5" % self.profile_dir)
sys.exit(1)
if not self.profile_dir:
self.profile_dir = profile_types.GetProfileDir(self.profile_type)
# This deferred import is necessary because browser_options is imported in
# telemetry/telemetry/__init__.py.
finder_options.browser_options = CreateChromeBrowserOptions(self)
@property
def extra_browser_args(self):
return self._extra_browser_args
@property
def browser_startup_timeout(self):
return self._browser_startup_timeout
@browser_startup_timeout.setter
def browser_startup_timeout(self, value):
self._browser_startup_timeout = value
def AppendExtraBrowserArgs(self, args):
if isinstance(args, list):
self._extra_browser_args.update(args)
else:
self._extra_browser_args.add(args)
def CreateChromeBrowserOptions(br_options):
browser_type = br_options.browser_type
if (platform.GetHostPlatform().GetOSName() == 'chromeos' or
(browser_type and browser_type.startswith('cros'))):
return CrosBrowserOptions(br_options)
return br_options
class ChromeBrowserOptions(BrowserOptions):
"""Chrome-specific browser options."""
def __init__(self, br_options):
super(ChromeBrowserOptions, self).__init__()
# Copy to self.
self.__dict__.update(br_options.__dict__)
class CrosBrowserOptions(ChromeBrowserOptions):
"""ChromeOS-specific browser options."""
def __init__(self, br_options):
super(CrosBrowserOptions, self).__init__(br_options)
# Create a browser with oobe property.
self.create_browser_with_oobe = False
# Clear enterprise policy before logging in.
self.clear_enterprise_policy = True
# Disable GAIA/enterprise services.
self.disable_gaia_services = True
self.auto_login = True
self.gaia_login = False
self.username = 'test@test.test'
self.password = ''
def IsCrosBrowserOptions(self):
return True
|
bsd-3-clause
| 3,075,533,926,595,646,500
| 35.764846
| 80
| 0.645238
| false
| 4.03283
| true
| false
| false
|
redreamality/broca
|
broca/tokenize/keyword/pos.py
|
1
|
2250
|
"""
A naive keyword extractor which just pulls out nouns and noun phrases.
We were using the PerceptronTagger, which is _way_ faster than NLTK's default tagger, and more accurate to boot.
See <http://stevenloria.com/tutorial-state-of-the-art-part-of-speech-tagging-in-textblob/>.
However, it complicates the library's installation, and the spacy tagger is quite fast and good too.
"""
from broca.common.shared import spacy
from broca.tokenize import Tokenizer
from broca.tokenize.util import prune
CFG = {
('NNP', 'NNP'): 'NNP',
('NN', 'NN'): 'NNI',
('NNI', 'NN'): 'NNI',
('JJ', 'JJ'): 'JJ',
('JJ', 'NN'): 'NNI',
}
class POS(Tokenizer):
def tokenize(self, docs):
tags = ['NN', 'NNS', 'NNP', 'NNPS']
keywords = []
for doc in docs:
toks = spacy(doc, tag=True, parse=False, entity=False)
tagged = [(t.lower_.strip(), t.tag_) for t in toks]
kws = [t for t, tag in tagged if tag in tags]
kws += extract_noun_phrases(tagged)
keywords.append(kws)
return prune(keywords)
def extract_noun_phrases(tagged_doc):
"""
(From textblob)
"""
tags = _normalize_tags(tagged_doc)
merge = True
while merge:
merge = False
for x in range(0, len(tags) - 1):
t1 = tags[x]
t2 = tags[x + 1]
key = t1[1], t2[1]
value = CFG.get(key, '')
if value:
merge = True
tags.pop(x)
tags.pop(x)
match = '%s %s' % (t1[0], t2[0])
pos = value
tags.insert(x, (match, pos))
break
matches = [t[0] for t in tags if t[1] in ['NNP', 'NNI']]
return matches
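# Minimal illustration (hypothetical tagged input):
# extract_noun_phrases([('deep', 'JJ'), ('learning', 'NN'), ('is', 'VBZ'), ('fun', 'NN')])
# returns ['deep learning'], since the ('JJ', 'NN') pair collapses into an 'NNI' chunk.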
def _normalize_tags(chunk):
"""
(From textblob)
Normalize the corpus tags.
("NN", "NN-PL", "NNS") -> "NN"
"""
ret = []
for word, tag in chunk:
if tag == 'NP-TL' or tag == 'NP':
ret.append((word, 'NNP'))
continue
if tag.endswith('-TL'):
ret.append((word, tag[:-3]))
continue
if tag.endswith('S'):
ret.append((word, tag[:-1]))
continue
ret.append((word, tag))
return ret
|
mit
| -5,785,700,199,775,519,000
| 26.439024
| 101
| 0.519556
| false
| 3.343239
| false
| false
| false
|
psiinon/addons-server
|
src/olympia/discovery/management/commands/extract_content_strings.py
|
1
|
3587
|
# -*- coding: utf-8 -*-
import json
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import requests
import olympia.core.logger
log = olympia.core.logger.getLogger('z.discovery.extract_content_strings')
class BaseAPIParser():
def get_results_content(self):
results = self.fetch_strings_from_api()
log.info(f'Building "{self.l10n_comment}" strings.')
return '\n'.join(
self.build_output_for_item(item) for item in results)
def fetch_strings_from_api(self):
log.info(f'Fetching {self.l10n_comment} from the API.')
response = requests.get(self.api)
if response.status_code != 200:
raise CommandError(f'Fetching {self.l10n_comment} failed.')
return json.loads(response.content)['results']
def _get_item(self, item, field):
# A sub field is selected with "." e.g. addon.authors.name
fields = field.split('.', maxsplit=1)
sub_item = item.get(fields[0])
if len(fields) == 1 or not sub_item:
# Easy case, no subfields or empty/missing already.
return sub_item
if isinstance(sub_item, list):
# It's a list, but we're selecting sub fields so iterate through.
values = []
for sub_sub in sub_item:
value = self._get_item(sub_sub, fields[1])
# we don't want lists of lists, so flatten along the way
if isinstance(value, list):
values.extend(value)
else:
values.append(value)
return values
else:
# We just need to select the item from a sub field.
return self._get_item(sub_item, fields[1])
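# Illustration of the dotted lookup (hypothetical payload):
# item = {'modules': [{'cta': {'text': 'Go'}}, {'cta': {'text': 'Stop'}}]}
# self._get_item(item, 'modules.cta.text') -> ['Go', 'Stop']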
def build_output_for_item(self, item):
output = []
for field in self.fields:
values = self._get_item(item, field)
if not isinstance(values, list):
values = [values]
for value in values:
if value:
output.append(self.build_output_for_single_value(value))
return ''.join(output)
def build_output_for_single_value(self, value):
output = (
'{# L10n: %s #}\n'
'{%% trans %%}%s{%% endtrans %%}\n' % (self.l10n_comment, value))
return output
class DiscoItemAPIParser(BaseAPIParser):
api = settings.DISCOVERY_EDITORIAL_CONTENT_API
l10n_comment = 'editorial content for the discovery pane.'
fields = ('custom_heading', 'custom_description')
class SecondaryHeroShelfAPIParser(BaseAPIParser):
api = settings.SECONDARY_HERO_EDITORIAL_CONTENT_API
l10n_comment = 'editorial content for the secondary hero shelves.'
fields = ('headline', 'description', 'cta.text', 'modules.description',
'modules.cta.text')
class Command(BaseCommand):
help = ('Extract editorial disco pane and secondary hero shelf content '
'that need to be translated.')
def handle(self, *args, **options):
disco = DiscoItemAPIParser()
secondary_hero = SecondaryHeroShelfAPIParser()
results_content = (
disco.get_results_content() + '\n' +
secondary_hero.get_results_content())
self.generate_file_from_api_results(results_content)
def generate_file_from_api_results(self, results_content):
log.info('Writing Editorial content strings file.')
with open(settings.EDITORIAL_CONTENT_FILENAME, 'wb') as f:
f.write(results_content.encode('utf-8'))
|
bsd-3-clause
| -8,826,200,749,400,742,000
| 35.979381
| 77
| 0.609702
| false
| 3.836364
| false
| false
| false
|
plamut/ggrc-core
|
test/selenium/src/lib/constants/url.py
|
2
|
1194
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Constants for URLs construction."""
# pylint: disable=wildcard-import
# pylint: disable=unused-wildcard-import
from lib.constants.objects import * # noqa; the names are later exported
# URL's parts for objects
API = "api"
DASHBOARD = "dashboard"
ADMIN_DASHBOARD = "admin"
AUDIT = AUDITS + "/{0}"
RELATIONSHIPS = "relationships"
OBJECT_OWNERS = "object_owners"
CONTACTS = "contacts"
QUERY = "query"
# URL's parts for user
DEFAULT_EMAIL_DOMAIN = "example.com"
DEFAULT_USER_EMAIL = "user@" + DEFAULT_EMAIL_DOMAIN
DEFAULT_USER_HREF = "/".join([API, PEOPLE, str(1)])
class Widget(object):
"""URL's parts for widgets."""
# pylint: disable=too-few-public-methods
# admin dashboard page
CUSTOM_ATTRIBUTES = "#custom_attribute_widget"
EVENTS = "#events_list_widget"
ROLES = "#roles_list_widget"
PEOPLE = "#people_list_widget"
# widgets
INFO = "#info_widget"
AUDITS = "#audit_widget"
ASSESSMENTS = "#assessment_widget"
ASSESSMENT_TEMPLATES = "#assessment_template_widget"
CONTROLS = "#control_widget"
ISSUES = "#issue_widget"
PROGRAMS = "#program_widget"
|
apache-2.0
| -1,714,638,477,626,481,400
| 28.121951
| 78
| 0.706868
| false
| 3.192513
| false
| false
| false
|
pytest-dev/py
|
py/_path/common.py
|
4
|
14818
|
"""
"""
import warnings
import os
import sys
import posixpath
import fnmatch
import py
# Moved from local.py.
iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt')
try:
# FileNotFoundError might happen in py34, and is not available with py27.
import_errors = (ImportError, FileNotFoundError)
except NameError:
import_errors = (ImportError,)
try:
from os import fspath
except ImportError:
def fspath(path):
"""
Return the string representation of the path.
If str or bytes is passed in, it is returned unchanged.
This code comes from PEP 519, modified to support earlier versions of
python.
This is required for python < 3.6.
"""
if isinstance(path, (py.builtin.text, py.builtin.bytes)):
return path
# Work from the object's type to match method resolution of other magic
# methods.
path_type = type(path)
try:
return path_type.__fspath__(path)
except AttributeError:
if hasattr(path_type, '__fspath__'):
raise
try:
import pathlib
except import_errors:
pass
else:
if isinstance(path, pathlib.PurePath):
return py.builtin.text(path)
raise TypeError("expected str, bytes or os.PathLike object, not "
+ path_type.__name__)
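# Quick sketch of the fallback (hypothetical values): fspath('/tmp/x') and
# fspath(pathlib.PurePath('/tmp/x')) both yield '/tmp/x', while an object
# that is neither path-like nor string raises TypeError.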
class Checkers:
_depend_on_existence = 'exists', 'link', 'dir', 'file'
def __init__(self, path):
self.path = path
def dir(self):
raise NotImplementedError
def file(self):
raise NotImplementedError
def dotfile(self):
return self.path.basename.startswith('.')
def ext(self, arg):
if not arg.startswith('.'):
arg = '.' + arg
return self.path.ext == arg
def exists(self):
raise NotImplementedError
def basename(self, arg):
return self.path.basename == arg
def basestarts(self, arg):
return self.path.basename.startswith(arg)
def relto(self, arg):
return self.path.relto(arg)
def fnmatch(self, arg):
return self.path.fnmatch(arg)
def endswith(self, arg):
return str(self.path).endswith(arg)
def _evaluate(self, kw):
for name, value in kw.items():
invert = False
meth = None
try:
meth = getattr(self, name)
except AttributeError:
if name[:3] == 'not':
invert = True
try:
meth = getattr(self, name[3:])
except AttributeError:
pass
if meth is None:
raise TypeError(
"no %r checker available for %r" % (name, self.path))
try:
if py.code.getrawcode(meth).co_argcount > 1:
if (not meth(value)) ^ invert:
return False
else:
if bool(value) ^ bool(meth()) ^ invert:
return False
except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
# EBUSY feels not entirely correct,
# but it's kind of necessary since ENOMEDIUM
# is not accessible in python
for name in self._depend_on_existence:
if name in kw:
if kw.get(name):
return False
name = 'not' + name
if name in kw:
if not kw.get(name):
return False
return True
class NeverRaised(Exception):
pass
class PathBase(object):
""" shared implementation for filesystem path objects."""
Checkers = Checkers
def __div__(self, other):
return self.join(fspath(other))
__truediv__ = __div__ # py3k
def basename(self):
""" basename part of path. """
return self._getbyspec('basename')[0]
basename = property(basename, None, None, basename.__doc__)
def dirname(self):
""" dirname part of path. """
return self._getbyspec('dirname')[0]
dirname = property(dirname, None, None, dirname.__doc__)
def purebasename(self):
""" pure base name of the path."""
return self._getbyspec('purebasename')[0]
purebasename = property(purebasename, None, None, purebasename.__doc__)
def ext(self):
""" extension of the path (including the '.')."""
return self._getbyspec('ext')[0]
ext = property(ext, None, None, ext.__doc__)
def dirpath(self, *args, **kwargs):
""" return the directory path joined with any given path arguments. """
return self.new(basename='').join(*args, **kwargs)
def read_binary(self):
""" read and return a bytestring from reading the path. """
with self.open('rb') as f:
return f.read()
def read_text(self, encoding):
""" read and return a Unicode string from reading the path. """
with self.open("r", encoding=encoding) as f:
return f.read()
def read(self, mode='r'):
""" read and return a bytestring from reading the path. """
with self.open(mode) as f:
return f.read()
def readlines(self, cr=1):
""" read and return a list of lines from the path. if cr is False, the
newline will be removed from the end of each line. """
if sys.version_info < (3, ):
mode = 'rU'
else: # python 3 deprecates mode "U" in favor of "newline" option
mode = 'r'
if not cr:
content = self.read(mode)
return content.split('\n')
else:
f = self.open(mode)
try:
return f.readlines()
finally:
f.close()
def load(self):
""" (deprecated) return object unpickled from self.read() """
f = self.open('rb')
try:
import pickle
return py.error.checked_call(pickle.load, f)
finally:
f.close()
def move(self, target):
""" move this path to target. """
if target.relto(self):
raise py.error.EINVAL(
target,
"cannot move path into a subdirectory of itself")
try:
self.rename(target)
except py.error.EXDEV: # invalid cross-device link
self.copy(target)
self.remove()
def __repr__(self):
""" return a string representation of this path. """
return repr(str(self))
def check(self, **kw):
""" check a path for existence and properties.
Without arguments, return True if the path exists, otherwise False.
valid checkers::
file=1 # is a file
file=0 # is not a file (may not even exist)
dir=1 # is a dir
link=1 # is a link
exists=1 # exists
You can specify multiple checker definitions, for example::
path.check(file=1, link=1) # a link pointing to a file
"""
if not kw:
kw = {'exists': 1}
return self.Checkers(self)._evaluate(kw)
def fnmatch(self, pattern):
"""return true if the basename/fullname matches the glob-'pattern'.
valid pattern characters::
* matches everything
? matches any single character
[seq] matches any character in seq
[!seq] matches any char not in seq
If the pattern contains a path-separator then the full path
is used for pattern matching and a '*' is prepended to the
pattern.
        If the pattern doesn't contain a path-separator, the pattern
        is only matched against the basename.
"""
return FNMatcher(pattern)(self)
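    # Hedged usage sketch (assumes a hypothetical path with basename 'test_foo.py'):
    #   p.fnmatch('test_*.py')      # no separator: matched against the basename
    #   p.fnmatch('foo/test_*.py')  # has a separator: '*' is prepended and the
    #                               # full path is matched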
def relto(self, relpath):
""" return a string which is the relative part of the path
to the given 'relpath'.
"""
if not isinstance(relpath, (str, PathBase)):
raise TypeError("%r: not a string or path object" %(relpath,))
strrelpath = str(relpath)
if strrelpath and strrelpath[-1] != self.sep:
strrelpath += self.sep
#assert strrelpath[-1] == self.sep
#assert strrelpath[-2] != self.sep
strself = self.strpath
if sys.platform == "win32" or getattr(os, '_name', None) == 'nt':
if os.path.normcase(strself).startswith(
os.path.normcase(strrelpath)):
return strself[len(strrelpath):]
elif strself.startswith(strrelpath):
return strself[len(strrelpath):]
return ""
def ensure_dir(self, *args):
""" ensure the path joined with args is a directory. """
return self.ensure(*args, **{"dir": True})
def bestrelpath(self, dest):
""" return a string which is a relative path from self
(assumed to be a directory) to dest such that
self.join(bestrelpath) == dest and if not such
path can be determined return dest.
"""
try:
if self == dest:
return os.curdir
base = self.common(dest)
if not base: # can be the case on windows
return str(dest)
self2base = self.relto(base)
reldest = dest.relto(base)
if self2base:
n = self2base.count(self.sep) + 1
else:
n = 0
l = [os.pardir] * n
if reldest:
l.append(reldest)
target = dest.sep.join(l)
return target
except AttributeError:
return str(dest)
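    # Worked sketch of the algorithm above (hypothetical POSIX paths):
    #   self='/a/b', dest='/a/c/d' -> common base '/a', self2base='b' (n=1),
    #   reldest='c/d', so the returned target is '../c/d'.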
def exists(self):
return self.check()
def isdir(self):
return self.check(dir=1)
def isfile(self):
return self.check(file=1)
def parts(self, reverse=False):
""" return a root-first list of all ancestor directories
plus the path itself.
"""
current = self
l = [self]
while 1:
last = current
current = current.dirpath()
if last == current:
break
l.append(current)
if not reverse:
l.reverse()
return l
def common(self, other):
""" return the common part shared with the other path
or None if there is no common part.
"""
last = None
for x, y in zip(self.parts(), other.parts()):
if x != y:
return last
last = x
return last
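    # Sketch (added note, hypothetical paths): for '/a/b/c' and '/a/b/d' the
    # parts() lists are compared root-first and the last equal ancestor,
    # '/a/b', is returned.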
def __add__(self, other):
""" return new path object with 'other' added to the basename"""
return self.new(basename=self.basename+str(other))
def __cmp__(self, other):
""" return sort value (-1, 0, +1). """
try:
return cmp(self.strpath, other.strpath)
except AttributeError:
return cmp(str(self), str(other)) # self.path, other.path)
def __lt__(self, other):
try:
return self.strpath < other.strpath
except AttributeError:
return str(self) < str(other)
def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False):
""" yields all paths below the current one
fil is a filter (glob pattern or callable), if not matching the
path will not be yielded, defaulting to None (everything is
returned)
rec is a filter (glob pattern or callable) that controls whether
a node is descended, defaulting to None
            ignore is an Exception class that is ignored when calling listdir()
on any of the paths (by default, all exceptions are reported)
bf if True will cause a breadthfirst search instead of the
default depthfirst. Default: False
sort if True will sort entries within each directory level.
"""
for x in Visitor(fil, rec, ignore, bf, sort).gen(self):
yield x
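    # Hedged usage sketch (`p` is a hypothetical directory path):
    #   for x in p.visit(fil='*.py', rec='src*'):
    #       ...  # yields .py files, descending only into dirs matching 'src*'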
def _sortlist(self, res, sort):
if sort:
if hasattr(sort, '__call__'):
warnings.warn(DeprecationWarning(
"listdir(sort=callable) is deprecated and breaks on python3"
), stacklevel=3)
res.sort(sort)
else:
res.sort()
def samefile(self, other):
""" return True if other refers to the same stat object as self. """
return self.strpath == str(other)
def __fspath__(self):
return self.strpath
class Visitor:
def __init__(self, fil, rec, ignore, bf, sort):
if isinstance(fil, py.builtin._basestring):
fil = FNMatcher(fil)
if isinstance(rec, py.builtin._basestring):
self.rec = FNMatcher(rec)
elif not hasattr(rec, '__call__') and rec:
self.rec = lambda path: True
else:
self.rec = rec
self.fil = fil
self.ignore = ignore
self.breadthfirst = bf
self.optsort = sort and sorted or (lambda x: x)
def gen(self, path):
try:
entries = path.listdir()
except self.ignore:
return
rec = self.rec
dirs = self.optsort([p for p in entries
if p.check(dir=1) and (rec is None or rec(p))])
if not self.breadthfirst:
for subdir in dirs:
for p in self.gen(subdir):
yield p
for p in self.optsort(entries):
if self.fil is None or self.fil(p):
yield p
if self.breadthfirst:
for subdir in dirs:
for p in self.gen(subdir):
yield p
class FNMatcher:
def __init__(self, pattern):
self.pattern = pattern
def __call__(self, path):
pattern = self.pattern
if (pattern.find(path.sep) == -1 and
iswin32 and
pattern.find(posixpath.sep) != -1):
# Running on Windows, the pattern has no Windows path separators,
# and the pattern has one or more Posix path separators. Replace
# the Posix path separators with the Windows path separator.
pattern = pattern.replace(posixpath.sep, path.sep)
if pattern.find(path.sep) == -1:
name = path.basename
else:
name = str(path) # path.strpath # XXX svn?
if not os.path.isabs(pattern):
pattern = '*' + path.sep + pattern
return fnmatch.fnmatch(name, pattern)
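# Illustrative sketch (added note): FNMatcher instances are reusable predicates,
# e.g. matcher = FNMatcher('*.txt'); matcher(some_path). On win32, any Posix
# separators in the pattern are first mapped to the platform separator.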
|
mit
| -7,689,056,048,696,364,000
| 31.283224
| 82
| 0.537522
| false
| 4.407496
| false
| false
| false
|
KeepSafe/zendesk-helpcenter-cms
|
src/test/fixtures/__init__.py
|
1
|
1176
|
import model
def simple_category():
category = model.Category('category', 'category desc', 'category')
category.meta = {'id': 'category id', 'webtranslateit_ids': {'content': 'category translate id'}}
section = model.Section(category, 'section', 'section desc', 'section')
section.meta = {'id': 'section id', 'webtranslateit_ids': {'content': 'section translate id'}}
article = model.Article(section, 'article', 'body', 'article')
article.meta = {'id': 'article id',
'webtranslateit_ids': {'body': 'body translate id', 'content': 'article translate id'}}
category.sections.append(section)
section.articles.append(article)
return category
def category_with_translations():
category = simple_category()
group_translation = model.GroupTranslation('pl', 'dummy translation name', 'dummy translation description')
category.translations.append(group_translation)
category.sections[0].translations.append(group_translation)
article_translation = model.ArticleTranslation('pl', 'dummy name', 'dummy body')
category.sections[0].articles[0].translations.append(article_translation)
return category
|
apache-2.0
| 9,108,598,873,475,510,000
| 48
| 111
| 0.69983
| false
| 4.2
| false
| false
| false
|
Ray1235/CoDMayaTools
|
CoDMayaTools.py
|
1
|
161584
|
# Copyright 2016, Ray1235
# CoDMayaTools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------------------
#
# Change log now available on Github!
#
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------- Customization (You can change these values!) ----------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# Maximum number of warnings to show per export
MAX_WARNINGS_SHOWN = 1
# Number of slots in the export windows
EXPORT_WINDOW_NUMSLOTS = 100
# To export any black vertices as white, set to 'True'. Otherwise, set to 'False'.
CONVERT_BLACK_VERTS_TO_WHITE = False
# Enable Support for ExportX/Export2Bin
USE_EXPORT_X = False
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------------- Global ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
import os
import maya.cmds as cmds
import maya.mel as mel
import math
import sys
import datetime
import os.path
import traceback
import maya.OpenMaya as OpenMaya
import maya.OpenMayaAnim as OpenMayaAnim
import urllib2
import socket
import subprocess
import webbrowser
import Queue
import _winreg as reg
import time
import struct
import shutil
import zipfile
import re
import json
from PyCoD import xmodel as xModel
from PyCoD import xanim as xAnim
from array import array
from subprocess import Popen, PIPE, STDOUT
WarningsDuringExport = 0 # Number of warnings shown during current export
CM_TO_INCH = 0.3937007874015748031496062992126 # 1cm = 50/127in
M_PI = 3.14159265359
FILE_VERSION = 2.9
VERSION_CHECK_URL = "https://raw.githubusercontent.com/Ray1235/CoDMayaTools/master/version"
# Registry path for global data storage
GLOBAL_STORAGE_REG_KEY = (reg.HKEY_CURRENT_USER, "Software\\CoDMayaTools")
# name : control code name, control friendly name, data storage node name, refresh function, export function
OBJECT_NAMES = {'menu' : ["CoDMayaToolsMenu", "Call of Duty Tools", None, None, None],
'progress' : ["CoDMayaToolsProgressbar", "Progress", None, None, None],
'xmodel': ["CoDMayaXModelExportWindow", "Export XModel", "XModelExporterInfo", "RefreshXModelWindow", "ExportXModel"],
'xanim' : ["CoDMayaXAnimExportWindow", "Export XAnim", "XAnimExporterInfo", "RefreshXAnimWindow", "ExportXAnim"],
'xcam' : ["CoDMayaXCamExportWindow", "Export XCam", "XCamExporterInfo", "RefreshXCamWindow", "ExportXCam"]}
# Working Directory
WORKING_DIR = os.path.dirname(os.path.realpath(__file__))
# Current Game
currentGame = "none"
# Format (JOINT, PARENTNAME) : NEWNAME
# Leave parent to None to rename regardless.
RENAME_DICTONARY = {("tag_weapon", "tag_torso") : "tag_weapon_right",
("tag_weapon1", "tag_torso") : "tag_weapon_left",
("j_gun", None) : "tag_weapon",
("j_gun1", None) : "tag_weapon_le",
("tag_flash1", "j_gun1") : "tag_flash_le",
("tag_brass1", None) : "tag_brass_le",
}
# Tags to attach
GUN_BASE_TAGS = ["j_gun", "j_gun1", "j_gun", "j_gun1", "tag_weapon", "tag_weapon1"]
VIEW_HAND_TAGS = ["t7:tag_weapon_right", "t7:tag_weapon_left", "tag_weapon", "tag_weapon1", "tag_weapon_right", "tag_weapon_left"]
# Supported xModel Versions for importing.
SUPPORTED_XMODELS = [25, 62]
# xModel Versions based off games
XMODEL_VERSION = {
"CoD1" : 5,
"CoD2" : 6,
"CoD4" : 6,
"CoD5" : 6,
"CoD7" : 6,
"CoD12": 7
}
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------------ Init ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def CreateMenu():
cmds.setParent(mel.eval("$temp1=$gMainWindow"))
if cmds.control(OBJECT_NAMES['menu'][0], exists=True):
cmds.deleteUI(OBJECT_NAMES['menu'][0], menu=True)
menu = cmds.menu(OBJECT_NAMES['menu'][0],
label=OBJECT_NAMES["menu"][1],tearOff=True)
# Export tools
cmds.menuItem(label=OBJECT_NAMES['xmodel'][1]+"...",
command="CoDMayaTools.ShowWindow('xmodel')")
cmds.menuItem(label=OBJECT_NAMES['xanim'][1]+"...",
command="CoDMayaTools.ShowWindow('xanim')")
cmds.menuItem(label=OBJECT_NAMES['xcam'][1]+"...",
command="CoDMayaTools.ShowWindow('xcam')")
# Import tools
cmds.menuItem(divider=True)
cmds.menuItem(label="Import XModel...",
subMenu=True)
cmds.menuItem(label="...from CoD7",
command="CoDMayaTools.ImportXModel('CoD7')")
cmds.menuItem(label="...from CoD5",
command="CoDMayaTools.ImportXModel('CoD5')")
cmds.menuItem(label="...from CoD4",
command="CoDMayaTools.ImportXModel('CoD4')")
cmds.setParent(menu,
menu=True)
cmds.menuItem(divider=True)
# Utilities Menu
util_menu = cmds.menuItem(label="Utilities",
subMenu=True)
cmds.menuItem(divider=True)
# Rays Animation Toolkit
cmds.menuItem(label="Ray's Camera Animation Toolkit",
subMenu=True)
cmds.menuItem(label="Mark as camera",
command="CoDMayaTools.setObjectAlias('camera')")
cmds.menuItem(label="Mark as weapon",
command="CoDMayaTools.setObjectAlias('weapon')")
cmds.menuItem(divider=True)
cmds.menuItem(label="Generate camera animation",
command="CoDMayaTools.GenerateCamAnim()")
cmds.menuItem(divider=True)
cmds.menuItem(label="Remove camera animation in current range",
command=RemoveCameraKeys)
cmds.menuItem(label="Reset camera",
command=RemoveCameraAnimData)
cmds.setParent(util_menu,
menu=True)
# Attach Weapon To Rig
cmds.menuItem(divider=True)
cmds.menuItem(label="Attach Weapon to Rig", command=lambda x:WeaponBinder())
# IWIxDDS
cmds.menuItem(divider=True)
cmds.menuItem(label="Convert IWI to DDS",
command=lambda x:IWIToDDSUser())
# Viewmodel Tools
cmds.menuItem(label="ViewModel Tools", subMenu=True)
cmds.menuItem(label="Create New Gunsleeve Maya File",
command=lambda x:CreateNewGunsleeveMayaFile())
cmds.menuItem(label="Create New ViewModel Rig File",
command=lambda x:CreateNewViewmodelRigFile())
cmds.menuItem(label="Switch Gun in Current Rig File",
command=lambda x:SwitchGunInCurrentRigFile())
cmds.setParent(menu, menu=True)
# Settings
cmds.menuItem(divider=True)
settings_menu = cmds.menuItem(label="Settings", subMenu=True)
# Game/Game Folder Settings
cmds.menuItem(label="Game Settings", subMenu=True)
cmds.menuItem(label="Set CoD 1 Root Folder", command=lambda x:SetRootFolder(None, 'CoD1'))
cmds.menuItem(label="Set CoD 2 Root Folder", command=lambda x:SetRootFolder(None, 'CoD2'))
cmds.menuItem(label="Set MW Root Folder", command=lambda x:SetRootFolder(None, 'CoD4'))
cmds.menuItem(label="Set WaW Root Folder", command=lambda x:SetRootFolder(None, 'CoD5'))
cmds.menuItem(label="Set Bo1 Root Folder", command=lambda x:SetRootFolder(None, 'CoD7'))
cmds.menuItem(label="Set Bo3 Root Folder", command=lambda x:SetRootFolder(None, 'CoD12'))
cmds.menuItem(divider=True)
cmds.radioMenuItemCollection()
games = GetCurrentGame(True)
cmds.menuItem( label="Current Game:")
cmds.menuItem( label='CoD 1', radioButton=games["CoD1"], command=lambda x:SetCurrentGame("CoD1"))
cmds.menuItem( label='CoD 2', radioButton=games["CoD2"], command=lambda x:SetCurrentGame("CoD2"))
cmds.menuItem( label='CoD MW', radioButton=games["CoD4"], command=lambda x:SetCurrentGame("CoD4"))
cmds.menuItem( label='CoD WaW', radioButton=games["CoD5"], command=lambda x:SetCurrentGame("CoD5"))
cmds.menuItem( label='CoD Bo1', radioButton=games["CoD7"], command=lambda x:SetCurrentGame("CoD7"))
cmds.menuItem( label='CoD Bo3', radioButton=games["CoD12"] , command=lambda x:SetCurrentGame("CoD12"))
cmds.setParent(settings_menu, menu=True)
# ExportX/Export2Bin Options (Deprecated)
if(USE_EXPORT_X):
cmds.menuItem("E2B", label='Use ExportX', checkBox=QueryToggableOption('E2B'), command=lambda x:SetToggableOption('E2B') )
cmds.menuItem(label="Set Path to ExportX", command=lambda x:SetExport2Bin())
# Misc. Options.
cmds.menuItem(divider=True)
cmds.menuItem("AutomaticRename", label='Automatically rename joints (J_GUN, etc.)', checkBox=QueryToggableOption('AutomaticRename'), command=lambda x:SetToggableOption('AutomaticRename') )
cmds.menuItem("PrefixNoteType", label='Automatically prefix notetracks with type (sndnt# or rmbnt#)', checkBox=QueryToggableOption('PrefixNoteType'), command=lambda x:SetToggableOption('PrefixNoteType') )
cmds.menuItem("MeshMerge", label='Merge Meshes on export', checkBox=QueryToggableOption('MeshMerge'), command=lambda x:SetToggableOption('MeshMerge') )
cmds.menuItem("AutoUpdate", label='Auto Updates', checkBox=QueryToggableOption('AutoUpdate'), command=lambda x:SetToggableOption('AutoUpdate') )
# cmds.menuItem("PrintExport", label='Print xmodel_export information.', checkBox=QueryToggableOption('PrintExport'), command=lambda x:SetToggableOption('PrintExport')" )
cmds.setParent(menu, menu=True)
cmds.menuItem(divider=True)
# For easy script updating
cmds.menuItem(label="Reload Script", command="reload(CoDMayaTools)")
# Tools Info
cmds.menuItem(label="About", command=lambda x:AboutWindow())
def SetCurrentGame(game=None):
if game is None:
return
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
except WindowsError:
storageKey = reg.CreateKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
reg.SetValueEx(storageKey, "CurrentGame", 0, reg.REG_SZ, game )
def GetCurrentGame(return_dict=False):
games = {
"CoD1" : False,
"CoD2" : False,
"CoD4" : False,
"CoD5" : False,
"CoD7" : False,
"CoD12" : False
}
# Try get the current game set.
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
game = reg.QueryValueEx(storageKey, "CurrentGame")[0]
except WindowsError:
# Failed, create it and fall back to Bo3
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
try:
            reg.SetValueEx(storageKey, "CurrentGame", 0, reg.REG_SZ, "CoD12")
game = reg.QueryValueEx(storageKey, "CurrentGame")[0]
except:
# Fall back to Black Ops III if a game isn't set, and we can't create one.
game = "CoD12"
games[game] = True
    # Return dictionary for radio buttons
if return_dict:
return games
# Return current game for everything else
else:
return game
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------- Import Common --------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def ImportFileSelectDialog(codRootPath, type):
print(codRootPath)
importFrom = None
if cmds.about(version=True)[:4] == "2012": # There is a bug in later versions of Maya with the file browser dialog and files with no extension
importFrom = cmds.fileDialog2(fileMode=1, fileFilter="%s Files (*)" % type, caption="Import %s" % type, startingDirectory=os.path.join(codRootPath, "raw/%s/" % type.lower()))
else:
importFrom = cmds.fileDialog2(fileMode=1, dialogStyle=1, fileFilter="%s Files (*)" % type, caption="Import %s" % type, startingDirectory=os.path.join(codRootPath, "raw/%s/" % type.lower()))
if importFrom == None or len(importFrom) == 0 or importFrom[0].strip() == "":
return None
path = importFrom[0].strip()
pathSplit = os.path.splitext(path) # Fix bug with Maya 2013
if pathSplit[1] == ".*":
        path = pathSplit[0]
return path
def UnitQuaternionToDegrees(x, y, z):
w = math.sqrt(1 - x*x - y*y - z*z) # The 4th component of a quaternion can be found from the other 3 components in unit quaternions
euler = OpenMaya.MQuaternion(x, y, z, w).asEulerRotation()
return (math.degrees(euler.x), math.degrees(euler.y), math.degrees(euler.z))
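# Worked check (added note): unit quaternions satisfy x*x + y*y + z*z + w*w == 1,
# so w = sqrt(1 - x*x - y*y - z*z); e.g. x = y = z = 0 gives w = 1, the identity
# rotation, and UnitQuaternionToDegrees(0, 0, 0) returns (0.0, 0.0, 0.0).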
def ReadJointRotation(f):
rot = struct.unpack('<hhh', f.read(6))
# Rotation is stored as a unit quaternion, but only the X, Y, and Z components are given, as integers scaled to -32768 to 32767
rot = UnitQuaternionToDegrees(rot[0] / 32768.0, rot[1] / 32768.0, rot[2] / 32768.0)
return rot
def ReadNullTerminatedString(f):
byte = f.read(1)
string = ""
while struct.unpack('B', byte)[0] != 0:
string += byte
byte = f.read(1)
return string
def AutoCapsJointName(name):
if name.startswith("tag"):
return name.upper()
name = name.capitalize()
name = name.replace("_le_", "_LE_")
name = name.replace("_ri_", "_RI_")
if name[-3:] == "_le":
name = name[:-3] + "_LE"
if name[-3:] == "_ri":
name = name[:-3] + "_RI"
# Capitalize the letter after each underscore
indices = set([m.start() for m in re.finditer("_", name)])
return "".join(c.upper() if (i-1) in indices else c for i, c in enumerate(name))
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------- Import XAnim --------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def ImportXAnim(game):
codRootPath = GetRootFolder(None, game) # Only call this once, because it might create a dialog box
xanimPath = ImportFileSelectDialog(codRootPath, "XAnim")
if not xanimPath:
return
print("Importing XAnim '%s'" % os.path.basename(xanimPath))
with open(xanimPath, "rb") as f:
# Check file version
version = f.read(2)
if len(version) == 0 or struct.unpack('H', version)[0] != 17:
MessageBox("ERROR: Not a valid XAnim file")
return
# Header
numFrames = struct.unpack('<H', f.read(2))[0]
numJoints = struct.unpack('<H', f.read(2))[0]
fileInfoBitfield = struct.unpack('<H', f.read(2))[0]
framerate = struct.unpack('<H', f.read(2))[0]
# Get anim type as string
animType = "absolute"
if fileInfoBitfield & 2:
animType = "delta"
elif fileInfoBitfield & 256:
animType = "relative"
elif fileInfoBitfield & 1024:
animType = "additive"
# ???
if animType == "absolute":
f.read(2) # ???
else:
print("Cannot read anim type '%s'" % animType)
return
# Read joint names
joints = []
for i in range(numJoints):
joints.append(ReadNullTerminatedString(f))
print joints
# Read joint frame data
for i in range(numJoints):
numRotations = struct.unpack('<H', f.read(2))[0]
for j in range(numRotations):
rot = ReadJointRotation(f)
numPositions = struct.unpack('<H', f.read(2))[0]
for j in range(numPositions):
pos = struct.unpack('<fff', f.read(12))
print pos
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------- Import XModel -------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def ImportXModel(game):
codRootPath = GetRootFolder(None, game) # Only call this once, because it might create a dialog box
xmodelPath = ImportFileSelectDialog(codRootPath, "XModel")
if not xmodelPath:
return
# Show progress bar
if cmds.control("w"+OBJECT_NAMES['progress'][0], exists=True):
cmds.deleteUI("w"+OBJECT_NAMES['progress'][0])
progressWindow = cmds.window("w"+OBJECT_NAMES['progress'][0], title=OBJECT_NAMES['progress'][1], width=302, height=22)
cmds.columnLayout()
progressControl = cmds.progressBar(OBJECT_NAMES['progress'][0], width=300, progress=0)
cmds.showWindow(progressWindow)
cmds.refresh() # Force the progress bar to be drawn
try:
print("Importing XModel '%s'" % os.path.basename(xmodelPath))
with open(xmodelPath, "rb") as f:
version = f.read(2)
if len(version) == 0 or struct.unpack('H', version)[0] not in SUPPORTED_XMODELS:
MessageBox("ERROR: Not a valid XModel file")
print("")
if game == "CoD4":
f.read(25) # ???
ReadNullTerminatedString(f)
elif game == "CoD5":
f.read(26) # ???
ReadNullTerminatedString(f)
elif game == "CoD7":
f.read(28) # ???
ReadNullTerminatedString(f)
ReadNullTerminatedString(f)
f.read(5)
print(f.tell())
lods = []
            for i in range(4): # 4 is the maximum possible number of LODs
someInt = struct.unpack('<I', f.read(4))
lodFileName = ReadNullTerminatedString(f)
if lodFileName != "":
lods.append({"name":lodFileName})
if len(lods) == 0:
MessageBox("ERROR: This XModel has no data (no LOD files)!")
return
f.read(4) # Spacer if next int isn't 0, otherwise ???
count = struct.unpack('<I', f.read(4))[0]
print(count)
for i in range(count):
subcount = struct.unpack('<I', f.read(4))[0]
f.read((subcount * 48) + 36) # ???
for lod in lods:
materials = []
numMaterials = struct.unpack('<H', f.read(2))[0]
for i in range(numMaterials):
materials.append(ReadNullTerminatedString(f))
lod["materials"] = materials
# Load joint data (24 bytes per joint) ???
lodToLoad = lods[0]
if len(lods) > 1:
buttons = []
lodDict = {}
for lod in lods:
buttons.append(lod["name"])
lodDict[lod["name"]] = lod
buttons.sort()
result = cmds.confirmDialog(title="Select LOD level", message="This model has more than one LOD level. Select which one to import:", button=buttons, defaultButton=buttons[0], dismissString="EXIT")
if result in lodDict:
lodToLoad = lodDict[result]
lodToLoad["transformGroup"] = cmds.group(empty=True, name=lodToLoad["name"])
lodToLoad["materialMaps"] = LoadMaterials(lodToLoad, codRootPath)
lodToLoad["joints"] = LoadJoints(lodToLoad, codRootPath)
LoadSurfaces(lodToLoad, codRootPath, game)
AutoIKHandles(lodToLoad)
cmds.select(lodToLoad["transformGroup"], replace=True)
finally:
# Delete progress bar
cmds.deleteUI(progressWindow, window=True)
def LoadSurfaces(lod, codRootPath, game):
print("Loading XModel surface '%s'" % lod["name"])
with open(os.path.join(codRootPath, "raw/xmodelsurfs/%s" % lod["name"]), "rb") as f:
version = f.read(2)
if len(version) == 0 or struct.unpack('H', version)[0] not in SUPPORTED_XMODELS:
MessageBox("ERROR: Not a valid XModel surface file")
return
numMeshes = struct.unpack('<H', f.read(2))[0]
if numMeshes != len(lod["materials"]):
MessageBox("ERROR: Different number of meshes and materials on LOD '%s'" % lod["name"])
return
meshesCreated = []
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, maxValue=numMeshes*5+1, progress=0)
for i in range(numMeshes):
cmds.window("w"+OBJECT_NAMES['progress'][0], edit=True, title="Loading mesh %i..." % i)
# Read mesh header
a = struct.unpack('<B', f.read(1))[0] # ???
b = struct.unpack('<H', f.read(2))[0] # ???
numVerts = struct.unpack('<H', f.read(2))[0]
numTris = struct.unpack('<H', f.read(2))[0]
numVerts2 = struct.unpack('<H', f.read(2))[0]
physiqued = numVerts != numVerts2
if physiqued:
f.read(2) # ???
print("\tMesh %i is physiqued... this may not load correctly" % i)
if numVerts2 != 0:
while struct.unpack('H', f.read(2))[0] != 0: # Search for next 0 short ???
pass
f.read(2) # ???
else:
                    # If a mesh is influenced by only one joint,
                    # only that joint's index is stored, with an
                    # implicit influence of 1.0.
bone = struct.unpack('<I', f.read(4)) # ???
single_joint_bind = lod["joints"][bone[0]]["name"]
vertexArray = OpenMaya.MFloatPointArray()
uArray = OpenMaya.MFloatArray()
vArray = OpenMaya.MFloatArray()
polygonCounts = OpenMaya.MIntArray(numTris, 3)
polygonConnects = OpenMaya.MIntArray()
normals = []
vertsWeights = []
ProgressBarStep()
bones = []
# Read vertices
for j in range(numVerts):
# f.read(12) # ???
normal = struct.unpack('<fff', f.read(12)) # ???
normal = OpenMaya.MVector(normal[0], normal[1], normal[2])
color = struct.unpack('<BBBB', f.read(4))
uv = struct.unpack('<ff', f.read(8))
if game == "CoD7":
f.read(28)
else:
f.read(24)
numWeights = 0
finalBoneNumber = 0
if physiqued:
numWeights = struct.unpack('<B', f.read(1))[0]
finalBoneNumber = struct.unpack('<H', f.read(2))[0]
pos = struct.unpack('<fff', f.read(12))
totalWeight = 0
weights = []
for k in range(numWeights):
weight = struct.unpack('<HH', f.read(4)) # [0] = bone number, [1] = weight mapped to integer (range 0-(2^16))
totalWeight += weight[1] / 65536.0
joint = lod["joints"][weight[0]]["name"]
weights.append((joint, weight[1] / 65536.0))
weights.append((lod["joints"][finalBoneNumber]["name"], 1 - totalWeight)) # Final bone gets remaining weight
vertsWeights.append(weights)
vertexArray.append(pos[0]/CM_TO_INCH, pos[1]/CM_TO_INCH, pos[2]/CM_TO_INCH)
normals.append(normal)
uArray.append(uv[0])
vArray.append(1-uv[1])
# Read face indices
tris_list = OpenMaya.MIntArray()
vert_list = OpenMaya.MIntArray()
_normals = OpenMaya.MVectorArray()
for j in range(numTris):
face = struct.unpack('<HHH', f.read(6))
tris_list.append(j)
tris_list.append(j)
tris_list.append(j)
polygonConnects.append(face[0])
polygonConnects.append(face[2])
polygonConnects.append(face[1])
vert_list.append(face[0])
vert_list.append(face[2])
vert_list.append(face[1])
_normals.append(normals[face[0]])
_normals.append(normals[face[2]])
_normals.append(normals[face[1]])
ProgressBarStep()
# Create mesh
mesh = OpenMaya.MFnMesh()
transform = mesh.create(numVerts, numTris, vertexArray, polygonCounts, polygonConnects)
mesh.setFaceVertexNormals(_normals, tris_list, vert_list)
# UV map
mesh.setUVs(uArray, vArray)
mesh.assignUVs(polygonCounts, polygonConnects)
# Rename mesh
transformDagPath = OpenMaya.MDagPath()
OpenMaya.MDagPath.getAPathTo(transform, transformDagPath)
newPath = cmds.parent(transformDagPath.fullPathName(), lod["transformGroup"])
newPath = cmds.rename(newPath, "mesh%i" % i)
meshesCreated.append(newPath)
ProgressBarStep()
# Joint weights
# TODO: Make this faster!!! Soooo sloowwwwwww
if physiqued:
skin = cmds.skinCluster(lod["joints"][0]["name"], newPath)[0] # Bind the mesh to the root joint for now
for j, vertWeights in enumerate(vertsWeights):
cmds.skinPercent(skin, "%s.vtx[%i]" % (newPath, j), zeroRemainingInfluences=True, transformValue=vertWeights)
else:
skin = cmds.skinCluster(single_joint_bind, newPath,tsb=True, mi=1)[0]
ProgressBarStep()
# Apply textures
shader = cmds.shadingNode("lambert", name=lod["materials"][i], asShader=True)
cmds.select(newPath)
cmds.hyperShade(assign=shader)
colorMap = cmds.shadingNode("file", name=lod["materials"][i] + "_colorMap", asTexture=True)
cmds.connectAttr("%s.outColor" % colorMap, "%s.color" % shader)
if "colorMap" in lod["materialMaps"][lod["materials"][i]]:
cmds.setAttr("%s.fileTextureName" % colorMap, os.path.join(codRootPath, "raw/images/%s/%s.dds" % (lod["name"], lod["materialMaps"][lod["materials"][i]]["colorMap"])), type="string")
# Merge duplicates
mel.eval("polyMergeVertex -d 0.01 -am 1 -ch 0 %s;" % newPath) # Merge duplicate verts
mel.eval("polyMergeUV -d 0.01 -ch 0 %s;" % newPath) # Merge duplicate UVs
ProgressBarStep()
if len(f.read(1)) != 0: # Check if it's at the end of the file
MessageBox("The export completed, however it's quite likely that at least one of the meshes did not import correctly. See the Script Editor output for more information.")
ProgressBarStep()
def LoadJoints(lod, codRootPath):
print("Loading XModel joints '%s'" % lod["name"])
cmds.window("w"+OBJECT_NAMES['progress'][0], edit=True, title="Loading joints...")
joints = []
if not os.path.exists(os.path.join(codRootPath, "raw/xmodelparts/%s" % lod["name"])):
# cmds.joint("tag_origin", orientation=(0,0,0), position=(0,0,0), relative=True)
return
with open(os.path.join(codRootPath, "raw/xmodelparts/%s" % lod["name"]), "rb") as f:
version = f.read(2)
if len(version) == 0 or struct.unpack('H', version)[0] not in SUPPORTED_XMODELS:
MessageBox("ERROR: Not a valid XModel parts file")
return
# Number of bones
numJoints = struct.unpack('<H', f.read(2))[0]
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, maxValue=numJoints*2+1, progress=0)
if numJoints == 0: # Special case
joints.append({"parent": -1, "pos": (0.0,0.0,0.0), "rot": (0.0,0.0,0.0), "name": "TAG_ORIGIN"})
cmds.select(clear=True)
cmds.joint(name=joints[0]["name"], orientation=(0.0,0.0,0.0), position=(0.0,0.0,0.0), relative=True)
ProgressBarStep()
return joints
f.read(2) # ???
# Joint data
joints.append({"parent": -1, "pos": (0.0,0.0,0.0), "rot": (0.0,0.0,0.0)}) # parent joint
for i in range(numJoints):
parentJoint = struct.unpack('<B', f.read(1))[0]
pos = struct.unpack('<fff', f.read(12))
rot = ReadJointRotation(f)
joints.append({"parent": parentJoint, "pos": pos, "rot": rot})
ProgressBarStep()
for i in range(numJoints+1):
joints[i]["name"] = ReadNullTerminatedString(f).lower()
for joint in joints:
if joint["parent"] >= 0: # Select parent
cmds.select(joints[joint["parent"]]["name"], replace=True)
else:
cmds.select(clear=True)
cmds.joint(name=joint["name"], orientation=joint["rot"], position=(joint["pos"][0]/CM_TO_INCH, joint["pos"][1]/CM_TO_INCH, joint["pos"][2]/CM_TO_INCH), relative=True)
ProgressBarStep()
ProgressBarStep()
return joints
def LoadMaterials(lod, codRootPath):
noDupMaterials = list(set(lod["materials"]))
cmds.window("w"+OBJECT_NAMES['progress'][0], edit=True, title="Loading materials...")
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, maxValue=len(noDupMaterials)*2+1, progress=0)
iwdImages = LoadMainIWDImages(codRootPath)
ProgressBarStep()
# Create output folder
if not os.path.exists(os.path.join(codRootPath, "raw/images/%s/" % lod["name"])):
os.makedirs(os.path.join(codRootPath, "raw/images/%s/" % lod["name"]))
# Create material info file
infofile = open(os.path.join(codRootPath, "raw/images/%s/%s" % (lod["name"], "%s Material Info.txt" % lod["name"])), "w")
# Load materials
outMaterialList = {}
for material in noDupMaterials:
materialMaps = {}
# http://www.diegologic.net/diegologic/Programming/CoD4%20Material.html
path = os.path.join(codRootPath, "raw/materials/%s" % material)
path = os.path.normpath(path)
print("Loading material '%s'" % material)
if not os.path.exists(path):
print("Failed loading material, path does not exist.")
continue
with open(path, "rb") as f:
f.read(48) # Skip start of header
numMaps = struct.unpack('<H', f.read(2))[0]
f.read(14) # Skip the rest of header
for i in range(numMaps):
mapTypeOffset = struct.unpack('<I', f.read(4))[0]
f.read(4) # Skip
mapNameOffset = struct.unpack('<I', f.read(4))[0]
current = f.tell()
f.seek(mapTypeOffset)
mapType = ReadNullTerminatedString(f)
f.seek(mapNameOffset)
mapName = ReadNullTerminatedString(f)
f.seek(current)
materialMaps[mapType] = mapName
infofile.write("Material: %s\n" % material)
for type, mapName in materialMaps.items():
infofile.write("\t%s: %s\n" % (type, mapName))
infofile.write("\n")
outMaterialList[material] = materialMaps
ProgressBarStep()
# Gather .iwis
rawImages = os.listdir(os.path.join(codRootPath, "raw/images/"))
for type, mapName in materialMaps.items():
outPath = os.path.join(codRootPath, "raw/images/%s/%s%s" % (lod["name"], mapName, ".iwi"))
if os.path.exists(outPath):
continue
if (mapName + ".iwi") in rawImages:
shutil.copy(os.path.join(codRootPath, "raw/images/%s%s" % (mapName, ".iwi")), os.path.join(codRootPath, "raw/images/%s/" % lod["name"]))
elif (mapName + ".iwi") in iwdImages:
iwdName = iwdImages[mapName + ".iwi"]
zip = zipfile.ZipFile(os.path.join(codRootPath, "main/%s" % iwdName), "r")
# Extract from zip
source = zip.open("images/%s%s" % (mapName, ".iwi"))
target = file(outPath, "wb")
shutil.copyfileobj(source, target)
source.close()
target.close()
if type == "colorMap":
try:
IWIToDDS(outPath)
except:
print(traceback.format_exc())
ProgressBarStep()
infofile.close()
return outMaterialList
def AutoIKHandles(lod):
if len(lod["joints"]) < 2:
return
result = cmds.confirmDialog(title="Auto IK Handles", message="Is this a character (player or AI) model?", button=["Yes", "No"], defaultButton="No", dismissString="No")
if result == "Yes":
# Arms
SafeIKHandle("IK_Arm_LE", "J_Shoulder_LE", "J_Wrist_LE")
SafeIKHandle("IK_Arm_RI", "J_Shoulder_RI", "J_Wrist_RI")
# Left hand
SafeIKHandle("IK_Index_LE", "J_Index_LE_1", "J_Index_LE_3")
SafeIKHandle("IK_Mid_LE", "J_Mid_LE_1", "J_Mid_LE_3")
SafeIKHandle("IK_Ring_LE", "J_Ring_LE_1", "J_Ring_LE_3")
SafeIKHandle("IK_Pinky_LE", "J_Pinky_LE_1", "J_Pinky_LE_3")
SafeIKHandle("IK_Thumb_LE", "J_Thumb_LE_1", "J_Thumb_LE_3")
# Right hand
SafeIKHandle("IK_Index_RI", "J_Index_RI_1", "J_Index_RI_3")
SafeIKHandle("IK_Mid_RI", "J_Mid_RI_1", "J_Mid_RI_3")
SafeIKHandle("IK_Ring_RI", "J_Ring_RI_1", "J_Ring_RI_3")
SafeIKHandle("IK_Pinky_RI", "J_Pinky_RI_1", "J_Pinky_RI_3")
SafeIKHandle("IK_Thumb_RI", "J_Thumb_RI_1", "J_Thumb_RI_3")
# Legs
SafeIKHandle("IK_Leg_LE", "J_Hip_LE", "J_Ankle_LE")
SafeIKHandle("IK_Leg_RI", "J_Hip_RI", "J_Ankle_RI")
def SafeIKHandle(name, joint1, joint2):
# Only apply the IK Handle if both joints exist
if cmds.objExists(joint1) and cmds.nodeType(joint1) == 'joint' and cmds.objExists(joint2) and cmds.nodeType(joint2) == 'joint':
cmds.ikHandle(name=name, startJoint=joint1, endEffector=joint2)
def LoadMainIWDImages(codRootPath):
iwdImages = {}
if not os.path.exists(os.path.join(codRootPath, "main/")):
return iwdImages
iwds = os.listdir(os.path.join(codRootPath, "main/"))
for iwd in iwds:
if not iwd.endswith(".iwd"):
continue
zip = zipfile.ZipFile(os.path.join(codRootPath, "main/") + iwd, "r")
images = zip.namelist()
images = [x for x in images if x.startswith("images/")]
for i in range(len(images)):
imageName = images[i][7:]
if len(imageName) > 0:
iwdImages[imageName] = iwd
return iwdImages
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# --------------------------------------------------------------------------- IWI to DDS ---------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def IWIToDDS(inIWIPath):
splitPath = os.path.splitext(inIWIPath)
outDDSPath = splitPath[0] + ".dds"
supported_headers = [6, 13]
print("Converting %s to DDS" % os.path.basename(inIWIPath))
iwi_data = {}
# Offsets are different for V13 IWIs
iwi_data[6] = [8, 7]
iwi_data[13] = [9, 8]
if not os.path.exists(inIWIPath):
return False
with open(inIWIPath, 'rb') as inf:
# http://www.matejtomcik.com/Public/Projects/IWIExtractor/
if inf.read(3) != "IWi": # Read file identifier
print("\tERROR: Not a valid IWI file")
return False
header = struct.unpack('<BBBHHBBIIII', inf.read(25))
print("Header Version: %i" % header[0])
if header[0] not in supported_headers: # Make sure it's V6 or V13 IWI
print("\tERROR: Unsupported IWI version")
return False
imageType = None
if header[1] == 0xB: # DXT1
imageType = "DXT1"
elif header[1] == 0xC: # DXT3
imageType = "DXT3"
elif header[1] == 0xD: # DXT5
imageType = "DXT5"
else:
print("\tERROR: Unknown image format")
return False
print("Writing_DDS")
with open(outDDSPath, 'wb') as outf:
# http://msdn.microsoft.com/en-us/library/windows/desktop/bb943991(v=vs.85).aspx
outf.write("DDS ") # File indentifier
print("Written that stuff1")
# DDS_HEADER size, flags, height, width, pitch, depth, mipmap count
outf.write(struct.pack('<7I', 124, 659463, header[4], header[3], 0, 0, 1))
outf.write(struct.pack('<11I', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)) # Reserved
# DDS_PIXELFORMAT size, flags, type, masks
outf.write(struct.pack('II4s5I', 32, 4, imageType, 0, 0, 0, 0, 0))
# DDS_HEADER caps1
outf.write(struct.pack('5I', 4198408, 0, 0, 0, 0))
print("Written that stuff")
# Copy Images
# MIPMAP 0
inf.seek(header[iwi_data[header[0]][0]])
outf.write(inf.read(header[iwi_data[header[0]][1]] - header[iwi_data[header[0]][0]]))
# # MIPMAP 1
# inf.seek(header[9])
# outf.write(inf.read(header[8] - header[9]))
# # MIPMAP 2
# inf.seek(header[10])
# outf.write(inf.read(header[9] - header[10]))
return True
def IWIToDDSUser():
codRootPath = GetRootFolder() # Only call this once, because it might create a dialog box
files = cmds.fileDialog2(fileMode=4, fileFilter="IWI Images (*.iwi)", caption="Select IWI file", startingDirectory=os.path.join(codRootPath, "raw/images/"))
if files == None or len(files) == 0 or files[0].strip() == "":
return
success = True
for file in files:
if not IWIToDDS(file):
success = False
if not success:
MessageBox("One or more of the IWIs failed to convert. See the Script Editor output for more information.")
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------- Export Joints (XModel and XAnim) ----------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def GetJointList(export_type=None):
# Joints list.
joints = []
# Try get the cosmetic bone.
if export_type == "xmodel":
try:
# Get it.
cosmeticBone = cmds.getAttr(OBJECT_NAMES["xmodel"][2]+ ".Cosmeticbone").split("|")[-1].split(":")[-1]
# Does it exist in scene?
if not cmds.objExists(cosmeticBone):
# If it doesn't, don't assign a cosmetic bone.
cosmeticBone = None
else:
cosmeticBone = cosmeticBone.split("|")[-1].split(":")[-1]
except:
# No cosmetic set.
cosmeticBone = None
# Cosmetic Bones List
cosmetic_list = []
# Cosmetic Bone ID (for xmodel_export)
cosmetic_id = None
else:
# No cosmetic set.
cosmeticBone = None
# Cosmetic Bones List
cosmetic_list = []
# Cosmetic Bone ID (for xmodel_export)
cosmetic_id = None
# Get selected objects
selectedObjects = OpenMaya.MSelectionList()
OpenMaya.MGlobal.getActiveSelectionList(selectedObjects)
for i in range(selectedObjects.length()):
# Get object path and node
dagPath = OpenMaya.MDagPath()
selectedObjects.getDagPath(i, dagPath)
dagNode = OpenMaya.MFnDagNode(dagPath)
        # Ignore nodes that aren't joints or aren't top-level
if not dagPath.hasFn(OpenMaya.MFn.kJoint) or not RecursiveCheckIsTopNode(selectedObjects, dagNode):
continue
# Breadth first search of joint tree
searchQueue = Queue.Queue(0)
        searchQueue.put((-1, dagNode, True)) # (parent index, child node, include flag)
while not searchQueue.empty():
node = searchQueue.get()
index = len(joints)
if node[2]:
# Don't use main root bone.
if node[0] > -1:
                    # Name of the bone's parent, None for the root bone. Split it to remove dagpath separator and namespace.
bone_parentname = joints[node[0]][1].split("|")[-1].split(":")[-1]
else:
# Skip.
bone_parentname = None
                # Name of the bone. Split it to remove dagpath separator and namespace.
bone_name = node[1].partialPathName().split("|")[-1].split(":")[-1]
# Check for automatic rename.
if QueryToggableOption("AutomaticRename"):
                    # Run over dictionary of possible joints to rename.
for potjoints, new_name in RENAME_DICTONARY.iteritems():
# Found one
if bone_name == potjoints[0]:
# Check if it's a child bone of what we want, None to rename regardless.
if potjoints[1] is None or potjoints[1] == bone_parentname:
bone_name = new_name
# Check if we have cosmetic bone.
if cosmeticBone is not None and bone_parentname == cosmeticBone:
# Append it.
cosmetic_list.append((node[0], bone_name, node[1]))
else:
# Not a cosmetic, add it to normal joints.
joints.append((node[0], bone_name, node[1]))
# Our cosmetic parent.
if bone_name == cosmeticBone:
cosmetic_id = index
else:
index = node[0]
for i in range(node[1].childCount()):
dagPath = OpenMaya.MDagPath()
childNode = OpenMaya.MFnDagNode(node[1].child(i))
childNode.getPath(dagPath)
searchQueue.put((index, childNode, selectedObjects.hasItem(dagPath) and dagPath.hasFn(OpenMaya.MFn.kJoint)))
# Cosmetic bones must be at the end, so append them AFTER we've added other bones.
joints = joints + cosmetic_list
return joints, cosmetic_list, cosmetic_id
def GetCameraList():
cameras = []
# Get selected objects
selectedObjects = OpenMaya.MSelectionList()
OpenMaya.MGlobal.getActiveSelectionList(selectedObjects)
for i in range(selectedObjects.length()):
# Get object path and node
dagPath = OpenMaya.MDagPath()
selectedObjects.getDagPath(i, dagPath)
dagNode = OpenMaya.MFnDagNode(dagPath)
        # Ignore nodes that aren't cameras or aren't top-level
if not dagPath.hasFn(OpenMaya.MFn.kCamera):
ProgressBarStep()
continue
# Breadth first search of camera tree
searchQueue = Queue.Queue(0)
        searchQueue.put((-1, dagNode, True)) # (parent index, child node, include flag)
while not searchQueue.empty():
node = searchQueue.get()
index = len(cameras)
if node[2]:
cameras.append((node[0], node[1]))
else:
index = node[0]
for i in range(node[1].childCount()):
dagPath = OpenMaya.MDagPath()
childNode = OpenMaya.MFnDagNode(node[1].child(i))
childNode.getPath(dagPath)
searchQueue.put((index, childNode, selectedObjects.hasItem(dagPath) and dagPath.hasFn(OpenMaya.MFn.kCamera)))
ProgressBarStep()
return cameras
def RecursiveCheckIsTopNode(cSelectionList, currentNode): # Checks if the given node has ANY selected parent, grandparent, etc joints
if currentNode.parentCount() == 0:
return True
for i in range(currentNode.parentCount()):
parentDagPath = OpenMaya.MDagPath()
parentNode = OpenMaya.MFnDagNode(currentNode.parent(i))
parentNode.getPath(parentDagPath)
if not parentDagPath.hasFn(OpenMaya.MFn.kJoint): # Not a joint, but still check parents
if not RecursiveCheckIsTopNode(cSelectionList, parentNode):
return False # A parent joint is selected, we're done
else:
continue # No parent joints are selected, ignore this node
if cSelectionList.hasItem(parentDagPath):
return False
else:
if not RecursiveCheckIsTopNode(cSelectionList, parentNode):
return False
return True
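# Sketch (added note): with a joint chain A -> B -> C and both A and B selected,
# A is a top node but B is not, because B's parent A is in the selection list;
# the breadth-first export walk therefore starts from A alone.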
def GetJointData(jointNode, frame=0):
# Get the joint's transform
path = OpenMaya.MDagPath()
jointNode.getPath(path)
transform = OpenMaya.MFnTransform(path)
# Get joint position
pos = transform.getTranslation(OpenMaya.MSpace.kWorld)
# Get scale (almost always 1)
scaleUtil = OpenMaya.MScriptUtil()
scaleUtil.createFromList([1,1,1], 3)
scalePtr = scaleUtil.asDoublePtr()
transform.getScale(scalePtr)
scale = [OpenMaya.MScriptUtil.getDoubleArrayItem(scalePtr, 0), OpenMaya.MScriptUtil.getDoubleArrayItem(scalePtr, 1), OpenMaya.MScriptUtil.getDoubleArrayItem(scalePtr, 2)]
    # Get rotation matrix (mat is a 4x4, but the last row and column aren't needed)
rotQuaternion = OpenMaya.MQuaternion()
transform.getRotation(rotQuaternion, OpenMaya.MSpace.kWorld)
mat = rotQuaternion.asMatrix()
# Debug info: as euler rotation
#eulerRotation = rotQuaternion.asEulerRotation()
#eulerRotation.reorderIt(OpenMaya.MEulerRotation.kXYZ)
# Instead of writing it return it to Export function.
joint_offset = (pos.x*CM_TO_INCH, pos.y*CM_TO_INCH, pos.z*CM_TO_INCH)
joint_matrix = [(mat(0,0), mat(0,1), mat(0,2)),
(mat(1,0), mat(1,1), mat(1,2)),
(mat(2,0), mat(2,1), mat(2,2))]
joint_scale = (scale[0], scale[1], scale[2])
return joint_offset, joint_matrix, joint_scale
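# Hedged usage sketch (added note; `node` is a hypothetical joint MFnDagNode):
#   offset, matrix, scale = GetJointData(node)
# offset is the world-space translation converted to inches, matrix is a 3x3
# row-major rotation matrix and scale is (almost always) (1.0, 1.0, 1.0).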
def WriteNodeFloat(f, name, value, no_p=False):
if no_p:
f.write("\"%s\" : %f \n" % (name, value))
else:
f.write("\"%s\" : %f ,\n" % (name, value))
def WriteCameraData(initial, outJSON, cameraNode):
# Get the camera's transform
path = OpenMaya.MDagPath()
cameraNode.getPath(path)
cam = OpenMaya.MFnCamera(path)
transform = OpenMaya.MFnTransform(path)
#print fov
#fov = 40
aspectRatio = cam.aspectRatio()
flen = cam.focalLength() * (CM_TO_INCH*1.732050807568877)
fov = cam.verticalFieldOfView() * (180 / M_PI) * 1.571428571428571
fdist = cam.focusDistance() * CM_TO_INCH
fstop = cam.fStop()
lense = 10
# Get camera position
pos = transform.getTranslation(OpenMaya.MSpace.kWorld)
# Get scale (almost always 1)
scaleUtil = OpenMaya.MScriptUtil()
scaleUtil.createFromList([1,1,1], 3)
scalePtr = scaleUtil.asDoublePtr()
transform.getScale(scalePtr)
scale = [OpenMaya.MScriptUtil.getDoubleArrayItem(scalePtr, 0), OpenMaya.MScriptUtil.getDoubleArrayItem(scalePtr, 1), OpenMaya.MScriptUtil.getDoubleArrayItem(scalePtr, 2)]
    # Get rotation matrix (mat is a 4x4, but the last row and column aren't needed)
rotQuaternion = OpenMaya.MQuaternion()
transform.getRotation(rotQuaternion, OpenMaya.MSpace.kWorld)
mat = rotQuaternion.asMatrix()
# Debug info: as euler rotation
eulerRotation = rotQuaternion.asEulerRotation()
eulerRotation.reorderIt(OpenMaya.MEulerRotation.kXYZ)
# euler rotation is in radians, not degrees
eulerRotation.x = eulerRotation.x - (3.141/2)
eulerRotation.z = eulerRotation.z - (3.141/2)
#eulerRotation.y = eulerRotation.y + (3.141/2)
#print ("%f %f %f" % (eulerRotation.x*180/3.141, eulerRotation.y*180/3.141, eulerRotation.z*180/3.141))
mat = eulerRotation.asMatrix()
# Write
if(initial):
outJSON.update({
"aspectratio" : aspectRatio
})
outJSON.update({
"origin" : [pos.y * CM_TO_INCH, pos.x * -CM_TO_INCH, pos.z * CM_TO_INCH],
"dir" : [mat(1,0), mat(1,1), mat(1,2)],
"up" : [mat(2,0), mat(2,1), mat(2,2)],
"right" : [mat(0,0), mat(0,1), mat(0,2)],
"flen" : flen,
"fov" : fov,
"fdist" : fdist,
"fstop" : fstop,
"lense" : lense
})
#outJSON["origin"] = [pos.y * CM_TO_INCH, pos.x * -CM_TO_INCH, pos.z * CM_TO_INCH]
#float(pos.x*-CM_TO_INCH), float(pos.z*CM_TO_INCH)])
#outJSON["dir"] = [mat(1,0), mat(1,1), mat(1,2)]
#outJSON["up"] = [mat(2,0), mat(2,1), mat(2,2)]
#outJSON["right"] = [mat(0,0), mat(0,1), mat(0,2)]
#outJSON["flen"] = flen
#outJSON["fov"] = fov
#outJSON["fdist"] = fdist
#outJSON["fstop"] = fstop
#outJSON["lense"] = lense
# Get count for progress bar. No impact on export speed.
def GetNumInfo(selectedObjects):
# Mesh array to check for duplicates.
meshes = []
maxValue = 0
maxValue += len(cmds.ls(selection = True, type = "joint"))
for i in range(0, selectedObjects.length()):
# Grab mesh.
object = OpenMaya.MObject()
dagPath = OpenMaya.MDagPath()
selectedObjects.getDependNode(i, object)
selectedObjects.getDagPath(i, dagPath)
# Check it's a mesh.
if not dagPath.hasFn(OpenMaya.MFn.kMesh):
continue
dagPath.extendToShape()
# Check for duplicate.
if dagPath.partialPathName() in meshes:
continue
# Append.
meshes.append(dagPath.partialPathName())
# Get vert count for this mesh.
maxValue += OpenMaya.MItMeshVertex(dagPath).count()
# Get Face found for this mesh.
maxValue += OpenMaya.MItMeshPolygon(dagPath).count()
    # Progress bar total: selected joints plus per-mesh vertex and face counts.
return maxValue
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------- Export XModel -------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def ExportXModel(filePath, make_gdt=True):
# Get number of objects selected.
numSelectedObjects = len(cmds.ls(selection=True))
# Check if nothing is selected
if numSelectedObjects == 0:
return "Error: No objects selected for export"
# Check if we want to merge meshes into 1.
merge_mesh = QueryToggableOption('MeshMerge')
# Get Version for this xModel based off current game.
version = XMODEL_VERSION[GetCurrentGame()]
# Create Directory/ies
try:
directory = os.path.dirname(filePath)
if not os.path.exists(directory):
os.makedirs(directory)
except OSError as e:
typex, value, traceback = sys.exc_info()
return "Unable to create file:\n\n%s" % value.strerror
# Create new xModel Object
xmodel = xModel.Model()
# Get list of joints including cosmetics
joints = GetJointList("xmodel")
# Export Mesh Information
ExportMeshData(joints[0], xmodel, merge_mesh)
# Export Joints
if joints[0]:
# Run through joints
for i, joint in enumerate(joints[0]):
# Get Data for this joint
boneData = GetJointData(joint[2])
            # Create the bone, then flag it as cosmetic if its parent is the cosmetic bone
            bone = xModel.Bone(joint[1], joint[0])
            if(joint[0] == joints[2]):
                bone.cosmetic = True
# Offset
bone.offset = boneData[0]
# Rotation
bone.matrix = boneData[1]
# Scale
bone.scale = boneData[2]
# Append it.
xmodel.bones.append(bone)
# No bones selected, export just TAG_ORIGIN
else:
dummy_bone = xModel.Bone("TAG_ORIGIN", -1)
dummy_bone.offset = (0, 0, 0)
dummy_bone.matrix = [(1, 0, 0), (0, 1, 0), (0, 0, 1)]
xmodel.bones.append(dummy_bone)
# Get Extension to determine export type.
extension = os.path.splitext(filePath)[-1].lower()
# Write xModel
if(extension == ".xmodel_bin"):
xmodel.WriteFile_Bin(filePath, version)
else:
xmodel.WriteFile_Raw(filePath, version)
    # Separate conversion via Export2Bin/ExportX.
    # Set USE_EXPORT_X at the top of this file to enable.
if USE_EXPORT_X:
if QueryToggableOption('E2B'):
try:
RunExport2Bin(filePath)
except:
MessageBox("The xmodel exported successfully however Export2Bin/ExportX failed to run, the model will need to be converted manually.\n\nPlease check your paths.")
def GetMaterialsFromMesh(mesh, dagPath):
textures = {}
# http://rabidsquirrelgames.googlecode.com/svn/trunk/Maya%20plugin/fileExportCmd.py
# The code below gets a dictionary of [material name: material file name], ex: [a_material: a_material.dds]
shaders = OpenMaya.MObjectArray()
shaderIndices = OpenMaya.MIntArray()
mesh.getConnectedShaders(dagPath.instanceNumber(), shaders, shaderIndices)
for i in range(shaders.length()):
shaderNode = OpenMaya.MFnDependencyNode(shaders[i])
shaderPlug = shaderNode.findPlug("surfaceShader")
material = OpenMaya.MPlugArray()
            shaderPlug.connectedTo(material, 1, 0)
for j in range(material.length()):
materialNode = OpenMaya.MFnDependencyNode(material[j].node())
colorPlug = materialNode.findPlug("color")
dgIt = OpenMaya.MItDependencyGraph(
colorPlug,
OpenMaya.MFn.kFileTexture,
OpenMaya.MItDependencyGraph.kUpstream,
OpenMaya.MItDependencyGraph.kBreadthFirst,
OpenMaya.MItDependencyGraph.kNodeLevel)
texturePath = ""
try: # If there is no texture, this part can throw an exception
dgIt.disablePruningOnFilter()
textureNode = OpenMaya.MFnDependencyNode(dgIt.currentItem())
texturePlug = textureNode.findPlug("fileTextureName")
texturePath = os.path.basename(texturePlug.asString())
except Exception:
pass
textures[i] = (materialNode.name(), texturePath)
texturesToFaces = []
for i in range(shaderIndices.length()):
if shaderIndices[i] in textures:
texturesToFaces.append(textures[shaderIndices[i]])
else:
texturesToFaces.append(None)
return texturesToFaces
# Converts a set of vertices (toConvertVertexIndices) from object-relative IDs to face-relative IDs
# vertexIndices is a list of object-relative vertex indices in face order (from polyIter.getVertices)
# toConvertVertexIndices is any set of vertices from the same faces as vertexIndices, not necessarily the same length
# Returns false if a vertex index is unable to be converted (= bad vertex values)
def VerticesObjRelToLocalRel(vertexIndices, toConvertVertexIndices):
# http://svn.gna.org/svn/cal3d/trunk/cal3d/plugins/cal3d_maya_exporter/MayaMesh.cpp
localVertexIndices = OpenMaya.MIntArray()
for i in range(toConvertVertexIndices.length()):
found = False
for j in range(vertexIndices.length()):
if toConvertVertexIndices[i] == vertexIndices[j]:
localVertexIndices.append(j)
found = True
break
if not found:
return False
return localVertexIndices
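# Worked sketch (added note, hypothetical indices): with face-order
# vertexIndices [10, 12, 15], converting [15, 10] yields [2, 0]; an index not
# found in the face makes the function return False instead.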
def ExportMeshData(joints, xmodel, merge_mesh = True):
meshes = []
verts = []
tris = []
materialDict = {}
materials = []
# xModel
# Convert the joints to a dictionary, for simple searching for joint indices
jointDict = {}
for i, joint in enumerate(joints):
jointDict[joint[2].partialPathName()] = i
# Get all selected objects
selectedObjects = OpenMaya.MSelectionList()
OpenMaya.MGlobal.getActiveSelectionList(selectedObjects)
# The global vert index at the start of each object
currentStartingVertIndex = 0
global_mesh = xModel.Mesh("GlobalMesh")
progressInfo = GetNumInfo(selectedObjects)
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, maxValue = progressInfo)
# Loop through all objects
for i in range(0, selectedObjects.length()):
# Get data on object
object = OpenMaya.MObject()
dagPath = OpenMaya.MDagPath()
selectedObjects.getDependNode(i, object)
selectedObjects.getDagPath(i, dagPath)
# Ignore dag nodes that aren't shapes or shape transforms
if not dagPath.hasFn(OpenMaya.MFn.kMesh):
continue
# Lower path to shape node
# Selecting a shape transform or shape will get the same dagPath to the shape using this
dagPath.extendToShape()
# Check for duplicates
if dagPath.partialPathName() in meshes:
continue
# Add shape to list
meshes.append(dagPath.partialPathName())
# Create new xMesh
xmesh = xModel.Mesh(dagPath.partialPathName())
# Get Maya Mesh
mesh = OpenMaya.MFnMesh(dagPath)
# Get skin cluster
clusterName = mel.eval("findRelatedSkinCluster " + dagPath.partialPathName())
# Check for skin
hasSkin = False
if clusterName != None and clusterName != "" and not clusterName.isspace():
hasSkin = True
selList = OpenMaya.MSelectionList()
selList.add(clusterName)
clusterNode = OpenMaya.MObject()
selList.getDependNode(0, clusterNode)
skin = OpenMayaAnim.MFnSkinCluster(clusterNode)
# Get vertices
vertIter = OpenMaya.MItMeshVertex(dagPath)
# Loop until we're done iterating over vertices
while not vertIter.isDone():
# Create Vertex
vertex = xModel.Vertex(
(
vertIter.position(OpenMaya.MSpace.kWorld).x*CM_TO_INCH,
vertIter.position(OpenMaya.MSpace.kWorld).y*CM_TO_INCH,
vertIter.position(OpenMaya.MSpace.kWorld).z*CM_TO_INCH
)
)
# Check for influences
if hasSkin:
# Get weight values
weightValues = OpenMaya.MDoubleArray()
numWeights = OpenMaya.MScriptUtil() # Need this because getWeights crashes without being passed a count
skin.getWeights(dagPath, vertIter.currentItem(), weightValues, numWeights.asUintPtr())
# Get weight names
weightJoints = OpenMaya.MDagPathArray()
skin.influenceObjects(weightJoints)
# Make sure the list of weight values and names match
if weightValues.length() != weightJoints.length():
PrintWarning("Failed to retrieve vertex weight list on '%s.vtx[%d]'; using default joints." %
(dagPath.partialPathName(), vertIter.index()))
# Remove weights of value 0 or weights from unexported joints
finalWeights = []
weightsSize = 0
for i in range(0, weightJoints.length()):
# 0.000001 is the smallest decimal in xmodel exports
if weightValues[i] < 0.000001:
continue
jointName = weightJoints[i].partialPathName()
# Check for unexported joints.
if not jointName in jointDict:
PrintWarning("Unexported joint %s is influencing vertex '%s.vtx[%d]' by %f%%\n" %
(("'%s'" % jointName).ljust(15), dagPath.partialPathName(), vertIter.index(), weightValues[i]*100))
else:
finalWeights.append([jointDict[jointName], weightValues[i]])
weightsSize += weightValues[i]
# Make sure the total weight adds up to 1
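                # e.g. surviving weights [0.6, 0.2] give weightsSize = 0.8, so the
                # 1/0.8 = 1.25 multiplier below rescales them to [0.75, 0.25].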
if weightsSize > 0:
weightMultiplier = 1 / weightsSize
for weight in finalWeights:
weight[1] *= weightMultiplier
vertex.weights.append((weight[0], weight[1]))
            # Check if no weights were written (can happen due to deformers)
if not len(vertex.weights):
vertex.weights.append((0, 1.0))
# Add to mesh
xmesh.verts.append(vertex)
# Next vert
ProgressBarStep()
vertIter.next()
# Get materials used by this mesh
meshMaterials = GetMaterialsFromMesh(mesh, dagPath)
# Loop through all faces
polyIter = OpenMaya.MItMeshPolygon(dagPath)
# Loop until we're done iterating over polygons
while not polyIter.isDone():
# Get this poly's material
polyMaterial = meshMaterials[polyIter.index()]
# Every face must have a material
if polyMaterial == None:
PrintWarning("Found no material on face '%s.f[%d]'; ignoring face" %
(dagPath.partialPathName(), polyIter.index()))
polyIter.next()
continue
# Add this poly's material to the global list of used materials
if not polyMaterial[0] in materialDict:
materialDict[polyMaterial[0]] = len(materials)
materials.append(polyMaterial)
# Get vertex indices of this poly, and the vertex indices of this poly's triangles
trianglePoints = OpenMaya.MPointArray()
triangleIndices = OpenMaya.MIntArray()
vertexIndices = OpenMaya.MIntArray()
polyIter.getTriangles(trianglePoints, triangleIndices)
polyIter.getVertices(vertexIndices)
# localTriangleIndices is the same as triangleIndices,
# except each vertex is listed as the face-relative index
            # instead of the object-relative index
localTriangleIndices = VerticesObjRelToLocalRel(vertexIndices, triangleIndices)
            if localTriangleIndices is False:
return ("Failed to convert object-relative vertices to face-relative on poly '%s.f[%d]'" %
(dagPath.partialPathName(), polyIter.index()))
# Note: UVs, normals, and colors, are "per-vertex per face", because even though two faces may share
# a vertex, they might have different UVs, colors, or normals. So, each face has to contain this info
# for each of it's vertices instead of each vertex alone
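            # e.g. a hard-edged cube corner is a single vertex shared by three
            # faces, but each face stores its own normal and UV for that vertex.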
# UVs
Us = OpenMaya.MFloatArray()
Vs = OpenMaya.MFloatArray()
# Normals
normals = OpenMaya.MVectorArray()
# Attempt to get UVs
try:
polyIter.getUVs(Us, Vs)
except:
PrintWarning("Failed to aquire UVs on face '%s.f[%d]'; ignoring face" %
(dagPath.partialPathName(), polyIter.index()))
polyIter.next()
continue
# Attempt to get Normals
try:
polyIter.getNormals(normals, OpenMaya.MSpace.kWorld)
except:
PrintWarning("Failed to aquire Normals on face '%s.f[%d]'; ignoring face" %
(dagPath.partialPathName(), polyIter.index()))
polyIter.next()
continue
# Loop indices
            # triangleIndices.length() has 3 values per triangle
for i in range(triangleIndices.length()/3):
# New xModel Face
xface = xModel.Face(0 if merge_mesh else len(meshes)-1 , materialDict[polyMaterial[0]])
# Put local indices into an array for easy access
faceIndices = [
localTriangleIndices[i*3],
localTriangleIndices[i*3+1],
localTriangleIndices[i*3+2]
]
# Vertex Colors
vertColors = [
OpenMaya.MColor(),
OpenMaya.MColor(),
OpenMaya.MColor()
]
# Grab colors
polyIter.getColor(vertColors[0], faceIndices[0])
polyIter.getColor(vertColors[1], faceIndices[1])
polyIter.getColor(vertColors[2], faceIndices[2])
# Face Order
face_order = [0, 2, 1]
# Export Face-Vertex Data
for e in range(3):
xface.indices[face_order[e]] = xModel.FaceVertex(
# Vertex
currentStartingVertIndex + triangleIndices[i*3 + e],
# Normal (XYZ)
(
normals[faceIndices[e]].x,
normals[faceIndices[e]].y,
normals[faceIndices[e]].z
),
# Color (RGBA)
(
vertColors[e].r,
vertColors[e].g,
vertColors[e].b,
vertColors[e].a,
),
# UV (UV)
(Us[faceIndices[e]], 1-Vs[faceIndices[e]]))
# Append face
xmesh.faces.append(xface)
# Next poly
ProgressBarStep()
polyIter.next()
# Update starting vertex index
currentStartingVertIndex = len(verts)
xmodel.meshes.append(xmesh)
# Add Materials
for material in materials:
xmodel.materials.append(
xModel.Material(material[0].split(":")[-1],
"Lambert",
{"color_map" : material[1].split(":")[-1]})
)
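# Usage (sketch, with assumptions): the xmodel export path above drives this
# function roughly as follows. xModel.Model() is an assumed constructor name;
# GetJointList() and the WriteFile_* calls match their use elsewhere in this file:
#   joints = GetJointList()
#   xmodel = xModel.Model()
#   ExportMeshData(joints[0], xmodel, merge_mesh=True)
#   xmodel.WriteFile_Raw(filePath, version)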
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------- Export XAnim --------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def ExportXAnim(filePath):
# Progress bar
numSelectedObjects = len(cmds.ls(selection=True))
if numSelectedObjects == 0:
return "Error: No objects selected for export"
# Get data
joints = GetJointList()
if len(joints[0]) == 0:
return "Error: No joints selected for export"
# Get settings
frameStart = cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameStartField", query=True, value=True)
frameEnd = cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameEndField", query=True, value=True)
fps = cmds.intField(OBJECT_NAMES['xanim'][0]+"_FPSField", query=True, value=True)
QMultiplier = math.pow(2,cmds.intField(OBJECT_NAMES['xanim'][0]+"_qualityField", query=True, value=True))
multiplier = 1/QMultiplier
fps = fps/multiplier
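    # e.g. quality 2 gives QMultiplier = 4 and multiplier = 0.25, so the exported
    # framerate becomes fps*4 and the frame range below is scaled by 1/multiplier.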
# Reverse Bool
reverse = cmds.checkBox("CoDMAYA_ReverseAnim", query=True, value=True)
# Export Tag Align
write_tag_align = cmds.checkBox("CoDMAYA_TAGALIGN", query=True, value=True)
# Frame Range
frame_range = range(int(frameStart/multiplier), int((frameEnd+1)/multiplier))
# Check if we want to reverse this anim.
if reverse:
frame_range = list(reversed(frame_range))
if frameStart < 0 or frameStart > frameEnd:
return "Error: Invalid frame range (start < 0 or start > end)"
if fps <= 0:
return "Error: Invalid FPS (fps < 0)"
if multiplier <= 0 or multiplier > 1:
return "Error: Invalid multiplier (multiplier < 0 && multiplier >= 1)"
# Set Progress bar to our frame length
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, maxValue=len(frame_range) + 1)
# Create Directory/ies
try:
directory = os.path.dirname(filePath)
if not os.path.exists(directory):
os.makedirs(directory)
except OSError as e:
typex, value, traceback = sys.exc_info()
return "Unable to create file:\n\n%s" % value.strerror
# Create new xAnim
xanim = xAnim.Anim()
# Set Frame Rate
xanim.framerate = fps
# Add Joints
for i, joint in enumerate(joints[0]):
xanim.parts.append(xAnim.PartInfo(joint[1]))
# Export Tag Align (required for some anims)
if write_tag_align:
xanim.parts.append(xAnim.PartInfo("TAG_ALIGN"))
# Loop through frames
for n, i in enumerate(frame_range):
# Jump to frame
cmds.currentTime(i)
# Create Frame
frame = xAnim.Frame(n)
# Loop through joints
for j, joint in enumerate(joints[0]):
# Create Frame Part
frame_bone = xAnim.FramePart()
# Grab joint data for this part.
boneData = GetJointData(joint[2])
# Offset
frame_bone.offset = boneData[0]
# Rotation
frame_bone.matrix = boneData[1]
# Append it.
frame.parts.append(frame_bone)
# Export Tag Align (required for some anims)
if write_tag_align:
frame_bone = xAnim.FramePart()
frame_bone.offset = (0, 0, 0)
frame_bone.matrix = [(1, 0, 0), (0, 1, 0), (0, 0, 1)]
frame.parts.append(frame_bone)
# Add Frame
xanim.frames.append(frame)
# Increment Progress
ProgressBarStep()
# Get Notetracks for this Slot
slotIndex = cmds.optionMenu(OBJECT_NAMES['xanim'][0]+"_SlotDropDown", query=True, select=True)
noteList = cmds.getAttr(OBJECT_NAMES['xanim'][2]+(".notetracks[%i]" % slotIndex)) or ""
    # Notes (separated by comma)
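    # e.g. a stored notetrack string "reload_start:5,sndnt_mag_out:12" yields
    # notes named "reload_start" and "sndnt#_mag_out" at (frame - frameStart).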
notes = noteList.split(",")
# Run through note list
for note in notes:
# Split (frame : note string)
parts = note.split(":")
# Check for empty/bad string
if note.strip() == "" or len(parts) < 2:
continue
        # Keep only alphanumerics and underscores (allow rumble/sound note prefixes)
name = "".join([c for c in parts[0] if c.isalnum() or c=="_"]).replace("sndnt", "sndnt#").replace("rmbnt", "rmbnt#")
if name == "":
continue
# Get Frame and attempt to parse it
frame=0
try:
frame = int(parts[1]) - frameStart
if(reverse):
frame = (len(frame_range) - 1) - frame
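                # e.g. with a 10-frame range, a note at relative frame 3 maps to frame 6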
except ValueError:
continue
# Add to our notes list.
xanim.notes.append(xAnim.Note(frame, name))
# Get Extension
extension = os.path.splitext(filePath)[-1].lower()
# Export Bin
if(extension == ".xanim_bin"):
xanim.WriteFile_Bin(filePath, 3)
# Export Export
else:
xanim.WriteFile_Raw(filePath, 3)
# Refresh
cmds.refresh()
    # Separate Conversion via Export2Bin/ExportX
# Change variable at config at top to enable.
if USE_EXPORT_X:
if QueryToggableOption('E2B'):
try:
RunExport2Bin(filePath)
except:
MessageBox("The animation exported successfully however Export2Bin/ExportX failed to run, the animation will need to be converted manually.\n\nPlease check your paths.")
def WriteDummyTargetModelBoneRoot(outJSON, numframes):
# f.write("""
# "targetModelBoneRoots" : [
# {
# "name" : "TAG_ORIGIN",
# "animation" : [
# """)
outJSON["targetModelBoneRoots"] = [{
"name" : "TAG_ORIGIN",
"animation" : [],
},
{
"name" : "TAG_ALIGN",
"animation" : [],
}
]
    # Both bone roots receive identical identity animation for every frame
    for root in outJSON["targetModelBoneRoots"]:
        for i in range(0, numframes):
            root["animation"].append({
                "frame" : i,
                "offset" : [0.0, 0.0, 0.0],
                "axis" : {
                    "x" : [0.0, -1.0, 0.0],
                    "y" : [1.0, 0.0, 0.0],
                    "z" : [0.0, 0.0, 1.0]
                }
            })
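# The resulting JSON fragment looks roughly like (sketch):
#   "targetModelBoneRoots": [
#     {"name": "TAG_ORIGIN", "animation": [
#       {"frame": 0, "offset": [0.0, 0.0, 0.0],
#        "axis": {"x": [0.0, -1.0, 0.0], "y": [1.0, 0.0, 0.0], "z": [0.0, 0.0, 1.0]}},
#       ...]},
#     {"name": "TAG_ALIGN", "animation": [...]}]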
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# -------------------------------------------------------------------------- Export XCam --------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def ExportXCam(filePath):
# Progress bar
numSelectedObjects = len(cmds.ls(selection=True))
if numSelectedObjects == 0:
return "Error: No objects selected for export"
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, maxValue=numSelectedObjects+1)
# Get data
cameras = GetCameraList()
if len(cameras) == 0:
return "Error: No cameras selected for export"
# Get settings
frameStart = cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameStartField", query=True, value=True)
frameEnd = cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameEndField", query=True, value=True)
fps = cmds.intField(OBJECT_NAMES['xcam'][0]+"_FPSField", query=True, value=True)
# QMultiplier = math.pow(2,cmds.intField(OBJECT_NAMES['xcam'][0]+"_qualityField", query=True, value=True))
#multiplier = 1/QMultiplier
multiplier = 1
fps = fps/multiplier
if frameStart < 0 or frameStart > frameEnd:
return "Error: Invalid frame range (start < 0 or start > end)"
if fps <= 0:
return "Error: Invalid FPS (fps < 0)"
if multiplier <= 0 or multiplier > 1:
return "Error: Invalid multiplier (multiplier < 0 && multiplier >= 1)"
# Open file
f = None
try:
# Create export directory if it doesn't exist
directory = os.path.dirname(filePath)
if not os.path.exists(directory):
os.makedirs(directory)
# Create files
f = open(filePath, 'w')
except (IOError, OSError) as e:
typex, value, traceback = sys.exc_info()
return "Unable to create files:\n\n%s" % value.strerror
fLength = ((frameEnd-frameStart+1) / multiplier)
# Write header
outputJSON = {
"version" : 1,
"framerate" : fps,
"numframes" : fLength
}
outputJSON["scene"] = os.path.normpath(os.path.abspath(cmds.file(query=True, sceneName=True))).encode('ascii', 'ignore').replace('\\','/')
outputJSON["align"] = {
"tag" : "tag_align",
"offset" : [0.0000, 0.0000, 0.0000],
"axis" : {
"x" : [0.0, -1.0, 0.0],
"y" : [1.0, 0.0, 0.0],
"z" : [0.0, 0.0, 1.0]
}
}
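    # Note: this axis block appears to encode a 90-degree rotation about Z
    # (x -> -y, y -> +x), the same orientation written for the bone roots above.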
WriteDummyTargetModelBoneRoot(outputJSON, fLength)
# Write parts
outputJSON["cameras"] = []
currentFrame = cmds.currentTime(query=True)
for i, camera in enumerate(cameras):
name = camera[1].partialPathName().split("|")
name = name[len(name)-1].split(":") # Remove namespace prefixes
name = name[len(name)-1]
outputJSON["cameras"].append({
"name" : name,
"index" : i,
"type" : "Perspective",
"aperture" : "FOCAL_LENGTH"
        })
WriteCameraData(True, outputJSON["cameras"][i], camera[1])
#outputJSON["cameras"][i]["aspectratio"] = 16.0/9.0
outputJSON["cameras"][i]["nearz"] = 4
outputJSON["cameras"][i]["farz"] = 4000
outputJSON["cameras"][i]["animation"] = []
for j in range(int(frameStart), int((frameEnd+1))):
cmds.currentTime(j)
outputJSON["cameras"][i]["animation"].append({
"frame" : j
})
WriteCameraData(False, outputJSON["cameras"][i]["animation"][j-frameStart], camera[1])
outputJSON["cameraSwitch"] = []
cmds.currentTime(currentFrame)
ProgressBarStep()
# Write notetrack
slotIndex = cmds.optionMenu(OBJECT_NAMES['xcam'][0]+"_SlotDropDown", query=True, select=True)
noteList = cmds.getAttr(OBJECT_NAMES['xcam'][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
cleanNotes = []
for note in notes:
parts = note.split(":")
if note.strip() == "" or len(parts) < 2:
continue
name = "".join([c for c in parts[0] if c.isalnum() or c=="_"])
if name == "":
continue
frame=0
try:
frame = int(parts[1])
except ValueError:
continue
cleanNotes.append((name, frame))
outputJSON["notetracks"] = []
for note in cleanNotes:
outputJSON["notetracks"].append({
"name" : note[0],
"frame" : note[1]
})
#f.write("{\n \"name\" : \"%s\",\n \"frame\" : %d\n},\n" % (note[0], note[1]))
json.dump(outputJSON, f, indent=4)
f.close()
ProgressBarStep()
cmds.refresh()
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ------------------------------------------------------------------------ Viewmodel Tools -------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def DoesObjectExist(name, type):
if not cmds.objExists(name):
MessageBox("Error: Missing %s '%s'" % (type, name))
return False
return True
def CreateNewGunsleeveMayaFile():
global WarningsDuringExport
# Save reminder
if not SaveReminder(False):
return
# Get paths
filePath = cmds.file(query=True, sceneName=True)
split1 = os.path.split(filePath)
split2 = os.path.splitext(split1[1])
exportPath = os.path.join(split1[0], "gunsleeves_" + split2[0] + ".xmodel_export")
# Create a new file and import models
cmds.file(force=True, newFile=True)
cmds.file(os.path.join(GetRootFolder(), "bin/maya/rigs/viewmodel/ViewModel_DefMesh.mb"), i=True, type="mayaBinary")
cmds.file(filePath, i=True, type="mayaBinary")
# Check to make sure objects exist
if not DoesObjectExist("J_Gun", "joint"): return
if not DoesObjectExist("tag_weapon", "tag"): return
if not DoesObjectExist("GunExport", "object set"): return
if not DoesObjectExist("DefViewSkeleton", "object set"): return
if not DoesObjectExist("tag_view", "tag"): return
if not cmds.objExists("viewmodelSleeves_OpForce") and not cmds.objExists("viewmodelSleeves_Marines"):
MessageBox("Error: Missing viewsleeves 'viewmodelSleeves_OpForce' or 'viewmodelSleeves_Marines'")
return
# Attach gun to rig
cmds.select("J_Gun", replace=True)
cmds.select("tag_weapon", add=True)
cmds.parent()
# Select things to export
cmds.select("GunExport", replace=True)
cmds.select("DefViewSkeleton", toggle=True)
cmds.select("tag_view", toggle=True)
if cmds.objExists("viewmodelSleeves_OpForce"):
cmds.select("viewmodelSleeves_OpForce", toggle=True, hierarchy=True)
else:
cmds.select("viewmodelSleeves_Marines", toggle=True, hierarchy=True)
# Export
if cmds.control("w"+OBJECT_NAMES['progress'][0], exists=True):
cmds.deleteUI("w"+OBJECT_NAMES['progress'][0])
progressWindow = cmds.window("w"+OBJECT_NAMES['progress'][0], title=OBJECT_NAMES['progress'][1], width=302, height=22, sizable=False)
cmds.columnLayout()
progressControl = cmds.progressBar(OBJECT_NAMES['progress'][0], width=300)
cmds.showWindow(progressWindow)
cmds.refresh() # Force the progress bar to be drawn
# Export
WarningsDuringExport = 0
response = None
try:
response = ExportXModel(exportPath)
except Exception as e:
response = "An unhandled error occurred during export:\n\n" + traceback.format_exc()
# Delete progress bar
cmds.deleteUI(progressWindow, window=True)
# Handle response
if type(response) == str or type(response) == unicode:
MessageBox(response)
elif WarningsDuringExport > 0:
MessageBox("Warnings occurred during export. Check the script editor output for more details.")
if type(response) != str and type(response) != unicode:
MessageBox("Export saved to\n\n" + os.path.normpath(exportPath))
def CreateNewViewmodelRigFile():
# Save reminder
if not SaveReminder(False):
return
# Get path
filePath = cmds.file(query=True, sceneName=True)
# Create a new file and import models
cmds.file(force=True, newFile=True)
cmds.file(os.path.join(GetRootFolder(), "bin/maya/rigs/viewmodel/ViewModel_Rig.mb"), reference=True, type="mayaBinary", namespace="rig", options="v=0")
cmds.file(filePath, reference=True, type="mayaBinary", namespace="VM_Gun")
# Check to make sure objects exist
if not DoesObjectExist("VM_Gun:J_Gun", "joint"): return
if not cmds.objExists("rig:DefMesh:tag_weapon") and not cmds.objExists("ConRig:DefMesh:tag_weapon"):
MessageBox("Error: Missing viewsleeves 'rig:DefMesh:tag_weapon' or 'ConRig:DefMesh:tag_weapon'")
return
# Connect gun to rig
if cmds.objExists("rig:DefMesh:tag_weapon"):
cmds.select("rig:DefMesh:tag_weapon", replace=True)
else:
cmds.select("ConRig:DefMesh:tag_weapon", replace=True)
cmds.select("VM_Gun:J_Gun", toggle=True)
cmds.parentConstraint(weight=1, name="VMParentConstraint")
cmds.select(clear=True)
def SwitchGunInCurrentRigFile():
# Save reminder
if not SaveReminder():
return
# Make sure the rig is correct
if not cmds.objExists("rig:DefMesh:tag_weapon") and not cmds.objExists("ConRig:DefMesh:tag_weapon"):
MessageBox("Error: Missing rig:DefMesh:tag_weapon' or 'ConRig:DefMesh:tag_weapon'")
return
if not DoesObjectExist("VM_Gun:J_Gun", "joint"): return
# Prompt user to select a new gun file
gunPath = cmds.fileDialog2(fileMode=1, fileFilter="Maya Files (*.ma *.mb)", caption="Select a New Gun File", startingDirectory=GetRootFolder())
if gunPath == None or len(gunPath) == 0 or gunPath[0].strip() == "":
return
gunPath = gunPath[0].strip()
# Delete the constraint
cmds.delete("VMParentConstraint")
# Delete any hand attachments
if cmds.objExists("rig:Hand_Extra_RI_GRP.Parent"):
parentRI = cmds.getAttr("rig:Hand_Extra_RI_GRP.Parent")
if parentRI != "":
cmds.delete(parentRI)
if cmds.objExists("rig:Hand_Extra_LE_GRP.Parent"):
parentLE = cmds.getAttr("rig:Hand_Extra_LE_GRP.Parent")
if parentLE != "":
cmds.delete(parentLE)
# Switch guns
    cmds.file(gunPath, loadReference="VM_GunRN")
# Connect gun to rig
if cmds.objExists("rig:DefMesh:tag_weapon"):
cmds.select("rig:DefMesh:tag_weapon", replace=True)
else:
cmds.select("ConRig:DefMesh:tag_weapon", replace=True)
cmds.select("VM_Gun:J_Gun", toggle=True)
cmds.parentConstraint(weight=1, name="VMParentConstraint")
cmds.select(clear=True)
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------- XModel Export Window ----------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def CreateXModelWindow():
# Create window
if cmds.control(OBJECT_NAMES['xmodel'][0], exists=True):
cmds.deleteUI(OBJECT_NAMES['xmodel'][0])
cmds.window(OBJECT_NAMES['xmodel'][0], title=OBJECT_NAMES['xmodel'][1], width=340, height=1, retain=True, maximizeButton=False)
form = cmds.formLayout(OBJECT_NAMES['xmodel'][0]+"_Form")
# Controls
    slotDropDown = cmds.optionMenu(OBJECT_NAMES['xmodel'][0]+"_SlotDropDown", changeCommand="CoDMayaTools.RefreshXModelWindow()", annotation="Each slot contains a different export path, settings, and saved selection")
for i in range(1, EXPORT_WINDOW_NUMSLOTS+1):
cmds.menuItem(OBJECT_NAMES['xmodel'][0]+"_SlotDropDown"+("_s%i" % i), label="Slot %i" % i)
separator1 = cmds.separator(style='in', height=16)
separator2 = cmds.separator(style='in')
saveToLabel = cmds.text(label="Save to:", annotation="This is where the .xmodel_export is saved to")
saveToField = cmds.textField(OBJECT_NAMES['xmodel'][0]+"_SaveToField", height=21, changeCommand="CoDMayaTools.GeneralWindow_SaveToField('xmodel')", annotation="This is where the .xmodel_export is saved to")
fileBrowserButton = cmds.button(label="...", height=21, command="CoDMayaTools.GeneralWindow_FileBrowser('xmodel')", annotation="Open a file browser dialog")
    exportSelectedButton = cmds.button(label="Export Selected", command="CoDMayaTools.GeneralWindow_ExportSelected('xmodel', False)", annotation="Export all currently selected objects from the scene (current frame)\nWarning: Will automatically overwrite the export path if it already exists")
saveSelectionButton = cmds.button(label="Save Selection", command="CoDMayaTools.GeneralWindow_SaveSelection('xmodel')", annotation="Save the current object selection")
getSavedSelectionButton = cmds.button(label="Get Saved Selection", command="CoDMayaTools.GeneralWindow_GetSavedSelection('xmodel')", annotation="Reselect the saved selection")
exportMultipleSlotsButton = cmds.button(label="Export Multiple Slots", command="CoDMayaTools.GeneralWindow_ExportMultiple('xmodel')", annotation="Automatically export multiple slots at once, using each slot's saved selection")
    exportInMultiExportCheckbox = cmds.checkBox(OBJECT_NAMES['xmodel'][0]+"_UseInMultiExportCheckBox", label="Use current slot for Export Multiple", changeCommand="CoDMayaTools.GeneralWindow_ExportInMultiExport('xmodel')", annotation="Check this to make the 'Export Multiple Slots' button export this slot")
setCosmeticParentbone = cmds.button(OBJECT_NAMES['xmodel'][0]+"_MarkCosmeticParent", label="Set selected as Cosmetic Parent", command="CoDMayaTools.SetCosmeticParent('xmodel')", annotation="Set this bone as our cosmetic parent. All bones under this will be cosmetic.")
RemoveCosmeticParent = cmds.button(OBJECT_NAMES['xmodel'][0]+"_ClearCosmeticParent", label="Clear Cosmetic Parent", command="CoDMayaTools.ClearCosmeticParent('xmodel')", annotation="Remove the cosmetic parent.")
# Setup form
cmds.formLayout(form, edit=True,
attachForm=[(slotDropDown, 'top', 6), (slotDropDown, 'left', 10), (slotDropDown, 'right', 10),
(separator1, 'left', 0), (separator1, 'right', 0),
(separator2, 'left', 0), (separator2, 'right', 0),
(saveToLabel, 'left', 12),
(fileBrowserButton, 'right', 10),
(exportMultipleSlotsButton, 'bottom', 6), (exportMultipleSlotsButton, 'left', 10),
(exportInMultiExportCheckbox, 'bottom', 9), (exportInMultiExportCheckbox, 'right', 6),
(exportSelectedButton, 'left', 10),
(saveSelectionButton, 'right', 10),
(setCosmeticParentbone, 'left', 10),
(RemoveCosmeticParent, 'left', 10)],
#(exportSelectedButton, 'bottom', 6), (exportSelectedButton, 'left', 10),
#(saveSelectionButton, 'bottom', 6), (saveSelectionButton, 'right', 10),
#(getSavedSelectionButton, 'bottom', 6)],
attachControl=[ (separator1, 'top', 0, slotDropDown),
(saveToLabel, 'bottom', 9, exportSelectedButton),
(saveToField, 'bottom', 5, exportSelectedButton), (saveToField, 'left', 5, saveToLabel), (saveToField, 'right', 5, fileBrowserButton),
(fileBrowserButton, 'bottom', 5, exportSelectedButton),
(exportSelectedButton, 'bottom', 5, separator2),
(saveSelectionButton, 'bottom', 5, separator2),
(setCosmeticParentbone, 'bottom', 5, separator2),
(RemoveCosmeticParent, 'bottom', 5, separator2),
(saveSelectionButton, 'bottom', 5, setCosmeticParentbone),
(exportSelectedButton, 'bottom', 5, setCosmeticParentbone),
(setCosmeticParentbone, 'bottom', 5, RemoveCosmeticParent),
(getSavedSelectionButton, 'bottom', 5, separator2), (getSavedSelectionButton, 'right', 10, saveSelectionButton),
(getSavedSelectionButton, 'bottom', 5, setCosmeticParentbone),
(separator2, 'bottom', 5, exportMultipleSlotsButton)])
def RefreshXModelWindow():
# Refresh/create node
if len(cmds.ls(OBJECT_NAMES['xmodel'][2])) == 0:
cmds.createNode("renderLayer", name=OBJECT_NAMES['xmodel'][2], skipSelect=True)
cmds.lockNode(OBJECT_NAMES['xmodel'][2], lock=False)
if not cmds.attributeQuery("slot", node=OBJECT_NAMES['xmodel'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xmodel'][2], longName="slot", attributeType='short', defaultValue=1)
if not cmds.attributeQuery("paths", node=OBJECT_NAMES['xmodel'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xmodel'][2], longName="paths", multi=True, dataType='string')
cmds.setAttr(OBJECT_NAMES['xmodel'][2]+".paths", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("selections", node=OBJECT_NAMES['xmodel'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xmodel'][2], longName="selections", multi=True, dataType='stringArray')
cmds.setAttr(OBJECT_NAMES['xmodel'][2]+".selections", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("useinmultiexport", node=OBJECT_NAMES['xmodel'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xmodel'][2], longName="useinmultiexport", multi=True, attributeType='bool', defaultValue=False)
cmds.setAttr(OBJECT_NAMES['xmodel'][2]+".useinmultiexport", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("Cosmeticbone", node=OBJECT_NAMES['xmodel'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xmodel'][2], longName="Cosmeticbone", dataType="string")
cmds.lockNode(OBJECT_NAMES['xmodel'][2], lock=True)
# Set values
slotIndex = cmds.optionMenu(OBJECT_NAMES['xmodel'][0]+"_SlotDropDown", query=True, select=True)
path = cmds.getAttr(OBJECT_NAMES['xmodel'][2]+(".paths[%i]" % slotIndex))
cmds.setAttr(OBJECT_NAMES['xmodel'][2]+".slot", slotIndex)
cmds.textField(OBJECT_NAMES['xmodel'][0]+"_SaveToField", edit=True, fileName=path)
useInMultiExport = cmds.getAttr(OBJECT_NAMES['xmodel'][2]+(".useinmultiexport[%i]" % slotIndex))
cmds.checkBox(OBJECT_NAMES['xmodel'][0]+"_UseInMultiExportCheckBox", edit=True, value=useInMultiExport)
def SetCosmeticParent(reqarg):
selection = cmds.ls(selection = True, type = "joint")
if(len(selection) > 1):
MessageBox("Only 1 Cosmetic Parent is allowed.")
return
elif(len(selection) == 0):
MessageBox("No joint selected.")
return
cmds.setAttr(OBJECT_NAMES['xmodel'][2] + ".Cosmeticbone", selection[0], type="string")
MessageBox("\"%s\" has now been set as the cosmetic parent." % str(selection[0]))
def ClearCosmeticParent(reqarg):
cosmetic_bone = cmds.getAttr(OBJECT_NAMES["xmodel"][2]+ ".Cosmeticbone")
if cosmetic_bone is None:
cmds.error("No cosmetic bone set.")
cosmetic_bone = cosmetic_bone.split("|")[-1].split(":")[-1]
    if cosmetic_bone != "":
cmds.setAttr(OBJECT_NAMES['xmodel'][2] + ".Cosmeticbone", "", type="string")
MessageBox("Cosmetic Parent \"%s\" has now been removed." % cosmetic_bone)
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ----------------------------------------------------------------------- XAnim Export Window ----------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def CreateXAnimWindow():
# Create window
if cmds.control(OBJECT_NAMES['xanim'][0], exists=True):
cmds.deleteUI(OBJECT_NAMES['xanim'][0])
cmds.window(OBJECT_NAMES['xanim'][0], title=OBJECT_NAMES['xanim'][1], width=1, height=1, retain=True, maximizeButton=False)
form = cmds.formLayout(OBJECT_NAMES['xanim'][0]+"_Form")
# Controls
    slotDropDown = cmds.optionMenu(OBJECT_NAMES['xanim'][0]+"_SlotDropDown", changeCommand="CoDMayaTools.RefreshXAnimWindow()", annotation="Each slot contains a different export path, frame range, notetrack, and saved selection")
for i in range(1, EXPORT_WINDOW_NUMSLOTS+1):
cmds.menuItem(OBJECT_NAMES['xmodel'][0]+"_SlotDropDown"+("_s%i" % i), label="Slot %i" % i)
separator1 = cmds.separator(style='in')
separator2 = cmds.separator(style='in')
separator3 = cmds.separator(style='in')
framesLabel = cmds.text(label="Frames:", annotation="Range of frames to export")
framesStartField = cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameStartField", height=21, width=35, minValue=0, changeCommand="CoDMayaTools.UpdateFrameRange('xanim')", annotation="Starting frame to export (inclusive)")
framesToLabel = cmds.text(label="to")
framesEndField = cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameEndField", height=21, width=35, minValue=0, changeCommand="CoDMayaTools.UpdateFrameRange('xanim')", annotation="Ending frame to export (inclusive)")
    GrabFrames = cmds.button(label="Grab Frames", width=75, command="CoDMayaTools.SetFrames('xanim')", annotation="Get the start and end frames from the scene.")
fpsLabel = cmds.text(label="FPS:")
fpsField = cmds.intField(OBJECT_NAMES['xanim'][0]+"_FPSField", height=21, width=35, value=1, minValue=1, changeCommand="CoDMayaTools.UpdateFramerate('xanim')", annotation="Animation FPS")
qualityLabel = cmds.text(label="Quality (0-10)", annotation="Quality of the animation, higher values result in less jitter but produce larger files. Default is 0")
qualityField = cmds.intField(OBJECT_NAMES['xanim'][0]+"_qualityField", height=21, width=35, value=0, minValue=0, maxValue=10, step=1, changeCommand="CoDMayaTools.UpdateMultiplier('xanim')", annotation="Quality of the animation, higher values result in less jitter but produce larger files.")
notetracksLabel = cmds.text(label="Notetrack:", annotation="Notetrack info for the animation")
noteList = cmds.textScrollList(OBJECT_NAMES['xanim'][0]+"_NoteList", allowMultiSelection=False, selectCommand="CoDMayaTools.SelectNote('xanim')", annotation="List of notes in the notetrack")
addNoteButton = cmds.button(label="Add Note", width=75, command="CoDMayaTools.AddNote('xanim')", annotation="Add a note to the notetrack")
ReadNotesButton = cmds.button(label="Grab Notes", width=75, command="CoDMayaTools.ReadNotetracks('xanim')", annotation="Grab Notes from Notetrack in Outliner")
ClearNotes = cmds.button(label="Clear Notes", width=75, command="CoDMayaTools.ClearNotes('xanim')", annotation="Clear ALL notetracks.")
RenameNoteTrack = cmds.button(label="Rename Note", command="CoDMayaTools.RenameNotes('xanim')", annotation="Rename the currently selected note.")
removeNoteButton = cmds.button(label="Remove Note", command="CoDMayaTools.RemoveNote('xanim')", annotation="Remove the currently selected note from the notetrack")
noteFrameLabel = cmds.text(label="Frame:", annotation="The frame the currently selected note is applied to")
noteFrameField = cmds.intField(OBJECT_NAMES['xanim'][0]+"_NoteFrameField", changeCommand="CoDMayaTools.UpdateNoteFrame('xanim')", height=21, width=30, minValue=0, annotation="The frame the currently selected note is applied to")
saveToLabel = cmds.text(label="Save to:", annotation="This is where .xanim_export is saved to")
saveToField = cmds.textField(OBJECT_NAMES['xanim'][0]+"_SaveToField", height=21, changeCommand="CoDMayaTools.GeneralWindow_SaveToField('xanim')", annotation="This is where .xanim_export is saved to")
fileBrowserButton = cmds.button(label="...", height=21, command="CoDMayaTools.GeneralWindow_FileBrowser('xanim', \"XAnim Intermediate File (*.xanim_export)\")", annotation="Open a file browser dialog")
    exportSelectedButton = cmds.button(label="Export Selected", command="CoDMayaTools.GeneralWindow_ExportSelected('xanim', False)", annotation="Export all currently selected joints from the scene (specified frames)\nWarning: Will automatically overwrite the export path if it already exists")
saveSelectionButton = cmds.button(label="Save Selection", command="CoDMayaTools.GeneralWindow_SaveSelection('xanim')", annotation="Save the current object selection")
getSavedSelectionButton = cmds.button(label="Get Saved Selection", command="CoDMayaTools.GeneralWindow_GetSavedSelection('xanim')", annotation="Reselect the saved selection")
exportMultipleSlotsButton = cmds.button(label="Export Multiple Slots", command="CoDMayaTools.GeneralWindow_ExportMultiple('xanim')", annotation="Automatically export multiple slots at once, using each slot's saved selection")
    exportInMultiExportCheckbox = cmds.checkBox(OBJECT_NAMES['xanim'][0]+"_UseInMultiExportCheckBox", label="Use current slot for Export Multiple", changeCommand="CoDMayaTools.GeneralWindow_ExportInMultiExport('xanim')", annotation="Check this to make the 'Export Multiple Slots' button export this slot")
    ReverseAnimation = cmds.checkBox("CoDMAYA_ReverseAnim", label="Export Animation Reversed", annotation="Check this if you want to export the animation backwards. Useful for reversing to make opposite sprints, etc.", value=False)
TagAlignExport = cmds.checkBox("CoDMAYA_TAGALIGN", label="Export TAG_ALIGN", annotation="Check this if you want to export TAG_ALIGN with the animation, required for some animations (Not needed for Viewmodel Animations)", value=False)
# Setup form
cmds.formLayout(form, edit=True,
attachForm=[(slotDropDown, 'top', 6), (slotDropDown, 'left', 10), (slotDropDown, 'right', 10),
(separator1, 'left', 0), (separator1, 'right', 0),
(framesLabel, 'left', 10),
(fpsLabel, 'left', 10),
(qualityLabel, 'left', 10),
(notetracksLabel, 'left', 10),
(noteList, 'left', 10),
(ReverseAnimation, 'left', 10),
(TagAlignExport, 'left', 10),
(addNoteButton, 'right', 10),
(ReadNotesButton, 'right', 10),
(RenameNoteTrack, 'right', 10),
(ClearNotes, 'right', 10),
(removeNoteButton, 'right', 10),
(noteFrameField, 'right', 10),
(separator2, 'left', 0), (separator2, 'right', 0),
(saveToLabel, 'left', 12),
(fileBrowserButton, 'right', 10),
(exportMultipleSlotsButton, 'bottom', 6), (exportMultipleSlotsButton, 'left', 10),
(exportInMultiExportCheckbox, 'bottom', 9), (exportInMultiExportCheckbox, 'right', 6),
(exportSelectedButton, 'left', 10),
(saveSelectionButton, 'right', 10),
(separator3, 'left', 0), (separator3, 'right', 0)],
attachControl=[ (separator1, 'top', 6, slotDropDown),
(framesLabel, 'top', 8, separator1),
(framesStartField, 'top', 5, separator1), (framesStartField, 'left', 4, framesLabel),
(framesToLabel, 'top', 8, separator1), (framesToLabel, 'left', 4+35+4, framesLabel),
(framesEndField, 'top', 5, separator1), (framesEndField, 'left', 4, framesToLabel),
(GrabFrames, 'top', 5, separator1), (GrabFrames, 'left', 4, framesEndField),
(fpsLabel, 'top', 8, framesStartField),
(fpsField, 'top', 5, framesStartField), (fpsField, 'left', 21, fpsLabel),
(qualityLabel, 'top', 8, fpsField),
(qualityField, 'top', 5, fpsField), (qualityField, 'left', 21, qualityLabel),
(notetracksLabel, 'top', 5, qualityLabel),
(noteList, 'top', 5, notetracksLabel), (noteList, 'right', 10, removeNoteButton), (noteList, 'bottom', 60, separator2),
(ReverseAnimation, 'top', 10, noteList), (ReverseAnimation, 'right', 10, removeNoteButton),
(TagAlignExport, 'top', 5, ReverseAnimation),
(addNoteButton, 'top', 5, notetracksLabel),
(ReadNotesButton, 'top', 5, addNoteButton),
(RenameNoteTrack, 'top', 5, ReadNotesButton),
(ClearNotes, 'top', 5, RenameNoteTrack),
(removeNoteButton, 'top', 5, ClearNotes),
(noteFrameField, 'top', 5, removeNoteButton),
(noteFrameLabel, 'top', 8, removeNoteButton), (noteFrameLabel, 'right', 4, noteFrameField),
(separator2, 'bottom', 5, fileBrowserButton),
(saveToLabel, 'bottom', 10, exportSelectedButton),
(saveToField, 'bottom', 5, exportSelectedButton), (saveToField, 'left', 5, saveToLabel), (saveToField, 'right', 5, fileBrowserButton),
(fileBrowserButton, 'bottom', 5, exportSelectedButton),
(exportSelectedButton, 'bottom', 5, separator3),
(saveSelectionButton, 'bottom', 5, separator3),
(getSavedSelectionButton, 'bottom', 5, separator3), (getSavedSelectionButton, 'right', 10, saveSelectionButton),
(separator3, 'bottom', 5, exportMultipleSlotsButton)
])
def RefreshXAnimWindow():
# Refresh/create node
if len(cmds.ls(OBJECT_NAMES['xanim'][2])) == 0:
cmds.createNode("renderLayer", name=OBJECT_NAMES['xanim'][2], skipSelect=True)
cmds.lockNode(OBJECT_NAMES['xanim'][2], lock=False)
if not cmds.attributeQuery("slot", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="slot", attributeType='short', defaultValue=1)
if not cmds.attributeQuery("paths", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="paths", multi=True, dataType='string')
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".paths", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("selections", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="selections", multi=True, dataType='stringArray')
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".selections", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("frameRanges", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="frameRanges", multi=True, dataType='long2')
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".frameRanges", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("framerate", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="framerate", multi=True, attributeType='long', defaultValue=30)
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".framerate", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("multiplier", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="multiplier", multi=True, attributeType='long', defaultValue=30)
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".multiplier", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("notetracks", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="notetracks", multi=True, dataType='string') # Formatted as "<name>:<frame>,<name>:<frame>,..."
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".notetracks", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("useinmultiexport", node=OBJECT_NAMES['xanim'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xanim'][2], longName="useinmultiexport", multi=True, attributeType='bool', defaultValue=False)
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".useinmultiexport", size=EXPORT_WINDOW_NUMSLOTS)
cmds.lockNode(OBJECT_NAMES['xanim'][2], lock=True)
# Set values
slotIndex = cmds.optionMenu(OBJECT_NAMES['xanim'][0]+"_SlotDropDown", query=True, select=True)
cmds.setAttr(OBJECT_NAMES['xanim'][2]+".slot", slotIndex)
path = cmds.getAttr(OBJECT_NAMES['xanim'][2]+(".paths[%i]" % slotIndex))
cmds.textField(OBJECT_NAMES['xanim'][0]+"_SaveToField", edit=True, fileName=path)
frameRange = cmds.getAttr(OBJECT_NAMES['xanim'][2]+(".frameRanges[%i]" % slotIndex))
if frameRange == None:
cmds.setAttr(OBJECT_NAMES['xanim'][2]+(".frameRanges[%i]" % slotIndex), 0, 0, type='long2')
cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameStartField", edit=True, value=0)
cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameEndField", edit=True, value=0)
else:
cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameStartField", edit=True, value=frameRange[0][0])
cmds.intField(OBJECT_NAMES['xanim'][0]+"_FrameEndField", edit=True, value=frameRange[0][1])
framerate = cmds.getAttr(OBJECT_NAMES['xanim'][2]+(".framerate[%i]" % slotIndex))
cmds.intField(OBJECT_NAMES['xanim'][0]+"_FPSField", edit=True, value=framerate)
noteFrameField = cmds.intField(OBJECT_NAMES['xanim'][0]+"_NoteFrameField", edit=True, value=0)
cmds.textScrollList(OBJECT_NAMES['xanim'][0]+"_NoteList", edit=True, removeAll=True)
noteList = cmds.getAttr(OBJECT_NAMES['xanim'][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
for note in notes:
parts = note.split(":")
        if note.strip() == "" or len(parts) < 2:
continue
name = "".join([c for c in parts[0] if c.isalnum() or c=="_"]).replace("sndnt", "sndnt#").replace("rmbnt", "rmbnt#")
if name == "":
continue
cmds.textScrollList(OBJECT_NAMES['xanim'][0]+"_NoteList", edit=True, append=name)
useInMultiExport = cmds.getAttr(OBJECT_NAMES['xanim'][2]+(".useinmultiexport[%i]" % slotIndex))
cmds.checkBox(OBJECT_NAMES['xanim'][0]+"_UseInMultiExportCheckBox", edit=True, value=useInMultiExport)
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ----------------------------------------------------------------------- XCam Export Window -----------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def CreateXCamWindow():
# Create window
if cmds.control(OBJECT_NAMES['xcam'][0], exists=True):
cmds.deleteUI(OBJECT_NAMES['xcam'][0])
cmds.window(OBJECT_NAMES['xcam'][0], title=OBJECT_NAMES['xcam'][1], width=1, height=1, retain=True, maximizeButton=False)
form = cmds.formLayout(OBJECT_NAMES['xcam'][0]+"_Form")
# Controls
    slotDropDown = cmds.optionMenu(OBJECT_NAMES['xcam'][0]+"_SlotDropDown", changeCommand="CoDMayaTools.RefreshXCamWindow()", annotation="Each slot contains a different export path, frame range, notetrack, and saved selection")
for i in range(1, EXPORT_WINDOW_NUMSLOTS+1):
cmds.menuItem(OBJECT_NAMES['xmodel'][0]+"_SlotDropDown"+("_s%i" % i), label="Slot %i" % i)
separator1 = cmds.separator(style='in')
separator2 = cmds.separator(style='in')
separator3 = cmds.separator(style='in')
framesLabel = cmds.text(label="Frames:", annotation="Range of frames to export")
framesStartField = cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameStartField", height=21, width=35, minValue=0, changeCommand="CoDMayaTools.UpdateFrameRange('xcam')", annotation="Starting frame to export (inclusive)")
framesToLabel = cmds.text(label="to")
framesEndField = cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameEndField", height=21, width=35, minValue=0, changeCommand="CoDMayaTools.UpdateFrameRange('xcam')", annotation="Ending frame to export (inclusive)")
fpsLabel = cmds.text(label="FPS:")
fpsField = cmds.intField(OBJECT_NAMES['xcam'][0]+"_FPSField", height=21, width=35, value=1, minValue=1, changeCommand="CoDMayaTools.UpdateFramerate('xcam')", annotation="Animation FPS")
#qualityLabel = cmds.text(label="Quality (0-10)", annotation="Quality of the animation, higher values result in less jitter but produce larger files. Default is 0")
#qualityField = cmds.intField(OBJECT_NAMES['xcam'][0]+"_qualityField", height=21, width=35, value=0, minValue=0, maxValue=10, step=1, changeCommand=XCamWindow_UpdateMultiplier, annotation="Quality of the animation, higher values result in less jitter but produce larger files.")
notetracksLabel = cmds.text(label="Notetrack:", annotation="Notetrack info for the animation")
noteList = cmds.textScrollList(OBJECT_NAMES['xcam'][0]+"_NoteList", allowMultiSelection=False, selectCommand="CoDMayaTools.SelectNote('xcam')", annotation="List of notes in the notetrack")
addNoteButton = cmds.button(label="Add Note", width=75, command="CoDMayaTools.AddNote('xcam')", annotation="Add a note to the notetrack")
ReadNotesButton = cmds.button(label="Grab Notes", width=75, command="CoDMayaTools.ReadNotetracks('xcam')", annotation="Grab Notes from Notetrack in Outliner")
RenameNoteTrack = cmds.button(label="Rename Note", command="CoDMayaTools.RenameNotes('xcam')", annotation="Rename the currently selected note.")
removeNoteButton = cmds.button(label="Remove Note", command="CoDMayaTools.RemoveNote('xcam')", annotation="Remove the currently selected note from the notetrack")
noteFrameLabel = cmds.text(label="Frame:", annotation="The frame the currently selected note is applied to")
noteFrameField = cmds.intField(OBJECT_NAMES['xcam'][0]+"_NoteFrameField", changeCommand="CoDMayaTools.UpdateNoteFrame('xcam')", height=21, width=30, minValue=0, annotation="The frame the currently selected note is applied to")
    GrabFrames = cmds.button(label="Grab Frames", width=75, command="CoDMayaTools.SetFrames('xcam')", annotation="Get the start and end frames from the scene.")
ClearNotes = cmds.button(label="Clear Notes", width=75, command="CoDMayaTools.ClearNotes('xcam')", annotation="Clear ALL notetracks.")
saveToLabel = cmds.text(label="Save to:", annotation="This is where .xcam_export is saved to")
saveToField = cmds.textField(OBJECT_NAMES['xcam'][0]+"_SaveToField", height=21, changeCommand="CoDMayaTools.GeneralWindow_SaveToField('xcam')", annotation="This is where .xcam_export is saved to")
fileBrowserButton = cmds.button(label="...", height=21, command="CoDMayaTools.GeneralWindow_FileBrowser('xcam', \"XCam Intermediate File (*.xcam_export)\")", annotation="Open a file browser dialog")
    exportSelectedButton = cmds.button(label="Export Selected", command="CoDMayaTools.GeneralWindow_ExportSelected('xcam', False)", annotation="Export all currently selected joints from the scene (specified frames)\nWarning: Will automatically overwrite the export path if it already exists")
saveSelectionButton = cmds.button(label="Save Selection", command="CoDMayaTools.GeneralWindow_SaveSelection('xcam')", annotation="Save the current object selection")
getSavedSelectionButton = cmds.button(label="Get Saved Selection", command="CoDMayaTools.GeneralWindow_GetSavedSelection('xcam')", annotation="Reselect the saved selection")
exportMultipleSlotsButton = cmds.button(label="Export Multiple Slots", command="CoDMayaTools.GeneralWindow_ExportMultiple('xcam')", annotation="Automatically export multiple slots at once, using each slot's saved selection")
    exportInMultiExportCheckbox = cmds.checkBox(OBJECT_NAMES['xcam'][0]+"_UseInMultiExportCheckBox", label="Use current slot for Export Multiple", changeCommand="CoDMayaTools.GeneralWindow_ExportInMultiExport('xcam')", annotation="Check this to make the 'Export Multiple Slots' button export this slot")
#ReverseAnimation = cmds.checkBox("CoDMAYA_ReverseAnim", label="Export Animation Reversed", annotation="Check this if you want to export the anim. backwards. Usefule for reversing to make opposite sprints, etc.", value=False)
# Setup form
cmds.formLayout(form, edit=True,
attachForm=[(slotDropDown, 'top', 6), (slotDropDown, 'left', 10), (slotDropDown, 'right', 10),
(separator1, 'left', 0), (separator1, 'right', 0),
(framesLabel, 'left', 10),
(fpsLabel, 'left', 10),
#(qualityLabel, 'left', 10),
(notetracksLabel, 'left', 10),
(noteList, 'left', 10),
#(ReverseAnimation, 'left', 10),
(addNoteButton, 'right', 10),
(ReadNotesButton, 'right', 10),
(RenameNoteTrack, 'right', 10),
(ClearNotes, 'right', 10),
(removeNoteButton, 'right', 10),
(noteFrameField, 'right', 10),
(separator2, 'left', 0), (separator2, 'right', 0),
(saveToLabel, 'left', 12),
(fileBrowserButton, 'right', 10),
(exportMultipleSlotsButton, 'bottom', 6), (exportMultipleSlotsButton, 'left', 10),
(exportInMultiExportCheckbox, 'bottom', 9), (exportInMultiExportCheckbox, 'right', 6),
(exportSelectedButton, 'left', 10),
(saveSelectionButton, 'right', 10),
(separator3, 'left', 0), (separator3, 'right', 0)],
attachControl=[ (separator1, 'top', 6, slotDropDown),
(framesLabel, 'top', 8, separator1),
(framesStartField, 'top', 5, separator1), (framesStartField, 'left', 4, framesLabel),
(framesToLabel, 'top', 8, separator1), (framesToLabel, 'left', 4+35+4, framesLabel),
(framesEndField, 'top', 5, separator1), (framesEndField, 'left', 4, framesToLabel),
(GrabFrames, 'top', 5, separator1), (GrabFrames, 'left', 4, framesEndField),
(fpsLabel, 'top', 8, framesStartField),
(fpsField, 'top', 5, framesStartField), (fpsField, 'left', 21, fpsLabel),
#(qualityLabel, 'top', 8, fpsField),
#(qualityField, 'top', 5, fpsField), (qualityField, 'left', 21, qualityLabel),
(notetracksLabel, 'top', 5, fpsField),
(noteList, 'top', 5, notetracksLabel), (noteList, 'right', 10, removeNoteButton), (noteList, 'bottom', 60, separator2),
#(ReverseAnimation, 'top', 10, noteList), (ReverseAnimation, 'right', 10, removeNoteButton),
(addNoteButton, 'top', 5, notetracksLabel),
(ReadNotesButton, 'top', 5, addNoteButton),
(RenameNoteTrack, 'top', 5, ReadNotesButton),
(ClearNotes, 'top', 5, RenameNoteTrack),
(removeNoteButton, 'top', 5, ClearNotes),
(noteFrameField, 'top', 5, removeNoteButton),
(noteFrameLabel, 'top', 8, removeNoteButton), (noteFrameLabel, 'right', 4, noteFrameField),
(separator2, 'bottom', 5, fileBrowserButton),
(saveToLabel, 'bottom', 10, exportSelectedButton),
(saveToField, 'bottom', 5, exportSelectedButton), (saveToField, 'left', 5, saveToLabel), (saveToField, 'right', 5, fileBrowserButton),
(fileBrowserButton, 'bottom', 5, exportSelectedButton),
(exportSelectedButton, 'bottom', 5, separator3),
(saveSelectionButton, 'bottom', 5, separator3),
(getSavedSelectionButton, 'bottom', 5, separator3), (getSavedSelectionButton, 'right', 10, saveSelectionButton),
(separator3, 'bottom', 5, exportMultipleSlotsButton)
])
def RefreshXCamWindow():
# Refresh/create node
if len(cmds.ls(OBJECT_NAMES['xcam'][2])) == 0:
cmds.createNode("renderLayer", name=OBJECT_NAMES['xcam'][2], skipSelect=True)
cmds.lockNode(OBJECT_NAMES['xcam'][2], lock=False)
if not cmds.attributeQuery("slot", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="slot", attributeType='short', defaultValue=1)
if not cmds.attributeQuery("paths", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="paths", multi=True, dataType='string')
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".paths", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("selections", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="selections", multi=True, dataType='stringArray')
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".selections", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("frameRanges", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="frameRanges", multi=True, dataType='long2')
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".frameRanges", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("framerate", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="framerate", multi=True, attributeType='long', defaultValue=30)
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".framerate", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("multiplier", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="multiplier", multi=True, attributeType='long', defaultValue=30)
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".multiplier", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("notetracks", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="notetracks", multi=True, dataType='string') # Formatted as "<name>:<frame>,<name>:<frame>,..."
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".notetracks", size=EXPORT_WINDOW_NUMSLOTS)
if not cmds.attributeQuery("useinmultiexport", node=OBJECT_NAMES['xcam'][2], exists=True):
cmds.addAttr(OBJECT_NAMES['xcam'][2], longName="useinmultiexport", multi=True, attributeType='bool', defaultValue=False)
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".useinmultiexport", size=EXPORT_WINDOW_NUMSLOTS)
cmds.lockNode(OBJECT_NAMES['xcam'][2], lock=True)
# Set values
slotIndex = cmds.optionMenu(OBJECT_NAMES['xcam'][0]+"_SlotDropDown", query=True, select=True)
cmds.setAttr(OBJECT_NAMES['xcam'][2]+".slot", slotIndex)
path = cmds.getAttr(OBJECT_NAMES['xcam'][2]+(".paths[%i]" % slotIndex))
cmds.textField(OBJECT_NAMES['xcam'][0]+"_SaveToField", edit=True, fileName=path)
frameRange = cmds.getAttr(OBJECT_NAMES['xcam'][2]+(".frameRanges[%i]" % slotIndex))
if frameRange == None:
cmds.setAttr(OBJECT_NAMES['xcam'][2]+(".frameRanges[%i]" % slotIndex), 0, 0, type='long2')
cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameStartField", edit=True, value=0)
cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameEndField", edit=True, value=0)
else:
cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameStartField", edit=True, value=frameRange[0][0])
cmds.intField(OBJECT_NAMES['xcam'][0]+"_FrameEndField", edit=True, value=frameRange[0][1])
framerate = cmds.getAttr(OBJECT_NAMES['xcam'][2]+(".framerate[%i]" % slotIndex))
cmds.intField(OBJECT_NAMES['xcam'][0]+"_FPSField", edit=True, value=framerate)
noteFrameField = cmds.intField(OBJECT_NAMES['xcam'][0]+"_NoteFrameField", edit=True, value=0)
cmds.textScrollList(OBJECT_NAMES['xcam'][0]+"_NoteList", edit=True, removeAll=True)
noteList = cmds.getAttr(OBJECT_NAMES['xcam'][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
for note in notes:
parts = note.split(":")
        if note.strip() == "" or len(parts) < 2:
continue
name = "".join([c for c in parts[0] if c.isalnum() or c=="_"])
if name == "":
continue
cmds.textScrollList(OBJECT_NAMES['xcam'][0]+"_NoteList", edit=True, append=name)
useInMultiExport = cmds.getAttr(OBJECT_NAMES['xcam'][2]+(".useinmultiexport[%i]" % slotIndex))
cmds.checkBox(OBJECT_NAMES['xcam'][0]+"_UseInMultiExportCheckBox", edit=True, value=useInMultiExport)
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------- xAnim/xCam Export Data --------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def SetFrames(windowID):
"""
    Queries the scene's start and end frames and sets them for the window given by windowID.
"""
start = cmds.playbackOptions(minTime=True, query=True)
    end = cmds.playbackOptions(maxTime=True, query=True) # Query start and end frames.
cmds.intField(OBJECT_NAMES[windowID][0] + "_FrameStartField", edit=True, value=start)
cmds.intField(OBJECT_NAMES[windowID][0] + "_FrameEndField", edit=True, value=end)
UpdateFrameRange(windowID)
def UpdateFrameRange(windowID):
"""
Updates start and end frame when set by user or by other means.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
start = cmds.intField(OBJECT_NAMES[windowID][0]+"_FrameStartField", query=True, value=True)
end = cmds.intField(OBJECT_NAMES[windowID][0]+"_FrameEndField", query=True, value=True)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".frameRanges[%i]" % slotIndex), start, end, type='long2')
def UpdateFramerate(windowID):
"""
Updates framerate when set by user or by other means.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
fps = cmds.intField(OBJECT_NAMES[windowID][0]+"_FPSField", query=True, value=True)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".framerate[%i]" % slotIndex), fps)
def UpdateMultiplier(windowID):
"""
Updates multiplier when set by user or by other means.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
fps = cmds.intField(OBJECT_NAMES[windowID][0]+"_qualityField", query=True, value=True)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".multiplier[%i]" % slotIndex), fps)
def AddNote(windowID):
"""
Add notetrack to window and attribute when user creates one.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
if cmds.promptDialog(title="Add Note to Slot %i's Notetrack" % slotIndex, message="Enter the note's name:\t\t ") != "Confirm":
return
userInput = cmds.promptDialog(query=True, text=True)
noteName = "".join([c for c in userInput if c.isalnum() or c=="_"]).replace("sndnt", "sndnt#").replace("rmbnt", "rmbnt#") # Remove all non-alphanumeric characters
if noteName == "":
MessageBox("Invalid note name")
return
existingItems = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, allItems=True)
	if existingItems != None and noteName in existingItems:
		MessageBox("A note with this name already exists")
		return
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
noteList += "%s:%i," % (noteName, cmds.currentTime(query=True))
	cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string') # use this window's node, not xanim's
cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", edit=True, append=noteName, selectIndexedItem=len((existingItems or []))+1)
SelectNote(windowID)
def __get_notetracks__():
"""Loads all the notetracks in the scene"""
if not cmds.objExists("SENotes"):
cmds.rename(cmds.spaceLocator(), "SENotes")
if not cmds.objExists("SENotes.Notetracks"):
cmds.addAttr("SENotes", longName="Notetracks",
dataType="string", storable=True)
cmds.setAttr("SENotes.Notetracks", "{}", type="string")
# Load the existing notetracks buffer, then ensure we have this notetrack
return json.loads(cmds.getAttr("SENotes.Notetracks"))
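def __save_notetracks__(notetracks):
	"""Hedged sketch (assumed counterpart to __get_notetracks__ above, not part
	of the original tool): persist the notetrack dict back onto the SENotes
	locator as JSON, mirroring the read path."""
	__get_notetracks__() # ensures the SENotes locator and attribute exist
	cmds.setAttr("SENotes.Notetracks", json.dumps(notetracks), type="string")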
def ReadNotetracks(windowID):
"""
Read notetracks from imported animations.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
existingItems = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, allItems=True)
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
# Get Notetracks
notetracks = __get_notetracks__()
# Add notetrack type prefix automatically
write_note_type = QueryToggableOption('PrefixNoteType')
for note, frames in notetracks.iteritems():
# Ignore end/loop_end
if note == "end" or note == "loop_end":
continue
# Check if we want to write notetype
# and if note is not already prefixed.
if(write_note_type and not "nt#" in note):
# Set Sound Note as Standard
note_type = "sndnt"
# Split notetrack's name
notesplit = note.split("_")
			# Check if this is a rumble (first word will be viewmodel/reload)
if(notesplit[0] == "viewmodel" or notesplit[0] == "reload"):
note_type = "rmbnt"
note = note.replace("viewmodel", "reload")
# Append
note = "#".join((note_type, note))
# Loop through note frames
for frame in frames:
# Append to list and scroll list
noteList += "%s:%i," % (note, frame)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string')
cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", edit=True, append=note, selectIndexedItem=len((existingItems or []))+1)
# Set selected note
SelectNote(windowID)
def RenameNotes(windowID):
"""
Rename selected notetrack.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
currentIndex = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, selectIndexedItem=True)
if currentIndex != None and len(currentIndex) > 0 and currentIndex[0] >= 1:
if cmds.promptDialog(title="Rename NoteTrack in slot", message="Enter new notetrack name:\t\t ") != "Confirm":
return
userInput = cmds.promptDialog(query=True, text=True)
noteName = "".join([c for c in userInput if c.isalnum() or c=="_"]).replace("sndnt", "sndnt#").replace("rmbnt", "rmbnt#") # Remove all non-alphanumeric characters
if noteName == "":
MessageBox("Invalid note name")
return
currentIndex = currentIndex[0]
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
noteInfo = notes[currentIndex-1].split(":")
note = int(noteInfo[1])
		NoteTrack = noteName # use the sanitized name rather than the raw input
# REMOVE NOTE
cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", edit=True, removeIndexedItem=currentIndex)
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
del notes[currentIndex-1]
noteList = ",".join(notes)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string')
# REMOVE NOTE
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
noteList += "%s:%i," % (NoteTrack, note) # Add Notes to Aidan's list.
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string')
cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", edit=True, append=NoteTrack, selectIndexedItem=currentIndex)
SelectNote(windowID)
def RemoveNote(windowID):
"""
Remove Note
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
currentIndex = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, selectIndexedItem=True)
if currentIndex != None and len(currentIndex) > 0 and currentIndex[0] >= 1:
currentIndex = currentIndex[0]
cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", edit=True, removeIndexedItem=currentIndex)
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
del notes[currentIndex-1]
noteList = ",".join(notes)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string')
SelectNote(windowID)
def ClearNotes(windowID):
"""
Clear ALL notetracks.
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
notes = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, allItems=True)
if notes is None:
return
for note in notes:
cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", edit=True, removeItem=note)
	# Wipe the stored notetrack string for this slot.
	cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), "", type='string')
SelectNote(windowID)
def UpdateNoteFrame(windowID):
"""
Update notetrack information.
"""
newFrame = cmds.intField(OBJECT_NAMES[windowID][0] + "_NoteFrameField", query = True, value = True)
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
currentIndex = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, selectIndexedItem=True)
if currentIndex != None and len(currentIndex) > 0 and currentIndex[0] >= 1:
currentIndex = currentIndex[0]
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
parts = notes[currentIndex-1].split(":")
if len(parts) < 2:
parts("Error parsing notetrack string (A) at %i: %s" % (currentIndex, noteList))
notes[currentIndex-1] = "%s:%i" % (parts[0], newFrame)
noteList = ",".join(notes)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex), noteList, type='string')
def SelectNote(windowID):
"""
Select notetrack
"""
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
currentIndex = cmds.textScrollList(OBJECT_NAMES[windowID][0]+"_NoteList", query=True, selectIndexedItem=True)
if currentIndex != None and len(currentIndex) > 0 and currentIndex[0] >= 1:
currentIndex = currentIndex[0]
noteList = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".notetracks[%i]" % slotIndex)) or ""
notes = noteList.split(",")
parts = notes[currentIndex-1].split(":")
if len(parts) < 2:
error("Error parsing notetrack string (B) at %i: %s" % (currentIndex, noteList))
frame=0
try:
frame = int(parts[1])
except ValueError:
pass
noteFrameField = cmds.intField(OBJECT_NAMES[windowID][0]+"_NoteFrameField", edit=True, value=frame)
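# Hedged helper sketch (not part of the original tool): the notetrack
# attribute manipulated by the functions above is a flat string of
# "<name>:<frame>," pairs. These two hypothetical helpers make the round-trip
# that the UI code performs inline explicit.
def ParseNoteString(noteList):
	"""Return a list of (name, frame) tuples from a "<name>:<frame>,..." string."""
	pairs = []
	for note in (noteList or "").split(","):
		parts = note.split(":")
		if len(parts) < 2 or parts[0].strip() == "":
			continue
		try:
			pairs.append((parts[0], int(parts[1])))
		except ValueError:
			pass
	return pairs

def BuildNoteString(pairs):
	"""Inverse of ParseNoteString: serialize (name, frame) tuples back into the stored string."""
	return "".join(["%s:%i," % (name, frame) for name, frame in pairs])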
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------- General Export Window ---------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# GeneralWindow_... are callback functions that are used by both export windows
def GeneralWindow_SaveToField(windowID):
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
filePath = cmds.textField(OBJECT_NAMES[windowID][0]+"_SaveToField", query=True, fileName=True)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".paths[%i]" % slotIndex), filePath, type='string')
def GeneralWindow_FileBrowser(windowID, formatExtension="*"):
current_game = GetCurrentGame()
defaultFolder = GetRootFolder(None, current_game)
if windowID == 'xanim':
defaultFolder = defaultFolder + 'xanim_export/'
# Switch these around depending on title user has selected.
# and whether we're using ExportX
formatExtension = (
"XAnim Binary File (.xanim_bin) (*.xanim_bin);;"
"XAnim ASCII File (.xanim_export) (*.xanim_export)"
if GetCurrentGame() == "CoD12" and not USE_EXPORT_X else
"XAnim ASCII File (.xanim_export) (*.xanim_export);;"
"XAnim Binary File (.xanim_bin) (*.xanim_bin)")
elif windowID == 'xcam':
defaultFolder = defaultFolder + 'xanim_export/'
elif windowID == 'xmodel':
defaultFolder = defaultFolder + 'model_export/'
# Switch these around depending on title user has selected.
# and whether we're using ExportX
formatExtension = (
"XModel Binary File (.xmodel_bin) (*.xmodel_bin);;"
"XModel ASCII File (.xmodel_export) (*.xmodel_export)"
if GetCurrentGame() == "CoD12" and not USE_EXPORT_X else
"XModel ASCII File (.xmodel_export) (*.xmodel_export);;"
"XModel Binary File (.xmodel_bin) (*.xmodel_bin)")
saveTo = cmds.fileDialog2(fileMode=0, fileFilter=formatExtension, caption="Export To", startingDirectory=defaultFolder)
if saveTo == None or len(saveTo) == 0 or saveTo[0].strip() == "":
return
saveTo = saveTo[0].strip()
cmds.textField(OBJECT_NAMES[windowID][0]+"_SaveToField", edit=True, fileName=saveTo)
GeneralWindow_SaveToField(windowID)
def GeneralWindow_SaveSelection(windowID):
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
selection = cmds.ls(selection=True)
if selection == None or len(selection) == 0:
return
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".selections[%i]" % slotIndex), len(selection), *selection, type='stringArray')
def GeneralWindow_GetSavedSelection(windowID):
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
selection = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".selections[%i]" % slotIndex))
validSelection = []
for obj in selection:
if cmds.objExists(obj):
validSelection.append(obj)
# Remove non-existing objects from the saved list
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".selections[%i]" % slotIndex), len(validSelection), *validSelection, type='stringArray')
if validSelection == None or len(validSelection) == 0:
MessageBox("No selection saved to slot %i" % slotIndex)
return False
cmds.select(validSelection)
return True
def GeneralWindow_ExportSelected(windowID, exportingMultiple):
global WarningsDuringExport
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
# Get path
filePath = cmds.textField(OBJECT_NAMES[windowID][0]+"_SaveToField", query=True, fileName=True)
if filePath.strip() == "":
if exportingMultiple:
MessageBox("Invalid path on slot %i:\n\nPath is empty." % slotIndex)
else:
MessageBox("Invalid path:\n\nPath is empty.")
return
if os.path.isdir(filePath):
if exportingMultiple:
MessageBox("Invalid path on slot %i:\n\nPath points to an existing directory." % slotIndex)
else:
MessageBox("Invalid path:\n\nPath points to an existing directory.")
return
# Save reminder
if not exportingMultiple and not SaveReminder():
return
# Progress bar
if cmds.control("w"+OBJECT_NAMES['progress'][0], exists=True):
cmds.deleteUI("w"+OBJECT_NAMES['progress'][0])
progressWindow = cmds.window("w"+OBJECT_NAMES['progress'][0], title=OBJECT_NAMES['progress'][1], width=302, height=22, sizeable=False)
cmds.columnLayout()
progressControl = cmds.progressBar(OBJECT_NAMES['progress'][0], width=300)
if QueryToggableOption("PrintExport") and windowID == "xmodel":
cmds.scrollField("ExportLog", editable=False, wordWrap=False, width = 300)
cmds.showWindow(progressWindow)
cmds.refresh() # Force the progress bar to be drawn
# Export
if not exportingMultiple:
WarningsDuringExport = 0
response = None
try:
exec("response = %s(\"%s\")" % (OBJECT_NAMES[windowID][4], filePath))
except Exception as e:
response = "An unhandled error occurred during export:\n\n" + traceback.format_exc()
cmds.deleteUI(progressWindow, window=True)
# Handle response
if type(response) == str or type(response) == unicode:
if exportingMultiple:
MessageBox("Slot %i\n\n%s" % (slotIndex, response))
else:
MessageBox(response)
elif WarningsDuringExport > 0 and not exportingMultiple:
MessageBox("Warnings occurred during export. Check the script editor output for more details.")
def GeneralWindow_ExportMultiple(windowID):
originalSlotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
any = False
for i in range(1, EXPORT_WINDOW_NUMSLOTS+1):
useInMultiExport = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".useinmultiexport[%i]" % i))
if useInMultiExport:
any = True
break
if not any:
MessageBox("No slots set to export.")
return
if not SaveReminder():
return
	global WarningsDuringExport # without this, the check below reads a shadowing local that stays 0
	WarningsDuringExport = 0
originalSelection = cmds.ls(selection=True)
for i in range(1, EXPORT_WINDOW_NUMSLOTS+1):
useInMultiExport = cmds.getAttr(OBJECT_NAMES[windowID][2]+(".useinmultiexport[%i]" % i))
if useInMultiExport:
print "Exporting slot %i in multiexport" % i
cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", edit=True, select=i)
exec(OBJECT_NAMES[windowID][3] + "()") # Refresh window
if GeneralWindow_GetSavedSelection(windowID):
GeneralWindow_ExportSelected(windowID, True)
if originalSelection == None or len(originalSelection) == 0:
cmds.select(clear=True)
else:
cmds.select(originalSelection)
if WarningsDuringExport > 0:
MessageBox("Warnings occurred during export. Check the script editor output for more details.")
# Reset slot
cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", edit=True, select=originalSlotIndex)
exec(OBJECT_NAMES[windowID][3] + "()") # Refresh window
def GeneralWindow_ExportInMultiExport(windowID):
slotIndex = cmds.optionMenu(OBJECT_NAMES[windowID][0]+"_SlotDropDown", query=True, select=True)
useInMultiExport = cmds.checkBox(OBJECT_NAMES[windowID][0]+"_UseInMultiExportCheckBox", query=True, value=True)
cmds.setAttr(OBJECT_NAMES[windowID][2]+(".useinmultiexport[%i]" % slotIndex), useInMultiExport)
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# --------------------------------------------------------------------------- General GUI --------------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def SaveReminder(allowUnsaved=True):
if cmds.file(query=True, modified=True):
if cmds.file(query=True, exists=True):
result = cmds.confirmDialog(message="Save changes to %s?" % cmds.file(query=True, sceneName=True), button=["Yes", "No", "Cancel"], defaultButton="Yes", title="Save Changes")
if result == "Yes":
cmds.file(save=True)
elif result != "No":
return False
else: # The file has never been saved (has no name)
if allowUnsaved:
result = cmds.confirmDialog(message="The current scene is not saved. Continue?", button=["Yes", "No"], defaultButton="Yes", title="Save Changes")
if result != "Yes":
return False
else:
MessageBox("The scene needs to be saved first")
return False
return True
def PrintWarning(message):
global WarningsDuringExport
if WarningsDuringExport < MAX_WARNINGS_SHOWN:
print "WARNING: %s" % message
WarningsDuringExport += 1
elif WarningsDuringExport == MAX_WARNINGS_SHOWN:
print "More warnings not shown because printing text is slow...\n"
WarningsDuringExport = MAX_WARNINGS_SHOWN+1
def MessageBox(message):
cmds.confirmDialog(message=message, button='OK', defaultButton='OK', title=OBJECT_NAMES['menu'][1])
def ShowWindow(windowID):
exec(OBJECT_NAMES[windowID][3] + "()") # Refresh window
cmds.showWindow(OBJECT_NAMES[windowID][0])
def ProgressBarStep():
cmds.progressBar(OBJECT_NAMES['progress'][0], edit=True, step=1)
def LogExport(text, isWarning = False):
if QueryToggableOption("PrintExport"):
if isWarning:
global WarningsDuringExport
if WarningsDuringExport < MAX_WARNINGS_SHOWN:
cmds.scrollField("ExportLog", edit = True, insertText = text)
WarningsDuringExport += 1
elif WarningsDuringExport == MAX_WARNINGS_SHOWN:
cmds.scrollField("ExportLog", edit = True, insertText = "More warnings not shown because printing text is slow...\n")
WarningsDuringExport = MAX_WARNINGS_SHOWN+1
else:
cmds.scrollField("ExportLog", edit = True, insertText = text)
def AboutWindow():
result = cmds.confirmDialog(message="Call of Duty Tools for Maya, created by Aidan Shafran (with assistance from The Internet).\nMaintained by Ray1235 (Maciej Zaremba) & Scobalula\n\nThis script is under the GNU General Public License. You may modify or redistribute this script, however it comes with no warranty. Go to http://www.gnu.org/licenses/ for more details.\n\nVersion: %.2f" % FILE_VERSION, button=['OK', 'Visit Github Repo', 'CoD File Formats'], defaultButton='OK', title="About " + OBJECT_NAMES['menu'][1])
if result == "Visit Github Repo":
webbrowser.open("https://github.com/Ray1235/CoDMayaTools")
elif result == "CoD File Formats":
webbrowser.open("http://aidanshafran.com/codmayatools/codformats.html")
def LegacyWindow():
result = cmds.confirmDialog(message="""CoD1 mode exports models that are compatible with CoD1.
When this mode is disabled, the plugin will export models that are compatible with CoD2 and newer.
""", button=['OK'], defaultButton='OK', title="Legacy options")
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
# ----------------------------------------------------------------------- Get/Set Root Folder ----------------------------------------------------------------------
# ------------------------------------------------------------------------------------------------------------------------------------------------------------------
def SetRootFolder(msg=None, game="none"):
#if game == "none":
# game = currentGame
#if game == "none":
# res = cmds.confirmDialog(message="Please select the game you're working with", button=['OK'], defaultButton='OK', title="WARNING")
# return None
# Get current root folder (this also makes sure the reg key exists)
codRootPath = GetRootFolder(False, game)
# Open input box
#if cmds.promptDialog(title="Set Root Path", message=msg or "Change your root path:\t\t\t", text=codRootPath) != "Confirm":
# return None
	results = cmds.fileDialog2(fileMode=3, dialogStyle=2)
	if results == None or len(results) == 0: # user cancelled the dialog
		return None
	codRootPath = results[0] + "/"
# Check to make sure the path exists
if not os.path.isdir(codRootPath):
MessageBox("Given root path does not exist")
return None
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_SET_VALUE)
reg.SetValueEx(storageKey, "%sRootPath" % game, 0, reg.REG_SZ, codRootPath)
reg.CloseKey(storageKey)
return codRootPath
def GetRootFolder(firstTimePrompt=False, game="none"):
codRootPath = ""
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
codRootPath = reg.QueryValueEx(storageKey, "%sRootPath" % game)[0]
reg.CloseKey(storageKey)
except WindowsError:
print(traceback.format_exc())
# First time, create key
storageKey = reg.CreateKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
reg.SetValueEx(storageKey, "RootPath", 0, reg.REG_SZ, "")
reg.CloseKey(storageKey)
if not os.path.isdir(codRootPath):
codRootPath = ""
# First-time prompt
if firstTimePrompt:
result = SetRootFolder("Your root folder path hasn't been confirmed yet. If the following is not\ncorrect, please fix it:", game)
if result:
codRootPath = result
return codRootPath
def RunExport2Bin(file):
p = GetExport2Bin()
directory = os.path.dirname(os.path.realpath(file))
if os.path.splitext(os.path.basename(p))[0] == "export2bin":
p = subprocess.Popen([p, "*"], cwd=directory)
elif os.path.splitext(os.path.basename(p))[0] == "exportx":
p = subprocess.Popen([p, "-f %s" % file])
p.wait()
if(QueryToggableOption('DeleteExport')):
os.remove(file)
def SetExport2Bin():
export2binpath = cmds.fileDialog2(fileMode=1, dialogStyle=2)[0]
# Check to make sure the path exists
if not os.path.isfile(export2binpath):
MessageBox("Given path does not exist")
return ""
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_SET_VALUE)
reg.SetValueEx(storageKey, "Export2BinPath", 0, reg.REG_SZ, export2binpath)
reg.CloseKey(storageKey)
return export2binpath
def GetExport2Bin(skipSet=True):
export2binpath = ""
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
export2binpath = reg.QueryValueEx(storageKey, "Export2BinPath")[0]
reg.CloseKey(storageKey)
except WindowsError:
# First time, create key
storageKey = reg.CreateKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
reg.SetValueEx(storageKey, "Export2BinPath", 0, reg.REG_SZ, "")
reg.CloseKey(storageKey)
if not os.path.isfile(export2binpath):
export2binpath = ""
if not skipSet:
result = SetExport2Bin()
if result:
export2binpath = result
return export2binpath
def CheckForUpdatesEXE():
# Check if we want updates
if QueryToggableOption("AutoUpdate"):
# Try run application
try:
p = ("%s -name %s -version %f -version_info_url %s"
% (os.path.join(WORKING_DIR, "autoUpdate.exe"),
"CoDMayaTools.py",
FILE_VERSION,
VERSION_CHECK_URL))
subprocess.Popen("%s %f" % (os.path.join(WORKING_DIR, "Updater.exe"), FILE_VERSION))
except:
# Failed, exit.
return
else:
return
#def SetGame(name):
# currentGame = name
##########################################################
# Ray's Animation Toolkit #
# #
# Credits: #
# Aidan - teaching me how to make plugins like this :) #
##########################################################
def GenerateCamAnim(reqarg=""):
useDefMesh = False
if (cmds.objExists(getObjectByAlias("camera")) == False):
print "Camera doesn't exist"
return
if (cmds.objExists(getObjectByAlias("weapon")) == False):
print "Weapon doesn't exist"
return
animStart = cmds.playbackOptions(query=True, minTime=True)
animEnd = cmds.playbackOptions(query=True, maxTime=True)
jointGun = cmds.xform(getObjectByAlias("weapon"), query=True, rotation=True)
jointGunPos = cmds.xform(getObjectByAlias("weapon"), query=True, translation=True)
GunMoveXorig = jointGunPos[0]*-0.025
GunRotYAddorig = jointGunPos[0]*-0.5
GunRotXAddorig = jointGunPos[1]*-0.25
progressW = cmds.progressWindow(minValue=animStart,maxValue=animEnd)
for i in range(int(animStart),int(animEnd+1)):
cmds.currentTime(i)
jointGun = cmds.xform(getObjectByAlias("weapon"), query=True, rotation=True)
jointGunPos = cmds.xform(getObjectByAlias("weapon"), query=True, translation=True)
GunMoveX = jointGunPos[0]*-0.025
GunRotYAdd = jointGunPos[0]*-0.5
GunRotXAdd = jointGunPos[1]*-0.25
		# Scale the gun rotation down to a subtle sway for the camera.
		GunRot = [axis * 0.025 for axis in jointGun]
print GunRot
print jointGun
cmds.select(getObjectByAlias("camera"), replace=True)
# cmds.rotate(GunRot[0], GunRot[1], GunRot[2], rotateXYZ=True)
cmds.setKeyframe(v=(GunMoveX-GunMoveXorig),at='translateX')
cmds.setKeyframe(v=GunRot[0]+(GunRotXAdd-GunRotXAddorig),at='rotateX')
cmds.setKeyframe(v=(GunRot[1]+(GunRotYAdd-GunRotYAddorig)),at='rotateY')
cmds.setKeyframe(v=GunRot[2],at='rotateZ')
cmds.progressWindow(edit=True,step=1)
cmds.progressWindow(edit=True,endProgress=True)
def RemoveCameraKeys(reqarg=""):
if (cmds.objExists(getObjectByAlias("camera")) == False):
print "ERROR: Camera doesn't exist"
return
else:
print "Camera exists!"
jointCamera = cmds.joint(getObjectByAlias("camera"), query=True)
animStart = cmds.playbackOptions(query=True, minTime=True)
animEnd = cmds.playbackOptions(query=True, maxTime=True)
cmds.select(getObjectByAlias("camera"), replace=True)
#cmds.setAttr('tag_camera.translateX',0)
#cmds.setAttr('tag_camera.translateY',0)
#cmds.setAttr('tag_camera.translateZ',0)
#cmds.setAttr('tag_camera.rotateX',0)
#cmds.setAttr('tag_camera.rotateY',0)
#cmds.setAttr('tag_camera.rotateZ',0)
# cmds.rotate(GunRot[0], GunRot[1], GunRot[2], rotateXYZ=True)
cmds.cutKey(clear=True,time=(animStart,animEnd+1))
def RemoveCameraAnimData(reqarg=""):
if (cmds.objExists(getObjectByAlias("camera")) == False):
print "ERROR: Camera doesn't exist"
return
else:
print "Camera exists!"
jointCamera = cmds.joint(getObjectByAlias("camera"), query=True)
animStart = cmds.playbackOptions(query=True, animationStartTime=True)
animEnd = cmds.playbackOptions(query=True, animationEndTime=True)
cmds.cutKey(clear=True,time=(animStart,animEnd+1))
cmds.select(getObjectByAlias("camera"), replace=True)
cmds.setAttr(getObjectByAlias("camera")+'.translateX',0)
cmds.setAttr(getObjectByAlias("camera")+'.translateY',0)
cmds.setAttr(getObjectByAlias("camera")+'.translateZ',0)
cmds.setAttr(getObjectByAlias("camera")+'.rotateX',0)
cmds.setAttr(getObjectByAlias("camera")+'.rotateY',0)
cmds.setAttr(getObjectByAlias("camera")+'.rotateZ',0)
def setObjectAlias(aname):
if len(cmds.ls("CoDMayaTools")) == 0:
cmds.createNode("renderLayer", name="CoDMayaTools", skipSelect=True)
if not cmds.attributeQuery("objAlias%s" % aname, node="CoDMayaTools", exists=True):
cmds.addAttr("CoDMayaTools", longName="objAlias%s" % aname, dataType='string')
	objects = cmds.ls(selection=True)
if len(objects) == 1:
print "Marking selected object as %s" % aname
else:
print "Selected more than 1 object or none at all"
return
obj = objects[0]
cmds.setAttr("CoDMayaTools.objAlias%s" % aname, obj, type='string')
def getObjectByAlias(aname):
if len(cmds.ls("CoDMayaTools")) == 0:
cmds.createNode("renderLayer", name="CoDMayaTools", skipSelect=True)
if not cmds.attributeQuery("objAlias%s" % aname, node="CoDMayaTools", exists=True):
return ""
return cmds.getAttr("CoDMayaTools.objAlias%s" % aname) or ""
# Bind the weapon to hands
def WeaponBinder():
# Call of Duty specific
for x in xrange(0, len(GUN_BASE_TAGS)):
try:
# Select both tags and parent them
cmds.select(GUN_BASE_TAGS[x], replace = True)
cmds.select(VIEW_HAND_TAGS[x], toggle = True)
# Connect
cmds.connectJoint(connectMode = True)
# Parent
mel.eval("parent " + GUN_BASE_TAGS[x] + " " + VIEW_HAND_TAGS[x])
# Reset the positions of both bones
cmds.setAttr(GUN_BASE_TAGS[x] + ".t", 0, 0, 0)
cmds.setAttr(GUN_BASE_TAGS[x] + ".jo", 0, 0, 0)
cmds.setAttr(GUN_BASE_TAGS[x] + ".rotate", 0, 0, 0)
# Reset the rotation of the parent tag
cmds.setAttr(VIEW_HAND_TAGS[x] + ".jo", 0, 0, 0)
cmds.setAttr(VIEW_HAND_TAGS[x] + ".rotate", 0, 0, 0)
# Remove
cmds.select(clear = True)
except:
pass
def SetToggableOption(name="", val=0):
if not val:
val = int(cmds.menuItem(name, query=True, checkBox=True ))
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
except WindowsError:
storageKey = reg.CreateKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
reg.SetValueEx(storageKey, "Setting_%s" % name, 0, reg.REG_DWORD, val )
def QueryToggableOption(name=""):
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
		reg.QueryValueEx(storageKey, "Setting_%s" % name)[0] # probe only; raises WindowsError if the value does not exist yet
except WindowsError:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1], 0, reg.KEY_ALL_ACCESS)
try:
reg.SetValueEx(storageKey, "Setting_%s" % name, 0, reg.REG_DWORD , 0 )
except:
return 1
return reg.QueryValueEx(storageKey, "Setting_%s" % name)[0]
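def _ToggableOptionDemo():
	"""Hedged sketch (not part of the original tool): round-trips one of the
	registry-backed options defined above. "AutoUpdate" is an option name this
	file already uses. Note that SetToggableOption treats val=0 as "query the
	menu checkbox", so an explicit 0 cannot be written without a UI query."""
	SetToggableOption(name="AutoUpdate", val=1)
	assert QueryToggableOption("AutoUpdate") == 1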
# ---- Create windows ----
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
except WindowsError:
	storageKey = reg.CreateKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1]) # The bin function above tries to open this key and stops when it does not exist yet, so create it here as a fallback.
try:
storageKey = reg.OpenKey(GLOBAL_STORAGE_REG_KEY[0], GLOBAL_STORAGE_REG_KEY[1])
codRootPath = reg.QueryValueEx(storageKey, "RootPath")[0]
reg.CloseKey(storageKey)
except WindowsError:
cmds.confirmDialog(message="It looks like this is your first time running CoD Maya Tools.\nYou will be asked to choose your game's root path.", button=['OK'], defaultButton='OK', title="First time configuration") #MessageBox("Please set your root path before starting to work with CoD Maya Tools")
result = cmds.confirmDialog(message="Which Game will you be working with? (Can be changed in settings)\n\nCoD4 = MW, CoD5 = WaW, CoD7 = BO1, CoD12 = Bo3", button=['CoD1', 'CoD2', 'CoD4', "CoD5", "CoD7", "CoD12"], defaultButton='OK', title="First time configuration") #MessageBox("Please set your root path before starting to work with CoD Maya Tools")
SetCurrentGame(result)
SetRootFolder(None, result)
res = cmds.confirmDialog(message="Enable Automatic Updates?", button=['Yes', 'No'], defaultButton='No', title="First time configuration")
if res == "Yes":
SetToggableOption(name="AutoUpdate", val=1)
else:
SetToggableOption(name="AutoUpdate", val=0)
cmds.confirmDialog(message="You're set! You can now export models and anims to any CoD!")
CheckForUpdatesEXE()
CreateMenu()
CreateXAnimWindow()
CreateXModelWindow()
CreateXCamWindow()
print "CoDMayaTools initialized."
|
gpl-3.0
| 279,508,431,598,202,340
| 48.054038
| 523
| 0.586234
| false
| 3.794299
| false
| false
| false
|
pbanaszkiewicz/amy
|
amy/workshops/migrations/0012_auto_20150612_0807.py
|
1
|
2658
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
TRANSLATE_NAMES = {
'Git': ['swc/git'],
'Make': ['swc/make'],
'Matlab': ['swc/matlab'],
'Mercurial': ['swc/hg'],
'Python': ['swc/python', 'dc/python'],
'R': ['swc/r', 'dc/r'],
'Regexp': ['swc/regexp'],
'SQL': ['swc/sql', 'dc/sql'],
'Subversion': ['swc/svn'],
'Unix': ['swc/shell', 'dc/shell'],
None: ['dc/spreadsheet', 'dc/cloud']
}
EXTRA_LEGACY_NAMES = ['MATLAB']
def add_new_lesson_names(apps, schema_editor):
'''Add instances of Lesson named after lessons.'''
Lesson = apps.get_model('workshops', 'Lesson')
for (old_name, new_names) in TRANSLATE_NAMES.items():
for name in new_names:
Lesson.objects.create(name=name)
def fix_duplicate_names(apps, schema_editor):
'''Fix references to lessons with case sensitivity in names.'''
Lesson = apps.get_model('workshops', 'Lesson')
Qualification = apps.get_model('workshops', 'Qualification')
try:
right_lesson = Lesson.objects.get(name='Matlab')
wrong_lesson = Lesson.objects.get(name='MATLAB')
Qualification.objects.filter(lesson=wrong_lesson) \
.update(lesson=right_lesson)
except Lesson.DoesNotExist:
pass
def replace_qualifications(apps, schema_editor):
'''Add qualification entries with new lesson names and delete old ones.'''
Lesson = apps.get_model('workshops', 'Lesson')
Qualification = apps.get_model('workshops', 'Qualification')
for q in Qualification.objects.all():
old_name = q.lesson.name
new_names = TRANSLATE_NAMES[old_name]
for name in new_names:
lesson = Lesson.objects.get(name=name)
Qualification.objects.create(lesson=lesson,
person=q.person)
q.delete()
def remove_old_skill_names(apps, schema_editor):
'''Remove legacy instances of Lesson named after skills.'''
Lesson = apps.get_model('workshops', 'Lesson')
for (old_name, new_names) in TRANSLATE_NAMES.items():
if old_name:
Lesson.objects.filter(name=old_name).delete()
for old_name in EXTRA_LEGACY_NAMES:
Lesson.objects.filter(name=old_name).delete()
class Migration(migrations.Migration):
dependencies = [
('workshops', '0011_auto_20150612_0803'),
]
operations = [
migrations.RunPython(add_new_lesson_names),
migrations.RunPython(fix_duplicate_names),
migrations.RunPython(replace_qualifications),
migrations.RunPython(remove_old_skill_names)
]
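# Hedged illustration (not part of the original migration): RunPython also
# accepts a reverse callable, and the data migration above is irreversible as
# written. A no-op reverse like the hypothetical one below could be passed as
# the second argument, e.g. migrations.RunPython(add_new_lesson_names,
# noop_reverse), if rolling back past this migration needed to succeed.
def noop_reverse(apps, schema_editor):
    """Intentionally does nothing when the migration is reversed."""
    pass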
|
mit
| 4,028,732,690,800,716,000
| 32.64557
| 78
| 0.624153
| false
| 3.339196
| false
| false
| false
|
google/shaka-player
|
build/checkversion.py
|
1
|
2659
|
#!/usr/bin/env python
#
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Checks that all the versions match."""
from __future__ import print_function
import logging
import os
import re
import shakaBuildHelpers
def player_version():
"""Gets the version of the library from player.js."""
path = os.path.join(shakaBuildHelpers.get_source_base(), 'lib', 'player.js')
with shakaBuildHelpers.open_file(path, 'r') as f:
match = re.search(r'shaka\.Player\.version = \'(.*)\'', f.read())
return match.group(1) if match else ''
def changelog_version():
"""Gets the version of the library from the CHANGELOG."""
path = os.path.join(shakaBuildHelpers.get_source_base(), 'CHANGELOG.md')
with shakaBuildHelpers.open_file(path, 'r') as f:
match = re.search(r'## (.*) \(', f.read())
return match.group(1) if match else ''
def main(_):
"""Checks that all the versions in the library match."""
changelog = changelog_version()
player = player_version()
git = shakaBuildHelpers.git_version()
npm = shakaBuildHelpers.npm_version()
print('git version: ' + git)
print('npm version: ' + npm)
print('player version: ' + player)
print('changelog version: ' + changelog)
ret = 0
if 'dirty' in git:
logging.error('Git version is dirty.')
ret = 1
elif 'unknown' in git:
logging.error('Git version is not a tag.')
ret = 1
elif not re.match(r'^v[0-9]+\.[0-9]+\.[0-9]+(?:-[a-z0-9]+)?$', git):
logging.error('Git version is a malformed release version.')
logging.error('It should be a \'v\', followed by three numbers')
logging.error('separated by dots, optionally followed by a hyphen')
logging.error('and a pre-release identifier. See http://semver.org/')
ret = 1
if 'v' + npm != git:
logging.error('NPM version does not match git version.')
ret = 1
if player != git + '-uncompiled':
logging.error('Player version does not match git version.')
ret = 1
if 'v' + changelog != git:
logging.error('Changelog version does not match git version.')
ret = 1
return ret
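def _demo_version_checks():
  """Hedged sketch (not part of the build): shows which strings the release
  tag regex in main() accepts -- 'v' + MAJOR.MINOR.PATCH with an optional
  pre-release suffix."""
  tag_re = re.compile(r'^v[0-9]+\.[0-9]+\.[0-9]+(?:-[a-z0-9]+)?$')
  assert tag_re.match('v2.5.0')
  assert tag_re.match('v2.5.0-beta3')
  assert not tag_re.match('v2.5')   # missing patch component
  assert not tag_re.match('2.5.0')  # missing leading 'v'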
if __name__ == '__main__':
shakaBuildHelpers.run_main(main)
|
apache-2.0
| 7,412,155,136,251,303,000
| 30.654762
| 78
| 0.67469
| false
| 3.475817
| false
| false
| false
|
Ambuj-UF/ConCat-1.0
|
src/Utils/Bio/SearchIO/_model/_base.py
|
1
|
2621
|
# Copyright 2012 by Wibowo Arindrarto. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Abstract base classes for the SearchIO object model."""
import sys
# Add path to Bio
sys.path.append('../../..')
from Bio._utils import getattr_str, trim_str
__docformat__ = "restructuredtext en"
class _BaseSearchObject(object):
"""Abstract class for SearchIO objects."""
_NON_STICKY_ATTRS = ()
def _transfer_attrs(self, obj):
"""Transfer instance attributes to the given object.
This method is used to transfer attributes set externally (for example
using `setattr`) to a new object created from this one (for example
from slicing).
The reason this method is necessary is because different parsers will
set different attributes for each QueryResult, Hit, HSP, or HSPFragment
objects, depending on the attributes they found in the search output
file. Ideally, we want these attributes to 'stick' with any new instance
object created from the original one.
"""
# list of attribute names we don't want to transfer
for attr in self.__dict__:
if attr not in self._NON_STICKY_ATTRS:
setattr(obj, attr, self.__dict__[attr])
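class _StickyDemo(_BaseSearchObject):
    """Hedged illustration only (not a Biopython class): a minimal subclass
    used to show _transfer_attrs copying externally-set attributes while
    skipping names listed in _NON_STICKY_ATTRS."""
    _NON_STICKY_ATTRS = ('_cache',)

def _demo_transfer():
    src, dst = _StickyDemo(), _StickyDemo()
    src.program = 'blastn'  # sticky: copied to the new object
    src._cache = object()   # non-sticky: deliberately dropped
    src._transfer_attrs(dst)
    assert dst.program == 'blastn'
    assert not hasattr(dst, '_cache')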
class _BaseHSP(_BaseSearchObject):
"""Abstract base class for HSP objects."""
def _str_hsp_header(self):
"""Prints the alignment header info."""
lines = []
# set query id line
qid_line = trim_str(' Query: %s %s' %
(self.query_id, self.query_description), 80, '...')
# set hit id line
hid_line = trim_str(' Hit: %s %s' %
(self.hit_id, self.hit_description), 80, '...')
lines.append(qid_line)
lines.append(hid_line)
# coordinates
query_start = getattr_str(self, 'query_start')
query_end = getattr_str(self, 'query_end')
hit_start = getattr_str(self, 'hit_start')
hit_end = getattr_str(self, 'hit_end')
# strands
try:
qstrand = self.query_strand
hstrand = self.hit_strand
except ValueError:
qstrand = self.query_strand_all[0]
hstrand = self.hit_strand_all[0]
lines.append('Query range: [%s:%s] (%r)' % (query_start, query_end,
qstrand))
lines.append(' Hit range: [%s:%s] (%r)' % (hit_start, hit_end,
hstrand))
return '\n'.join(lines)
|
gpl-2.0
| 8,958,869,091,809,492,000
| 33.038961
| 80
| 0.604349
| false
| 3.923653
| false
| false
| false
|
jck/uhdl
|
uhdl/helpers.py
|
1
|
1634
|
import functools
import wrapt
from myhdl import SignalType, ResetSignal, delay, always, instance, Simulation
class Clock(SignalType):
"""Clock class for use in simulations"""
def __init__(self, period=2):
self.period = period
if period % 2 != 0:
raise ValueError("period must be divisible by 2")
super(Clock, self).__init__(False)
def gen(self):
@always(delay(self.period/2))
def _clock():
self.next = not self
return _clock
class Reset(ResetSignal):
"""Reset class for use in simulations"""
def __init__(self, val=0, active=0, async=True):
super(Reset, self).__init__(val, active, async)
def pulse(self, time=5):
@instance
def _reset():
self.next = self.active
yield delay(time)
self.next = not self.active
return _reset
def run_sim(*args, **kwargs):
return Simulation(*args).run(**kwargs)
def sim(wrapped=None, duration=None, quiet=False):
"""Decorator which simplifies running a :class:`myhdl.Simulation`
Usage:
.. code-block:: python
@sim
def function_which_returns_generators(...):
...
@sim(duration=n, quiet=False)
def function_which_returns_generators(...):
...
"""
if wrapped is None:
return functools.partial(sim, duration=duration, quiet=quiet)
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
return run_sim(wrapped(*args, **kwargs), duration=duration, quiet=quiet)
return wrapper(wrapped)
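def _example_bench():
    """Hedged usage sketch (not part of the uhdl API): ties Clock, Reset and
    @sim together in a tiny self-contained testbench. Calling the returned
    function runs the simulation for 50 time steps."""
    clk = Clock(period=4)
    rst = Reset(val=0, active=0, async=True)

    @sim(duration=50, quiet=True)
    def bench():
        return clk.gen(), rst.pulse(8)

    return bench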
|
bsd-3-clause
| -5,990,776,191,799,713,000
| 24.53125
| 80
| 0.588739
| false
| 4.064677
| false
| false
| false
|
omniti-labs/circus
|
src/circus/module/activate_metrics.py
|
1
|
2178
|
#!/usr/bin/env python
__cmdname__ = 'activate_metrics'
__cmdopts__ = ''
import sys
import log
import util
class Module(object):
def __init__(self, api, account):
self.api = api
self.account = account
def command(self, opts, pattern, *metrics_to_enable):
"""Activate metrics for checks
Arguments:
pattern -- Pattern for checks
metrics_to_enable -- List of metrics to enable
"""
checks, groups = util.find_checks(self.api, pattern)
already_enabled = {}
# Pick only one check per check bundle
bundles = {}
for c in checks:
if c['bundle_id'] in bundles:
continue
bundles[c['bundle_id']] = c
log.msg("Retrieving metrics for checks")
count = 0
for c in bundles.values():
count += 1
print "\r%s/%s" % (count, len(bundles)),
sys.stdout.flush()
rv = self.api.list_metrics(check_id=c['check_id'])
already_enabled[c['check_id']] = []
for metric in sorted(rv):
if metric['enabled']:
already_enabled[c['check_id']].append(metric['name'])
log.msg("Metrics to enable: %s" % (', '.join(metrics_to_enable)))
log.msg("About to enable metrics for the following checks")
for c in bundles.values():
log.msg(" %s (%s)" % (c['name'],
', '.join(already_enabled[c['check_id']])))
if util.confirm():
for c in bundles.values():
# Enable metrics here
log.msgnb("%s..." % c['name'])
all_metrics = set(already_enabled[c['check_id']]) \
| set(metrics_to_enable)
if all_metrics != set(already_enabled[c['check_id']]):
# The set of metrics has changed, apply the edit
self.api.edit_check_bundle(
bundle_id=c['bundle_id'],
metric_name=list(all_metrics))
log.msgnf("Done")
else:
log.msgnf("No changes")
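def example_enable(api, account):
    """Hedged sketch (not part of the module): drives Module.command
    programmatically. `api` and `account` stand in for the real Circonus
    handles this tool is normally constructed with, and the pattern and
    metric names are placeholders."""
    Module(api, account).command(None, 'www*prod',
                                 'cpu`idle', 'df`/`used_percent')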
|
isc
| -3,565,853,606,667,211,000
| 33.571429
| 73
| 0.495409
| false
| 4.140684
| false
| false
| false
|
calico/basenji
|
bin/basenji_sad_ref_multi.py
|
1
|
6256
|
#!/usr/bin/env python
# Copyright 2017 Calico LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from optparse import OptionParser
import glob
import os
import pickle
import shutil
import subprocess
import sys
import h5py
import numpy as np
try:
import zarr
except ImportError:
pass
import slurm
from basenji_sad_multi import collect_h5
"""
basenji_sad_ref_multi.py
Compute SNP expression difference scores for variants in a VCF file,
using multiple processes.
"""
################################################################################
# main
################################################################################
def main():
usage = 'usage: %prog [options] <params_file> <model_file> <vcf_file>'
parser = OptionParser(usage)
# sad
parser.add_option('-c', dest='center_pct',
default=0.25, type='float',
help='Require clustered SNPs lie in center region [Default: %default]')
parser.add_option('-f', dest='genome_fasta',
default='%s/data/hg19.fa' % os.environ['BASENJIDIR'],
help='Genome FASTA for sequences [Default: %default]')
parser.add_option('--flip', dest='flip_ref',
default=False, action='store_true',
help='Flip reference/alternate alleles when simple [Default: %default]')
parser.add_option('-n', dest='norm_file',
default=None,
help='Normalize SAD scores')
parser.add_option('-o',dest='out_dir',
default='sad',
help='Output directory for tables and plots [Default: %default]')
parser.add_option('--pseudo', dest='log_pseudo',
default=1, type='float',
help='Log2 pseudocount [Default: %default]')
parser.add_option('--rc', dest='rc',
default=False, action='store_true',
help='Average forward and reverse complement predictions [Default: %default]')
parser.add_option('--shifts', dest='shifts',
default='0', type='str',
help='Ensemble prediction shifts [Default: %default]')
parser.add_option('--stats', dest='sad_stats',
default='SAD',
help='Comma-separated list of stats to save. [Default: %default]')
parser.add_option('-t', dest='targets_file',
default=None, type='str',
help='File specifying target indexes and labels in table format')
parser.add_option('--ti', dest='track_indexes',
default=None, type='str',
help='Comma-separated list of target indexes to output BigWig tracks')
parser.add_option('--threads', dest='threads',
default=False, action='store_true',
help='Run CPU math and output in a separate thread [Default: %default]')
parser.add_option('-u', dest='penultimate',
default=False, action='store_true',
help='Compute SED in the penultimate layer [Default: %default]')
# multi
parser.add_option('-e', dest='conda_env',
default='tf2.2-gpu',
help='Anaconda environment [Default: %default]')
parser.add_option('--name', dest='name',
default='sad', help='SLURM name prefix [Default: %default]')
parser.add_option('--max_proc', dest='max_proc',
default=None, type='int',
help='Maximum concurrent processes [Default: %default]')
parser.add_option('-p', dest='processes',
default=None, type='int',
help='Number of processes, passed by multi script')
parser.add_option('-q', dest='queue',
default='gtx1080ti',
help='SLURM queue on which to run the jobs [Default: %default]')
parser.add_option('-r', dest='restart',
default=False, action='store_true',
help='Restart a partially completed job [Default: %default]')
(options, args) = parser.parse_args()
if len(args) != 3:
parser.error('Must provide parameters and model files and VCF file')
else:
params_file = args[0]
model_file = args[1]
vcf_file = args[2]
#######################################################
# prep work
if os.path.isdir(options.out_dir):
if not options.restart:
print('Please remove %s' % options.out_dir, file=sys.stderr)
exit(1)
else:
os.mkdir(options.out_dir)
# pickle options
options_pkl_file = '%s/options.pkl' % options.out_dir
options_pkl = open(options_pkl_file, 'wb')
pickle.dump(options, options_pkl)
options_pkl.close()
#######################################################
# launch worker threads
jobs = []
for pi in range(options.processes):
if not options.restart or not job_completed(options, pi):
cmd = '. /home/drk/anaconda3/etc/profile.d/conda.sh;'
cmd += ' conda activate %s;' % options.conda_env
cmd += ' basenji_sad_ref.py %s %s %d' % (
options_pkl_file, ' '.join(args), pi)
name = '%s_p%d' % (options.name, pi)
outf = '%s/job%d.out' % (options.out_dir, pi)
errf = '%s/job%d.err' % (options.out_dir, pi)
j = slurm.Job(cmd, name,
outf, errf,
queue=options.queue, gpu=1,
mem=22000, time='14-0:0:0')
jobs.append(j)
slurm.multi_run(jobs, max_proc=options.max_proc, verbose=True,
launch_sleep=10, update_sleep=60)
#######################################################
# collect output
collect_h5('sad.h5', options.out_dir, options.processes)
# for pi in range(options.processes):
# shutil.rmtree('%s/job%d' % (options.out_dir,pi))
def job_completed(options, pi):
"""Check whether a specific job has generated its
output file."""
out_file = '%s/job%d/sad.h5' % (options.out_dir, pi)
return os.path.isfile(out_file) or os.path.isdir(out_file)
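def load_worker_options(options_pkl_file):
  """Hedged sketch (not part of this script): how a worker process can
  recover the OptionParser options pickled in main(). The real unpickling
  lives in basenji_sad_ref.py; this only illustrates the handshake."""
  with open(options_pkl_file, 'rb') as options_pkl:
    return pickle.load(options_pkl)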
################################################################################
# __main__
################################################################################
if __name__ == '__main__':
main()
|
apache-2.0
| 9,131,499,811,372,269,000
| 34.146067
| 84
| 0.598465
| false
| 3.728248
| false
| false
| false
|
ecell/ecell3
|
ecell/frontend/session-monitor/ecell/ui/osogo/Window.py
|
1
|
7162
|
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
# This file is part of the E-Cell System
#
# Copyright (C) 1996-2016 Keio University
# Copyright (C) 2008-2016 RIKEN
# Copyright (C) 2005-2009 The Molecular Sciences Institute
#
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
#
# E-Cell System is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# E-Cell System is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with E-Cell System -- see the file COPYING.
# If not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#END_HEADER
#
#'Design: Kenta Hashimoto <kem@e-cell.org>',
#'Design and application Framework: Koichi Takahashi <shafi@e-cell.org>',
#'Programming: Yuki Fujita',
# 'Yoshiya Matsubara',
# 'Yuusuke Saito'
#
# modified by Masahiro Sugimoto <sugi@bioinformatics.org> at
# E-Cell Project, Lab. for Bioinformatics, Keio University.
#
import os
import gtk
import gtk.gdk
from ecell.ui.osogo.config import *
import ecell.ui.osogo.glade_compat as glade
class Window:
"""The super class of Window class.
[Note]:This class is not Window widget itself, but has widget instance.
"""
def __init__( self, gladeFile=None, rootWidget=None ):
"""Constructor
gladeFile -- a glade file name (str:absolute path/relative path)
rootWidget -- a root widget (str or None)
"""
self.gladeFile = gladeFile # glade file name
self.rootWidget = rootWidget # a root property
self.widgets = None # widgets instance
# Default title is classname of this class.
self.theTitle = self.__class__.__name__
def openWindow( self ):
"""
loads Glade file
Returns None
[Note]:If IOError happens during reading Glade file,
throws an exception.
"""
# ------------------------------------------------
# checks and loads glade file
# ------------------------------------------------
if os.access( self.gladeFile, os.R_OK ):
if self.rootWidget != None:
self.widgets = glade.XML( self.gladeFile, root= str( self.rootWidget ) )
else:
self.widgets = glade.XML( self.gladeFile, root= None )
else:
raise IOError( "can't read %s." %self.gladeFile )
def addHandlers( self, aHandlers ):
"""sets handlers
aHandlers -- a signal handler map (dict)
Returns None
"""
if type(aHandlers) != dict:
raise TypeError("%s must be dict." %str(aHandlers) )
self.widgets.signal_autoconnect( aHandlers )
def __getitem__( self, aKey ):
"""returns wiget specified by the key
aKey -- a widget name (str)
Returns a widget (gtk.Widget)
[Note]:When this window has not the widget specified by the key,
throws an exception.
"""
return self.widgets.get_widget( aKey )
def getWidget( self, aKey ):
"""returns wiget specified by the key
aKey -- a widget name (str)
Returns a widget (gtk.Widget)
[Note]:This method is same as __getitem__ method.
"""
return self[ aKey ]
def setIconList( self, anIconFile16, anIconFile32 ):
"""sets the window icon according to icon size
anIconFile16 --- icon 16x16 filename
anIconFile32 --- icon 32x32 filename
"""
aPixbuf16 = gtk.gdk.pixbuf_new_from_file(anIconFile16)
aPixbuf32 = gtk.gdk.pixbuf_new_from_file(anIconFile32)
theWidget=self[ self.__class__.__name__ ]
if theWidget!=None:
theWidget.set_icon_list( aPixbuf16, aPixbuf32 )
def editTitle( self, aTitle ):
"""edits and saves title
aTitle -- a title to save (str)
Returns None
"""
# save title
# Although self.theTitle looks verbose, self.getTitle() method
# returns self.theTitle. See the comment of getTitle() method
self.theTitle = aTitle
# get window widget ( The name of window widget is class name )
theWidget=self[ self.__class__.__name__ ]
# There are some cases theWidget is None.
# - When this method is called after 'destroy' signal.
# - When this window is attached other Window.
# In those cases, do not change title.
if theWidget!=None:
theWidget.set_title( self.theTitle )
def getTitle( self ):
"""gets title of this Window
Returns a title (str)
[Note]: This method returs not the title of widget but self.theTitle.
Because when this method is called after 'destroy' signal,
all widgets are None.
"""
return self.theTitle
def getParent( self ):
"""Returns a Parent Window (Window) # Not gtk.Window
"""
if self.rootWidget == None:
return self
else:
return self.__getParent( self.rootWidget )
def __getParent( self, *arg ):
"""Returns a Parent Window (Window) # Not gtk.Window
"""
if arg[0].rootWidget == None:
return arg[0]
else:
            return arg[0].__getParent( arg[0].rootWidget ) # walk up using the parent's own root, not ours
def getAllChildren( self ):
"""Returns all widget on this Window (list of widget)
Other windows in same glade file are not included.
"""
aChildren = self[self.__class__.__name__].get_children()
return self.__getChildren( aChildren )
def __getChildren( self, aChildren ):
"""Returns all widget on this Window (list of widget)
Other windows in same glade file are not included.
"""
aChildrenList = [] # list of widget
for aChild in aChildren:
# when aChild has no children, append it to list.
try:
aChild.get_children()
except AttributeError:
aChildrenList.append( aChild )
else:
# when aChild has no children, append it to list.
if len(aChild.get_children()) == 0:
aChildrenList.append( aChild )
else:
# when aChild has children, call this method.
aChildrenList += self.__getChildren( aChild.get_children() )
return aChildren + aChildrenList
def show_all( self ):
"""shows all widgets of this window
Returns None
"""
self[self.__class__.__name__].show_all()
# end of Window
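# Hedged usage sketch (not part of osogo): a hypothetical subclass following
# the pattern documented above -- load a glade file, then connect one handler.
class _ExampleWindow(Window):
    def __init__(self):
        Window.__init__( self, gladeFile='ExampleWindow.glade' )

    def openWindow( self ):
        Window.openWindow( self )
        self.addHandlers( { 'on_ok_clicked': self.onOK } )

    def onOK( self, *args ):
        print 'OK pressed'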
|
lgpl-3.0
| 2,354,077,049,409,439,000
| 32.46729
| 88
| 0.573164
| false
| 4.092571
| false
| false
| false
|
quadrismegistus/prosodic
|
prosodic/lib/MeterPosition.py
|
1
|
2254
|
import string
from copy import copy
from Parse import Parse
class MeterPosition(Parse):
def __init__(self, meter, meterVal): # meterVal represents whether the position is 's' or 'w'
self.slots=[]
self.children=self.slots
self.meter = meter
self.constraintScores = {}
for constraint in meter.constraints:
self.constraintScores[constraint] = 0
self.meterVal = meterVal
for slot in self.slots:
slot.meter=meterVal
self.feat('prom.meter',(meterVal=='s'))
#self.feat('meter',self.meterVal2)
#self.token = ""
def __copy__(self):
other = MeterPosition(self.meter, self.meterVal)
other.slots = self.slots[:]
for k,v in list(self.constraintScores.items()):
other.constraintScores[k]=copy(v)
return other
@property
def has_viol(self):
return bool(sum(self.constraintScores.values()))
@property
def violated(self):
viold=[]
for c,viol in list(self.constraintScores.items()):
if viol:
viold+=[c]
return viold
@property
def isStrong(self):
return self.meterVal.startswith("s")
def append(self,slot):
#self.token = ""
self.slots.append(slot)
@property
def meterVal2(self):
return ''.join([self.meterVal for x in self.slots])
@property
def mstr(self):
return ''.join([self.meterVal for n in range(len(self.slots))])
def posfeats(self):
posfeats={'prom.meter':[]}
for slot in self.slots:
for k,v in list(slot.feats.items()):
if (not k in posfeats):
posfeats[k]=[]
posfeats[k]+=[v]
posfeats['prom.meter']+=[self.meterVal]
for k,v in list(posfeats.items()):
posfeats[k]=tuple(v)
return posfeats
#
# def __repr__(self):
#
# if not self.token:
# slotTokens = []
#
# for slot in self.slots:
# #slotTokens.append(self.u2s(slot.token))
# slotTokens.append(slot.token)
#
# self.token = '.'.join(slotTokens)
#
# if self.meterVal == 's':
# self.token = self.token.upper()
# else:
# self.token = self.token.lower()
# return self.token
def __repr__(self):
return self.token
@property
def token(self):
if not hasattr(self,'_token') or not self._token:
token = '.'.join([slot.token for slot in self.slots])
token=token.upper() if self.meterVal=='s' else token.lower()
self._token=token
return self._token
|
gpl-3.0
| 392,770,631,571,258,000
| 22.479167
| 94
| 0.661934
| false
| 2.793061
| false
| false
| false
|
lundjordan/releasewarrior-2.0
|
releasewarrior/balrog.py
|
1
|
6332
|
import logging
import re
import requests
from copy import deepcopy
from mozilla_version.balrog import BalrogReleaseName
BALROG_API_ROOT = 'https://aus5.mozilla.org/api/v1'
log = logging.getLogger(name=__name__)
class BalrogError(Exception):
pass
class TooManyBlobsFoundError(BalrogError):
def __init__(self, blob_name, found_blobs):
super().__init__('Multiple blobs found for "{}": {}'.format(blob_name, found_blobs))
class NoBlobFoundError(BalrogError):
def __init__(self, blob_name):
super().__init__('No blob found for "{}"'.format(blob_name))
def get_release_blob(blob_name):
url = '{}/releases/{}'.format(BALROG_API_ROOT, blob_name)
req = requests.get(url, verify=True, timeout=4)
req.raise_for_status()
return req.json()
def get_releases(blob_name, name_prefix=None):
url = '{}/releases'.format(BALROG_API_ROOT)
params = {
'product': extract_product_from_blob_name(blob_name),
'name_prefix': blob_name if name_prefix is None else name_prefix,
'names_only': True
}
req = requests.get(url, verify=True, params=params, timeout=4)
req.raise_for_status()
return req.json()['names']
def extract_product_from_blob_name(blob_name):
return blob_name.split('-')[0]
def ensure_blob_name_exists_on_balrog(blob_name):
releases = get_releases(blob_name)
if len(releases) > 1:
raise TooManyBlobsFoundError(blob_name, releases)
if len(releases) < 1:
raise NoBlobFoundError(blob_name)
def craft_wnp_blob(orig_blob, wnp_url, for_channels, for_locales=None, for_version=None):
blob_name = orig_blob['name']
for_channels = [channel.strip() for channel in for_channels.split(',')]
for_locales = get_for_locales(blob_name, for_locales)
for_version = get_for_version(blob_name, for_version)
new_blob = deepcopy(orig_blob)
update_rules = new_blob.setdefault('updateLine', [])
existing_wnp_rules = [
rule for rule in update_rules if rule.get('fields', {}).get('actions', '') == "showURL"
]
number_of_existing_rules = len(existing_wnp_rules)
if number_of_existing_rules > 1:
raise NotImplementedError('Cannot handle releases that have more than 1 WNP rule')
elif number_of_existing_rules == 1:
existing_wnp_rule = existing_wnp_rules[0]
log.warn('replacing existing rule: {}'.format(existing_wnp_rule))
update_rules.remove(existing_wnp_rule)
wnp_rule = {
'fields': {
'actions': 'showURL',
'openURL': wnp_url,
},
'for': {
'channels': for_channels,
'locales': for_locales,
'versions': [for_version],
},
}
update_rules.append(wnp_rule)
log.info('New updateLine rules: {}'.format(update_rules))
return new_blob
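# Illustrative call (values hypothetical): with for_locales and for_version
# given explicitly, no network lookup happens:
#   craft_wnp_blob(blob, 'https://www.mozilla.org/whatsnew/', 'release, beta',
#                  for_locales='en-US, de', for_version='<65.0')
# appends to a deep copy of `blob` an updateLine entry of the form
#   {'fields': {'actions': 'showURL', 'openURL': <wnp_url>},
#    'for': {'channels': ['release', 'beta'], 'locales': ['en-US', 'de'],
#            'versions': ['<65.0']}}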
def get_for_locales(blob_name, for_locales=None):
if for_locales is None:
product = extract_product_from_blob_name(blob_name)
all_releases_names_for_product = get_releases(blob_name, name_prefix=product)
previous_release = find_previous_release(blob_name, all_releases_names_for_product)
previous_release_blob = get_release_blob(blob_name=previous_release)
for_locales = _get_locales_from_blob(previous_release_blob, previous_release)
log.info('for_locales gotten from previous "{}": {}'.format(previous_release, for_locales))
else:
for_locales = [locale.strip() for locale in for_locales.split(',')]
log.info('Using for_locales from command line: {}'.format(for_locales))
if not isinstance(for_locales, list):
raise BalrogError('{} is not a list'.format(for_locales))
return for_locales
_ENDS_WITH_BUILD_REGEX = re.compile(r'build\d+$')
def find_previous_release(blob_name, all_releases_names_for_product):
original_release = BalrogReleaseName.parse(blob_name)
# ends_with_build strips out nightly blobs and the ones that were created manually
ends_with_build = [
release
for release in all_releases_names_for_product
if _ENDS_WITH_BUILD_REGEX.search(release)
]
balrog_releases = [BalrogReleaseName.parse(release) for release in ends_with_build]
same_type = [
release
for release in balrog_releases
if release.version.version_type == original_release.version.version_type
]
if original_release.version.is_release:
same_type = [
release for release in same_type if release.version.is_release
] # strips ESR out
elif original_release.version.is_esr:
same_type = [
release for release in same_type if release.version.is_esr
] # strips release out
sorted_releases = same_type
sorted_releases.sort(reverse=True)
for release in sorted_releases:
if release < original_release:
previous_release = str(release)
log.info('Previous release was: {}'.format(previous_release))
return previous_release
raise BalrogError('Could not find a version smaller than {}'.format(original_release))
def _get_locales_from_blob(blob, blob_name):
locales = []
for rule in blob.get('updateLine', []):
candidate_locales = rule.get('for', {}).get('locales', [])
if candidate_locales:
if locales:
raise BalrogError(
'Too many locales defined in blob "{}". Found {} and {}'.format(
blob_name, candidate_locales, locales
)
)
locales = candidate_locales
if not locales:
raise BalrogError('No locales found in blob "{}"'.format(blob_name))
return locales
_FOR_VERSION_PATTERN = re.compile(r'<\d+\.0')
def get_for_version(blob_name, for_version=None):
if for_version is None:
balrog_release = BalrogReleaseName.parse(blob_name)
for_version = '<{}.0'.format(balrog_release.version.major_number)
log.info('for_version build from original blob: {}'.format(for_version))
else:
log.info('Using for_version from command line: {}'.format(for_version))
if _FOR_VERSION_PATTERN.match(for_version) is None:
raise BalrogError('{} does not match a valid for_version pattern'.format(for_version))
return for_version
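# Hedged usage sketch (illustrative, not part of the original module): it
# exercises only the pure helpers, assumes mozilla_version is installed, and
# needs no network access; the blob/version strings are made up.
if __name__ == '__main__':
    assert extract_product_from_blob_name('Firefox-65.0-build1') == 'Firefox'
    assert get_for_version('Firefox-65.0-build1') == '<65.0'
    assert get_for_version('Firefox-65.0-build1', for_version='<64.0') == '<64.0'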
|
mpl-2.0
| -8,396,954,852,463,452,000
| 32.680851
| 99
| 0.643399
| false
| 3.647465
| false
| false
| false
|
ImTheLucKyOne/check_mk_emcunity
|
emcunity300/perfometer/emcunity_lun.py
|
1
|
2232
|
#!/usr/bin/env python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
#
# Written / Edited by Philipp Näther
# philipp.naether@stadt-meissen.de
# Perf-O-Meters for Check_MK's checks
#
# They are called with:
# 1. row -> a dictionary of the data row with at least the
# keys "service_perf_data", "service_state" and "service_check_command"
# 2. The check command (might be extracted from the performance data
# in a PNP-like manner, e.g if perfdata is "value=10.5;0;100.0;20;30 [check_disk]
# 3. The parsed performance data as a list of 7-tuples of
# (varname, value, unit, warn, crit, min, max)
def perfometer_emcunity_lun(row, check_command, perf_data):
used_mb = perf_data[0][1]
maxx = perf_data[0][-1]
# perf data might be incomplete, if trending perfdata is off...
uncommitted_mb = 0
for entry in perf_data:
if entry[0] == "uncommitted":
uncommitted_mb = entry[1]
break
perc_used = 100 * (float(used_mb) / float(maxx))
perc_uncommitted = 100 * (float(uncommitted_mb) / float(maxx))
perc_totally_free = 100 - perc_used - perc_uncommitted
h = '<table><tr>'
if perc_used + perc_uncommitted <= 100:
# Regular handling, no overcommitt
h += perfometer_td(perc_used, "#00ffc6")
h += perfometer_td(perc_uncommitted, "#eeccff")
h += perfometer_td(perc_totally_free, "white")
else:
# Visualize overcommitted space by scaling to total overcommittment value
# and drawing the capacity as red line in the perfometer
total = perc_used + perc_uncommitted
perc_used_bar = perc_used * 100 / total
perc_uncommitted_bar = perc_uncommitted * 100 / total
perc_free = (100 - perc_used) * 100 / total
h += perfometer_td(perc_used_bar, "#00ffc6")
h += perfometer_td(perc_free, "#eeccff")
h += perfometer_td(1, "red") # This line visualizes the capacity
h += perfometer_td(perc_uncommitted - perc_free, "#eeccff")
h += "</tr></table>"
legend = "%0.2f%%" % perc_used
if uncommitted_mb:
legend += " (+%0.2f%%)" % perc_uncommitted
return legend, h
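# Hedged stand-ins (illustrative, not part of the original check): inside
# Check_MK the GUI injects perfometer_td() and the perfometers dict; define
# minimal fallbacks so the module can also be imported for testing.
try:
    perfometers
except NameError:
    perfometers = {}
    def perfometer_td(perc, color):
        return '<td style="width: %.2f%%; background-color: %s"></td>' % (perc, color)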
perfometers["check_mk-emcunity_lun"] = perfometer_emcunity_lun
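# Hedged demo (illustrative values): perfdata tuples follow
# (varname, value, unit, warn, crit, min, max).
if __name__ == '__main__':
    sample = [('size', 500.0, 'MB', None, None, 0.0, 1000.0),
              ('uncommitted', 250.0, 'MB', None, None, 0.0, 1000.0)]
    legend, html = perfometer_emcunity_lun({}, 'check_mk-emcunity_lun', sample)
    print(legend)  # -> 50.00% (+25.00%)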
|
gpl-3.0
| 892,500,301,874,742,900
| 38.839286
| 84
| 0.628418
| false
| 3.051984
| false
| false
| false
|
kennethcc2005/yahoo_finance_stocks
|
candle_output.py
|
1
|
85023
|
'''
Candlestick pattern detectors collected in a class.
Run the output() method to build a one-row DataFrame of pattern flags for a single symbol.
'''
import numpy as np
import pandas as pd
import json
import pandas.io.data as web
from datetime import date, datetime, timedelta
from collections import defaultdict
start = datetime(2010, 1, 1)
end = date.today()
df1 = pd.read_csv('data/companylist.csv')
df2 = pd.read_csv('data/companylist1.csv')
df3 = pd.read_csv('data/companylist2.csv')
data = web.DataReader("F", 'yahoo', start, end)
symbols = np.append(df1.Symbol.values, df2.Symbol.values)
symbols = np.append(symbols, df3.Symbol.values)
class candle(object):
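    '''
    Wraps a pandas DataFrame of daily OHLC bars (columns 'Open', 'High',
    'Low', 'Close', ordered oldest to newest). Every detector looks at the
    most recent rows via negative iloc indexing, so the frame should hold
    at least 15 rows for all patterns to be evaluable.
    '''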
def __init__(self,data):
self.data=data
def output(self):
out_df=pd.DataFrame(index=[0])
out_df['eight_new_price']=self.eight_new_price()
out_df['ten_new_price']=self.ten_new_price()
out_df['twelve_new_price']=self.twelve_new_price()
out_df['thirteen_new_price']=self.thirteen_new_price()
out_df['bearish_abandoned_baby']=self.bearish_abandoned_baby()
out_df['bullish_abandoned_baby']=self.bullish_abandoned_baby()
out_df['above_stomach']=self.above_stomach()
out_df['advance_block']=self.advance_block()
out_df['below_stomach']=self.below_stomach()
out_df['bearish_belt_hold']=self.bearish_belt_hold()
out_df['bearish_breakaway']=self.bearish_breakaway()
out_df['bearish_doji_star']=self.bearish_doji_star()
out_df['bearish_engulfing']=self.bearish_engulfing()
out_df['bearish_harami']=self.bearish_harami()
out_df['bearish_harami_cross']=self.bearish_harami_cross()
out_df['bearish_kicking']=self.bearish_kicking()
out_df['bearish_meeting_lines']=self.bearish_meeting_lines()
out_df['bearish_separating_lines']=self.bearish_separating_lines()
out_df['bearish_side_by_side_white_lines']=self.bearish_side_by_side_white_lines()
out_df['bearish_three_line_strike']=self.bearish_three_line_strike()
out_df['bearish_tri_star']=self.bearish_tri_star()
out_df['bullish_belt_hold']=self.bullish_belt_hold()
out_df['bullish_breakaway']=self.bullish_breakaway()
out_df['bullish_doji_star']=self.bullish_doji_star()
out_df['bullish_engulfing']=self.bullish_engulfing()
out_df['bullish_harami']=self.bullish_harami()
out_df['bullish_harami_cross']=self.bullish_harami_cross()
out_df['bullish_kicking']=self.bullish_kicking()
out_df['bullish_meeting_lines']=self.bullish_meeting_lines()
out_df['bullish_separating_lines']=self.bullish_separating_lines()
out_df['bullish_side_by_side_white_lines']=self.bullish_side_by_side_white_lines()
out_df['bullish_three_line_strike']=self.bullish_three_line_strike()
out_df['bullish_tri_star']=self.bullish_tri_star()
out_df['collapsing_doji_star']=self.collapsing_doji_star()
out_df['conceling_baby_swallow']=self.conceling_baby_swallow()
out_df['dark_cloud_cover']=self.dark_cloud_cover()
out_df['deliberation']=self.deliberation()
out_df['gapping_down_doji']=self.gapping_down_doji()
out_df['gapping_up_doji']=self.gapping_up_doji()
out_df['northern_doji']=self.northern_doji()
out_df['southern_doji']=self.southern_doji()
out_df['evening_doji']=self.evening_doji()
out_df['downside_gap_three_methods']=self.downside_gap_three_methods()
out_df['downside_tasuki_gap']=self.downside_tasuki_gap()
out_df['falling_three_methods']=self.falling_three_methods()
out_df['falling_window']=self.falling_window()
out_df['hammer']=self.hammer()
out_df['inverted_hammer']=self.inverted_hammer()
out_df['hanging_man']=self.hanging_man()
out_df['high_wave']=self.high_wave()
out_df['homing_pigeon']=self.homing_pigeon()
out_df['identical_three_crows']=self.identical_three_crows()
out_df['in_neck']=self.in_neck()
out_df['ladder_bottom']=self.ladder_bottom()
out_df['last_engulfing_bottom']=self.last_engulfing_bottom()
out_df['last_engulfing_top']=self.last_engulfing_top()
out_df['matching_low']=self.matching_low()
out_df['mat_hold']=self.mat_hold()
out_df['morning_doji_star']=self.morning_doji_star()
out_df['morning_star']=self.morning_star()
out_df['on_neck']=self.on_neck()
out_df['piercing_pattern']=self.piercing_pattern()
out_df['rickshaw_man']=self.rickshaw_man()
out_df['rising_three_methods']=self.rising_three_methods()
out_df['rising_window']=self.rising_window()
out_df['shooting_star_1']=self.shooting_star_1()
out_df['shooting_star_2']=self.shooting_star_2()
out_df['stick_sandwich']=self.stick_sandwich()
out_df['takuri_line']=self.takuri_line()
out_df['three_black_crows']=self.three_black_crows()
out_df['three_inside_down']=self.three_inside_down()
out_df['three_inside_up']=self.three_inside_up()
out_df['three_outside_down']=self.three_outside_down()
out_df['three_outside_up']=self.three_outside_up()
out_df['three_stars_in_south']=self.three_stars_in_south()
out_df['three_white_soldiers']=self.three_white_soldiers()
out_df['thrusting']=self.thrusting()
out_df['tweezers_bottom']=self.tweezers_bottom()
out_df['tweezers_top']=self.tweezers_top()
out_df['two_black_gapping']=self.two_black_gapping()
out_df['two_crows']=self.two_crows()
out_df['unique_three_river_bottom']=self.unique_three_river_bottom()
out_df['upside_gap_three_methods']=self.upside_gap_three_methods()
out_df['upside_gap_two_crows']=self.upside_gap_two_crows()
out_df['upside_tasuki_gap']=self.upside_tasuki_gap()
return out_df
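    # candle(df).output() therefore yields a one-row DataFrame of booleans,
    # one column per pattern, all describing the most recent bar(s) of df.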
def doji(self,data_pt):
if float(max(data_pt['Close'], data_pt['Open']))/float(min(data_pt['Close'], data_pt['Open'])) < 1.001:
return True
else:
return False
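    # e.g. Open=100.00, Close=100.05: 100.05/100.00 = 1.0005 < 1.001, so a
    # body smaller than ~0.1% of the price counts as a doji.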
def dragonfly_doji(self,data_pt):
'''
Look for a long lower shadow with a small body
(open and close are within pennies of each other).
'''
a = self.doji(data_pt)
b = ((data_pt['Close']-data_pt['Low'])/data_pt['Close']) > 0.03
c = self.similar_price(data_pt['Open'], data_pt['High'])
if a and b and c:
return True
else:
return False
def gravestone_doji(self,data_pt):
'''
Look for a candle with a tall upper shadow and little or no lower one.
The opening and closing prices should be within pennies of each other.
'''
a = self.doji(data_pt)
b = ((data_pt['High']-data_pt['Open'])/data_pt['Open']) > 0.03
c = self.similar_price(data_pt['Open'], data_pt['Low'])
if a and b and c:
return True
else:
return False
def long_legged_doji(self,data_pt):
'''
Look for a doji (opening and closing prices are within a few pennies of each other) accompanied by long shadows.
'''
a = self.doji(data_pt)
b = ((data_pt['High']-data_pt['Open'])/data_pt['Open']) > 0.03
c = ((data_pt['Close']-data_pt['Low'])/data_pt['Close']) > 0.03
if a and b and c:
return True
else:
return False
def body_candle(self,data_pt):
return abs(data_pt['Close'] - data_pt['Open'])
def black_candle(self,data_pt):
if (data_pt['Close'] > data_pt['Open']) and (not self.doji(data_pt)):
return False
else:
return True
def tall_black_candle(self,data_pt):
if self.black_candle(data_pt) and float(data_pt['Open'])/(data_pt['Close']) > 1.02:
return True
else:
return False
def small_black_candle(self,data_pt):
if self.black_candle(data_pt) and (not self.tall_black_candle(data_pt)):
return True
else:
return False
def white_candle(self,data_pt):
if (data_pt['Close'] > data_pt['Open']) and (not self.doji(data_pt)):
return True
else:
return False
def tall_white_candle(self,data_pt):
        if self.white_candle(data_pt) and float(data_pt['Close'])/(data_pt['Open']) > 1.02:
return True
else:
return False
def small_white_candle(self,data_pt):
if self.white_candle(data_pt) and not self.tall_white_candle(data_pt):
return True
else:
return False
def white_marubozu_candle(self,data_pt):
if self.white_candle(data_pt) and (data_pt['Open'] == data_pt['Low']) and (data_pt['Close'] == data_pt['High']):
return True
else:
return False
def black_marubozu_candle(self,data_pt):
if self.black_candle(data_pt) and (data_pt['Open'] == data_pt['High']) and (data_pt['Close'] == data_pt['Low']):
return True
else:
return False
def closing_black_marubozu_candle(self,data_pt):
'''
Look for a tall black candle with an upper shadow but no lower one.
'''
if self.tall_black_candle(data_pt) and (data_pt['Open'] != data_pt['High']) and (data_pt['Close'] == data_pt['Low']):
return True
else:
return False
def closing_white_marubozu_candle(self,data_pt):
'''
Look for a tall white candle with an lower shadow but no upper one.
'''
if self.tall_white_candle(data_pt) and (data_pt['Open'] != data_pt['Low']) and (data_pt['Close'] == data_pt['High']):
return True
else:
return False
def black_spinning_top_candle(self,data_pt):
'''
Look for a small black body with shadows taller than the body.
'''
a = self.small_black_candle(data_pt)
b = (data_pt['Close'] - data_pt['Low']) > 2 * self.body_candle(data_pt)
c = (data_pt['High'] - data_pt['Open']) > 2 * self.body_candle(data_pt)
if a and b and c:
return True
else:
return False
    def white_spinning_top_candle(self,data_pt):
        '''
        Look for a small white bodied candle with tall shadows.
        '''
        a = self.small_white_candle(data_pt)
        b = (data_pt['Open'] - data_pt['Low']) > 2 * self.body_candle(data_pt)
        c = (data_pt['High'] - data_pt['Close']) > 2 * self.body_candle(data_pt)
if a and b and c:
return True
else:
return False
def up_price_trend(self,data_pt, data_pt1, data_pt2):
'''
data_pt: the first day for the pattern
data_pt1: the day before the pattern, last day for the upward trend
data_pt2: the first day to compare as upward trend
'''
if ((data_pt1['Close'] /float(data_pt2['Open'])) > 1.03):
return True
else:
return False
def down_price_trend(self,data_pt, data_pt1, data_pt2):
'''
data_pt: the first day for the pattern
data_pt1: the day before the pattern, last day for the upward trend
data_pt2: the first day to compare as upward trend
'''
if ((float(data_pt2['Open']/data_pt1['Close'])) > 1.03):
return True
else:
return False
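    # Both trend helpers require a >3% move between the close just before the
    # pattern and the open of an earlier bar, e.g. Close=103.5 against
    # Open=100.0 gives 1.035 > 1.03, an upward trend. The data_pt argument
    # (the pattern's first day) is kept for call-site symmetry but unused.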
def similar_price(self,data_pt1,data_pt2, percent = 0.001):
a = (abs(data_pt1 - data_pt2)/(data_pt2)) < percent
if a :
return True
else:
return False
def eight_new_price(self):
for i in xrange(1,9):
if not (self.data.iloc[-i]['High'] > self.data.iloc[-i-1]['High']):
return False
if self.data.iloc[-9]['High'] < self.data.iloc[-10]['High']:
return True
else:
return False
def ten_new_price(self):
for i in xrange(1,11):
if not (self.data.iloc[-i]['High'] > self.data.iloc[-i-1]['High']):
return False
if self.data.iloc[-11]['High'] < self.data.iloc[-12]['High']:
return True
else:
return False
def twelve_new_price(self):
for i in xrange(1,13):
if not (self.data.iloc[-i]['High'] > self.data.iloc[-i-1]['High']):
return False
if self.data.iloc[-13]['High'] < self.data.iloc[-14]['High']:
return True
else:
return False
def thirteen_new_price(self):
for i in xrange(1,14):
if not (self.data.iloc[-i]['High'] > self.data.iloc[-i-1]['High']):
return False
if self.data.iloc[-14]['High'] < self.data.iloc[-15]['High']:
return True
else:
return False
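    # Hedged refactor sketch (not in the original): the four new-price
    # detectors above differ only in run length, so a single helper could
    # replace them, e.g. _n_new_price(8) matches eight_new_price().
    def _n_new_price(self, n):
        for i in xrange(1, n + 1):
            if not (self.data.iloc[-i]['High'] > self.data.iloc[-i-1]['High']):
                return False
        return self.data.iloc[-n-1]['High'] < self.data.iloc[-n-2]['High']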
def bearish_abandoned_baby(self):
a = self.data.iloc[-1]['Close'] < self.data.iloc[-1]['Open']
b = float(self.data.iloc[-1]['Open'])/(self.data.iloc[-1]['Close']) > 1.02
c = self.data.iloc[-1]['High'] < self.data.iloc[-2]['Low']
d = float(max(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open']))/float(min(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open'])) < 1.001
e = self.data.iloc[-2]['Low'] > self.data.iloc[-3]['High']
f = float(self.data.iloc[-3]['Close'])/(self.data.iloc[-3]['Open']) > 1.02
g = self.up_price_trend(self.data.iloc[-3],self.data.iloc[-4], self.data.iloc[-6])
if a and b and c and d and e and f and g:
return True
else:
return False
# if self.data.iloc[-1]['Close'] < self.data.iloc[-1]['Open']:
# if float(self.data.iloc[-1]['Open'])/(self.data.iloc[-1]['Close']) > 1.03:
# if self.data.iloc[-1]['High'] < self.data.iloc[-2]['Low']:
# if float(max(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open']))/float(min(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open'])) < 1.01:
# if self.data.iloc[-2]['Low'] > self.data.iloc[-3]['High']:
# if float(self.data.iloc[-3]['Close'])/(self.data.iloc[-3]['Open']) > 1.03:
def bullish_abandoned_baby(self):
a = self.data.iloc[-1]['Close'] > self.data.iloc[-1]['Open']
b = float(self.data.iloc[-1]['Close'])/(self.data.iloc[-1]['Open']) > 1.02
c = self.data.iloc[-1]['Low'] > self.data.iloc[-2]['High']
d = float(max(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open']))/float(min(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open'])) < 1.001
e = self.data.iloc[-2]['High'] < self.data.iloc[-3]['Low']
f = float(self.data.iloc[-3]['Open'])/(self.data.iloc[-3]['Close']) > 1.02
g = self.down_price_trend(self.data.iloc[-3],self.data.iloc[-4], self.data.iloc[-6])
if a and b and c and d and e and f and g:
return True
else:
return False
def above_stomach(self):
a = self.data.iloc[-2]['Close'] < self.data.iloc[-2]['Open']
b = self.data.iloc[-2]['Open']/float(self.data.iloc[-2]['Close']) > 1.02
c = (self.data.iloc[-1]['Close'] > self.data.iloc[-1]['Open']) and (self.data.iloc[-1]['Close'] > self.data.iloc[-2]['Open'])
d = self.data.iloc[-1]['Close']/float(self.data.iloc[-1]['Open']) > 1.02
e = self.data.iloc[-1]['Open'] > ((float(self.data.iloc[-2]['Open'])+self.data.iloc[-2]['Close'])/2)
f = self.data.iloc[-2]['Open'] > self.data.iloc[-1]['Open']
g = self.up_price_trend(self.data.iloc[-2],self.data.iloc[-3], self.data.iloc[-5])
        if a and b and c and d and e and f and g:
return True
else:
return False
def advance_block(self):
a = self.white_candle(self.data.iloc[-1])
b = self.white_candle(self.data.iloc[-2])
c = self.white_candle(self.data.iloc[-3])
day1_body = self.data.iloc[-3]['Close']/float(self.data.iloc[-3]['Open'])
day2_body = self.data.iloc[-2]['Close']/float(self.data.iloc[-2]['Open'])
day3_body = self.data.iloc[-1]['Close']/float(self.data.iloc[-1]['Open'])
d = day1_body > 1.03
e = (day2_body > 1.005) and ( day2_body < day1_body)
f = (day3_body > 1.005) and ( day3_body < day1_body)
g = (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Close']) and (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Open'])
h = (self.data.iloc[-2]['Open'] < self.data.iloc[-3]['Close']) and (self.data.iloc[-2]['Open'] > self.data.iloc[-3]['Open'])
j = (self.data.iloc[-1]['High'] - self.data.iloc[-1]['Close']) > (self.data.iloc[-1]['Close'] - self.data.iloc[-1]['Open'])
k = (self.data.iloc[-2]['High'] - self.data.iloc[-2]['Close']) > (self.data.iloc[-2]['Close'] - self.data.iloc[-2]['Open'])
l = self.up_price_trend(self.data.iloc[-3],self.data.iloc[-4], self.data.iloc[-6])
if a and b and c and d and e and f and g and h and j and k and l:
return True
else:
return False
def below_stomach(self):
'''
Look for a tall white candle followed by a candle that has a body below the middle of the white candle.
The second candle as black, but the guidelines I saw did not mentions this as a requirement.
'''
a = self.black_candle(self.data.iloc[-1])
b = self.white_candle(self.data.iloc[-2])
c = self.data.iloc[-1]['Open']/float(self.data.iloc[-1]['Close']) > 1.02
d = self.data.iloc[-2]['Close']/float(self.data.iloc[-2]['Open']) > 1.02
e = (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Open']) and (self.data.iloc[-1]['Open'] < (float(self.data.iloc[-2]['Open'])+self.data.iloc[-2]['Close'])/2)
f = self.data.iloc[-1]['Close'] < self.data.iloc[-2]['Open']
g = self.up_price_trend(self.data.iloc[-2],self.data.iloc[-3], self.data.iloc[-5])
if a and b and c and d and e and f and g:
return True
else:
return False
def bearish_belt_hold(self):
'''
Price opens at the high for the day and closes near the low, forming a tall black candle, often with a small lower shadow.
'''
a = self.tall_black_candle(self.data.iloc[-1])
        b = self.data.iloc[-1]['Close']/float(self.data.iloc[-1]['Low']) < 1.01
c = (self.data.iloc[-1]['Open'] == self.data.iloc[-1]['High'])
d = self.white_candle(self.data.iloc[-2])
e = self.up_price_trend(self.data.iloc[-1],self.data.iloc[-2], self.data.iloc[-4])
if a and b and c and d and e:
return True
else:
return False
def bearish_breakaway(self):
'''
Look for 5 candle lines in an upward price trend with the first candle being a tall white one.
The second day should be a white candle with a gap between the two bodies, but the shadows can overlap.
Day three should have a higher close and the candle can be any color.
Day 4 shows a white candle with a higher close.
The last day is a tall black candle with a close within the gap between the bodies of the first two candles.
'''
a = self.tall_white_candle(self.data.iloc[-5])
b = self.white_candle(self.data.iloc[-4])
c = self.data.iloc[-4]['Open'] > self.data.iloc[-5]['Close']
d = self.data.iloc[-3]['Close'] > self.data.iloc[-4]['Close']
e = self.data.iloc[-2]['Close'] > self.data.iloc[-3]['Close']
f = self.white_candle(self.data.iloc[-2])
g = self.tall_black_candle(self.data.iloc[-1])
h = (self.data.iloc[-1]['Close'] < self.data.iloc[-4]['Open']) and (self.data.iloc[-1]['Close'] > self.data.iloc[-5]['Close'])
i = self.up_price_trend(self.data.iloc[-5],self.data.iloc[-6], self.data.iloc[-8])
if a and b and c and d and e and f and g and h and i:
return True
else:
return False
def bearish_doji_star(self):
'''
Look for a two-candle pattern in an uptrend.
The first candle is a long white one.
The next day, price gaps higher and the body remains above the prior body.
A doji forms with the opening and closing prices within pennies of each other.
The shadows on the doji should be comparatively short.
'''
a = self.tall_white_candle(self.data.iloc[-2])
b = (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Close']) and (self.data.iloc[-1]['Close'] > self.data.iloc[-2]['Close'])
c = self.doji(self.data.iloc[-1])
d = (self.data.iloc[-1]['High'] - self.data.iloc[-1]['Low']) < self.body_candle(self.data.iloc[-2])
e = self.up_price_trend(self.data.iloc[-2],self.data.iloc[-3], self.data.iloc[-5])
if a and b and c and d and e:
return True
else:
return False
def bearish_engulfing(self):
'''
Look for a two candle pattern in an upward price trend.
The first candle is white and the second is black.
The body of the black candle is taller and overlaps the candle of the white body.
Shadows are unimportant.
'''
a = self.white_candle(self.data.iloc[-2])
b = self.black_candle(self.data.iloc[-1])
c = (self.data.iloc[-1]['Close'] < self.data.iloc[-2]['Open']) and (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Close'])
d = self.up_price_trend(self.data.iloc[-2],self.data.iloc[-3], self.data.iloc[-5])
if a and b and c and d:
return True
else:
return False
def bearish_harami(self):
'''
Look for a tall white candle followed by a small black one.
The opening and closing prices must be within the body of the white candle.
Ignore the shadows.
Either the tops of the bodies or the bottoms (or both) must be a different price.
'''
a = self.tall_white_candle(self.data.iloc[-2])
b = (self.black_candle(self.data.iloc[-1])) and (not self.tall_black_candle(self.data.iloc[-1]))
c = (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Close']) and (self.data.iloc[-1]['Close'] > self.data.iloc[-2]['Open'])
        d = (self.data.iloc[-1]['Open'] != self.data.iloc[-2]['Close']) or (self.data.iloc[-1]['Close'] != self.data.iloc[-2]['Open'])
        e = self.up_price_trend(self.data.iloc[-2],self.data.iloc[-3], self.data.iloc[-5])
        if a and b and c and d and e:
return True
else:
return False
def bearish_harami_cross(self):
'''
Look for a tall white candle in an upward price trend.
The next day, a doji appears that is inside (including the shadows) the trading range of the white candle.
'''
a = self.tall_white_candle(self.data.iloc[-2])
b = self.doji(self.data.iloc[-1])
c = (self.data.iloc[-1]['High'] < self.data.iloc[-2]['High']) and (self.data.iloc[-1]['Low'] > self.data.iloc[-2]['Low'])
d = self.up_price_trend(self.data.iloc[-2],self.data.iloc[-3], self.data.iloc[-5])
if a and b and c and d:
return True
else:
return False
def bearish_kicking(self):
'''
The first days is a white marubozu candle followed by a black marubozu. Between the two candles must be a gap.
'''
a = self.white_marubozu_candle(self.data.iloc[-2])
b = self.black_marubozu_candle(self.data.iloc[-1])
        c = self.data.iloc[-1]['High'] < self.data.iloc[-2]['Low']
if a and b and c:
return True
else:
return False
def bearish_meeting_lines(self):
'''
Look for a tall white candle in an upward price trend.
Following that, the next candle should be a tall black one.
The closes of the two candles should be "near" one another, whatever that means.
'''
a = self.up_price_trend(self.data.iloc[-2],self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_white_candle(self.data.iloc[-2])
c = self.tall_black_candle(self.data.iloc[-1])
d = (abs(self.data.iloc[-1]['Close'] - self.data.iloc[-2]['Close'])/(self.data.iloc[-1]['Close'])) < 0.001
if a and b and c and d:
return True
else:
return False
def bearish_separating_lines(self):
'''
Look for a tall white candle in a downward price trend followed by a tall black candle.
The opening price of the two candles should be similar.
'''
a = self.down_price_trend(self.data.iloc[-2],self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_white_candle(self.data.iloc[-2])
c = self.tall_black_candle(self.data.iloc[-1])
d = (abs(self.data.iloc[-1]['Open'] - self.data.iloc[-2]['Open'])/(self.data.iloc[-1]['Open'])) < 0.001
if a and b and c and d:
return True
else:
return False
def bearish_side_by_side_white_lines(self):
'''
Look for a black candle in a downward price trend.
Following that, find two white candles with bodies about the same size and similar opening prices.
The closing prices of both white candles must remain below the body of the black candle.
'''
a = self.down_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.black_candle(self.data.iloc[-3])
c = self.white_candle(self.data.iloc[-2])
d = self.white_candle(self.data.iloc[-1])
e = self.similar_price(self.data.iloc[-2]['Close'],self.data.iloc[-1]['Close'])
f = self.similar_price(self.data.iloc[-2]['Open'],self.data.iloc[-1]['Open'])
g = self.data.iloc[-2]['Close'] < self.data.iloc[-3]['Close']
if a and b and c and d and e and f and g:
return True
else:
return False
def bearish_three_line_strike(self):
'''
Look for three black candles forming lower lows followed by a tall white candle that
opens below the prior close and closes above the first day's open.
In other words, the last candle spans most of the price action of the prior three days.
'''
a = self.down_price_trend(self.data.iloc[-4], self.data.iloc[-5], self.data.iloc[-7])
b = self.black_candle(self.data.iloc[-2])
c = self.black_candle(self.data.iloc[-3])
d = self.black_candle(self.data.iloc[-4])
e = (self.data.iloc[-2]['Low'] < self.data.iloc[-3]['Low']) and (self.data.iloc[-2]['Close'] < self.data.iloc[-3]['Close'])
f = (self.data.iloc[-3]['Low'] < self.data.iloc[-4]['Low']) and (self.data.iloc[-3]['Close'] < self.data.iloc[-4]['Close'])
g = self.tall_white_candle(self.data.iloc[-1])
h = (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Close']) and (self.data.iloc[-1]['Close'] > self.data.iloc[-4]['Open'])
if a and b and c and d and e and f and g and h:
return True
else:
return False
def bearish_tri_star(self):
'''
Look for three doji candles, the middle one has a body above the other two.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.doji(self.data.iloc[-3])
c = self.doji(self.data.iloc[-2])
d = self.doji(self.data.iloc[-1])
e = min(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open']) > max(self.data.iloc[-1]['Close'], self.data.iloc[-1]['Open'])
f = min(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open']) > max(self.data.iloc[-3]['Close'], self.data.iloc[-3]['Open'])
if a and b and c and d and e and f:
return True
else:
return False
def bullish_belt_hold(self):
'''
Look for a white candle with no lower shadow, but closing near the high.
'''
a = self.down_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
b = self.white_candle(self.data.iloc[-1])
c = self.data.iloc[-1]['Low'] == self.data.iloc[-1]['Open']
d = self.similar_price(self.data.iloc[-1]['High'], self.data.iloc[-1]['Close'])
if a and b and c and d:
return True
else:
return False
def bullish_breakaway(self):
'''
Look for a series of five candles in a downtrend.
The first candle is tall and black followed by another black one that opens lower,
leaving a gap between the two bodies (but shadows can overlap).
The third day is a candle of any color but it should have a lower close.
Day four is a black candle with a lower close.
The final day is a tall white candle that closes within the body gap of the first two candles.
'''
a = self.down_price_trend(self.data.iloc[-5],self.data.iloc[-6], self.data.iloc[-8])
b = self.tall_black_candle(self.data.iloc[-5])
c = (self.black_candle(self.data.iloc[-4])) and (self.data.iloc[-4]['Open'] < self.data.iloc[-5]['Close'])
d = self.data.iloc[-3]['Close'] < self.data.iloc[-4]['Close']
e = (self.black_candle(self.data.iloc[-2])) and (self.data.iloc[-2]['Close'] < self.data.iloc[-3]['Close'])
f = self.tall_white_candle(self.data.iloc[-1])
g = (self.data.iloc[-1]['Close'] > self.data.iloc[-4]['Open']) and (self.data.iloc[-1]['Close'] < self.data.iloc[-5]['Close'])
if a and b and c and d and e and f and g:
return True
else:
return False
def bullish_doji_star(self):
'''
Look for a tall black candle on the first day followed by a doji
(where the opening and closing prices are within pennies of each other)
that gaps below the prior candle's body.
The shadows can overlap, but the doji's shadows should not be unusually long, whatever that means.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = (self.tall_black_candle(self.data.iloc[-2])) and self.doji(self.data.iloc[-1])
c = max(self.data.iloc[-1]['Close'], self.data.iloc[-1]['Open']) < self.data.iloc[-2]['Close']
d = (self.data.iloc[-1]['High']-self.data.iloc[-1]['Low']) < self.body_candle(self.data.iloc[-2])
if a and b and c and d:
return True
else:
return False
def bullish_engulfing(self):
'''
Look for two candles in a downward price trend.
The first is a black candle followed by a taller white one.
The white candle should have a close above the prior open and an open below the prior close.
In other words, the body of the white candle should engulf or overlap the body of the black candle.
Ignore the shadows.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.black_candle(self.data.iloc[-2])
c = self.tall_white_candle(self.data.iloc[-1])
d = (self.data.iloc[-1]['Close'] > self.data.iloc[-2]['Open']) and (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Close'])
if a and b and c and d:
return True
else:
return False
def bullish_harami(self):
'''
Look for a tall black candle in a downward price trend.
The next day a white candle should be nestled within the body of the prior candle.
Ignore the shadows. The tops or bottoms of the bodies can be the same price, but not both.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_black_candle(self.data.iloc[-2])
        c = self.white_candle(self.data.iloc[-1])
d = (self.data.iloc[-1]['Close'] < self.data.iloc[-2]['Open']) and (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Close'])
if a and b and c and d:
return True
else:
return False
def bullish_harami_cross(self):
'''
Look for a two candle pattern in a downward price trend.
The first line is a tall black candle followed by a doji that fits within the high-low price range of the prior day.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_black_candle(self.data.iloc[-2])
c = self.doji(self.data.iloc[-1])
        d = (self.data.iloc[-1]['High'] < self.data.iloc[-2]['High']) and (self.data.iloc[-1]['Low'] > self.data.iloc[-2]['Low'])
if a and b and c and d:
return True
else:
return False
def bullish_kicking(self):
'''
Look for a tall black marubozu candle followed by an upward gap then a tall white marubozu candle.
'''
a = self.tall_black_candle(self.data.iloc[-2])
b = self.black_marubozu_candle(self.data.iloc[-2])
c = self.tall_white_candle(self.data.iloc[-1])
d = self.white_marubozu_candle(self.data.iloc[-1])
e = self.data.iloc[-1]['Low'] > self.data.iloc[-2]['High']
if a and b and c and d and e:
return True
else:
return False
def bullish_meeting_lines(self):
'''
Look for a tall black candle followed by a tall white candle in an upward price trend.
The two closes should be near one another.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_black_candle(self.data.iloc[-2])
c = self.tall_white_candle(self.data.iloc[-1])
d = self.similar_price(self.data.iloc[-1]['Close'], self.data.iloc[-2]['Close'])
if a and b and c and d:
return True
else:
return False
def bullish_separating_lines(self):
'''
Look for a tall black candle in an upward price trend followed by a tall white candle.
The two candles share a common opening price.
'''
a = self.up_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_black_candle(self.data.iloc[-2])
c = self.tall_white_candle(self.data.iloc[-1])
d = self.similar_price(self.data.iloc[-1]['Open'], self.data.iloc[-2]['Open'])
if a and b and c and d:
return True
else:
return False
def bullish_side_by_side_white_lines(self):
'''
Look for three white candles in an upward price trend.
The last two candles should have bodies of similar size,
open near the same price and above the top of the body of the first white candle.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.white_candle(self.data.iloc[-1]) and self.white_candle(self.data.iloc[-2]) and self.white_candle(self.data.iloc[-3])
c = (self.similar_price(self.data.iloc[-1]['Open'], self.data.iloc[-2]['Open'])) and (self.similar_price(self.data.iloc[-1]['Close'], self.data.iloc[-2]['Close']))
d = (self.data.iloc[-1]['Open'] > self.data.iloc[-3]['Close']) and (self.data.iloc[-2]['Open'] > self.data.iloc[-3]['Close'])
if a and b and c and d:
return True
else:
return False
def bullish_three_line_strike(self):
'''
Look for three white candles each with a higher close.
A tall black candle should open higher, but close below the open of the first candle.
'''
a = self.up_price_trend(self.data.iloc[-4], self.data.iloc[-5], self.data.iloc[-7])
b = (self.white_candle(self.data.iloc[-4])) and (self.white_candle(self.data.iloc[-3])) and (self.white_candle(self.data.iloc[-2]))
c = (self.data.iloc[-4]['Close'] < self.data.iloc[-3]['Close']) and (self.data.iloc[-2]['Close'] > self.data.iloc[-3]['Close'])
d = self.tall_black_candle(self.data.iloc[-1])
e = (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Close']) and (self.data.iloc[-1]['Close'] < self.data.iloc[-4]['Open'])
if a and b and c and d and e:
return True
else:
return False
def bullish_tri_star(self):
'''
Look for three doji after a downward price trend.
The middle doji has a body below the other two.
'''
a = self.down_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = (self.doji(self.data.iloc[-3])) and (self.doji(self.data.iloc[-2])) and (self.doji(self.data.iloc[-1]))
c = max(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open']) < min(self.data.iloc[-1]['Close'], self.data.iloc[-1]['Open'])
d = max(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open']) < min(self.data.iloc[-3]['Close'], self.data.iloc[-3]['Open'])
if a and b and c and d:
return True
else:
return False
def collapsing_doji_star(self):
'''
Look for a white candle in an upward price trend.
Following that, find a doji that gaps below yesterday's low.
The last day is a black candle that also gaps below the doji.
None of the shadows on the three candles should overlap, so there should be gaps surrounding the doji.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.white_candle(self.data.iloc[-3])
c = (self.doji(self.data.iloc[-2])) and (self.data.iloc[-2]['High'] < self.data.iloc[-3]['Low'])
d = (self.black_candle(self.data.iloc[-1])) and (self.data.iloc[-1]['High'] < self.data.iloc[-2]['Low'])
if a and b and c and d:
return True
else:
return False
def conceling_baby_swallow(self):
'''
Look for four black candles.
The first two are long black marubozu candles followed the next day by a candle with a tall upper shadow.
The candle gaps open downward but price trades into the body of the prior day.
The last candle engulfs the prior day, including the shadows (a higher high and lower low than the prior day).
'''
a = self.down_price_trend(self.data.iloc[-4], self.data.iloc[-5], self.data.iloc[-7])
b = (self.tall_black_candle(self.data.iloc[-4])) and (self.black_marubozu_candle(self.data.iloc[-4]))
c = (self.tall_black_candle(self.data.iloc[-3])) and (self.black_marubozu_candle(self.data.iloc[-3]))
d = self.black_candle(self.data.iloc[-2]) and ((self.data.iloc[-2]['High'] - self.data.iloc[-2]['Open']) > self.body_candle(self.data.iloc[-2]))
e = (self.data.iloc[-2]['Open'] < self.data.iloc[-3]['Close']) and (self.data.iloc[-2]['High'] > self.data.iloc[-3]['Close'])
        f = (self.data.iloc[-1]['High'] > self.data.iloc[-2]['High']) and (self.data.iloc[-1]['Low'] < self.data.iloc[-2]['Low'])
if a and b and c and d and e and f:
return True
else:
return False
def dark_cloud_cover(self):
'''
Look for two candles in an upward price trend.
The first candle is a tall white one followed by a black candle with an opening price above the top of the white candle
(an opening price above the prior high), but a close below the mid point of the white body.
'''
a = self.up_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_white_candle(self.data.iloc[-2])
c = (self.black_candle(self.data.iloc[-1])) and (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['High'])
d = (self.data.iloc[-1]['Close'] < (self.data.iloc[-2]['Open'] + self.data.iloc[-2]['Close'])/2.)
if a and b and c and d:
return True
else:
return False
def deliberation(self):
'''
Look for three white candlesticks in an upward price trend.
The first two are tall bodied candles but the third has a small body that opens near the second day's close.
Each candle opens and closes higher than the previous one.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_white_candle(self.data.iloc[-3]) and self.tall_white_candle(self.data.iloc[-2])
c = self.white_candle(self.data.iloc[-1]) and (not self.tall_white_candle(self.data.iloc[-1]))
d = self.similar_price(self.data.iloc[-1]['Open'], self.data.iloc[-2]['Close'])
e = (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Open']) and (self.data.iloc[-2]['Open'] > self.data.iloc[-3]['Open'])
f = (self.data.iloc[-1]['Close'] > self.data.iloc[-2]['Close']) and (self.data.iloc[-2]['Close'] > self.data.iloc[-3]['Close'])
if a and b and c and d and e and f:
return True
else:
return False
def gapping_down_doji(self):
'''
In a downtrend, price gaps lower and forms a doji
(a candle in which the opening and closing prices are no more than a few pennies apart).
'''
a = self.down_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
b = self.doji(self.data.iloc[-1])
c = self.data.iloc[-1]['High'] < self.data.iloc[-2]['Low']
if a and b and c:
return True
else:
return False
def gapping_up_doji(self):
'''
Price gaps higher, including the shadows, in an uptrend and forms a doji candle.
A doji is one in which the opening and closing prices are within pennies of each other.
'''
a = self.up_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
b = self.doji(self.data.iloc[-1])
c = self.data.iloc[-1]['Low'] > self.data.iloc[-2]['High']
if a and b and c:
return True
else:
return False
def northern_doji(self):
'''
Look for a candle in which the opening and closing prices are within pennies of each other (a doji) in an up trend.
'''
a = self.up_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
b = self.doji(self.data.iloc[-1])
if a and b:
return True
else:
return False
def southern_doji(self):
'''
Look for a doji candlestick (one in which the opening and closing prices are a few pennies from each other) in a downward price trend.
'''
a = self.down_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
b = self.doji(self.data.iloc[-1])
if a and b:
return True
else:
return False
def bearish_doji_star(self):
'''
Look for a two-candle pattern in an uptrend.
The first candle is a long white one.
The next day, price gaps higher and the body remains above the prior body.
A doji forms with the opening and closing prices within pennies of each other.
The shadows on the doji should be comparatively short.
'''
a = self.up_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_white_candle(self.data.iloc[-2])
c = self.doji(self.data.iloc[-1]) and (not self.dragonfly_doji(self.data.iloc[-1])) and (not self.gravestone_doji(self.data.iloc[-1])) and (not self.long_legged_doji(self.data.iloc[-1]))
        d = min(self.data.iloc[-1]['Open'], self.data.iloc[-1]['Close']) > self.data.iloc[-2]['Close']
if a and b and c and d:
return True
else:
return False
def bullish_doji_star(self):
'''
Look for a tall black candle on the first day followed by a doji
(where the opening and closing prices are within pennies of each other)
that gaps below the prior candle's body.
The shadows can overlap, but the doji's shadows should not be unusually long, whatever that means.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_black_candle(self.data.iloc[-2])
c = self.doji(self.data.iloc[-1]) and (not self.dragonfly_doji(self.data.iloc[-1])) and (not self.gravestone_doji(self.data.iloc[-1])) and (not self.long_legged_doji(self.data.iloc[-1]))
        d = max(self.data.iloc[-1]['Open'], self.data.iloc[-1]['Close']) < self.data.iloc[-2]['Close']
if a and b and c and d:
return True
else:
return False
def evening_doji(self):
'''
Look for a tall white candle in an upward price trend followed by a doji whose body gaps above the two surrounding days.
Ignore the shadows. The last day is a tall black candle that closes at or below the mid point of the first day.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_white_candle(self.data.iloc[-3])
c = self.doji(self.data.iloc[-2])
d = (min(self.data.iloc[-2]['Open'],self.data.iloc[-2]['Close']) > self.data.iloc[-3]['Close']) and (min(self.data.iloc[-2]['Open'],self.data.iloc[-2]['Close']) > self.data.iloc[-1]['Open'])
e = self.tall_black_candle(self.data.iloc[-1])
f = self.data.iloc[-1]['Close'] <= (self.data.iloc[-3]['Close'] + self.data.iloc[-3]['Open'])/2.
if a and b and c and d and e and f:
return True
else:
return False
def downside_gap_three_methods(self):
'''
Look for two long black bodied candles in a downward price trend.
The second candle should have a gap between them (shadows do not overlap).
The last day is a white candle that opens within the body of the prior day and
closes within the body of the first day, closing the gap between the two black candles.
'''
a = self.down_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.black_candle(self.data.iloc[-3]) and self.black_candle(self.data.iloc[-2])
c = self.data.iloc[-3]['Low'] > self.data.iloc[-2]['High']
d = self.white_candle(self.data.iloc[-1])
e = (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Open'])and (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Close'])
f = (self.data.iloc[-1]['Close'] < self.data.iloc[-3]['Open'])and (self.data.iloc[-1]['Close'] > self.data.iloc[-3]['Close'])
if a and b and c and d and e and f:
return True
else:
return False
def downside_tasuki_gap(self):
'''
Look for a black candle in a downward price trend followed by another black candle,
but this one gaps lower with no shadow overlap between the two candles.
The final day sees a white candle print on the chart,
one that opens within the body of the second candle and closes within the gap between the first and second candles.
'''
a = self.down_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.black_candle(self.data.iloc[-3]) and self.black_candle(self.data.iloc[-2])
c = self.data.iloc[-3]['Low'] > self.data.iloc[-2]['High']
d = self.white_candle(self.data.iloc[-1])
e = (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Close']) and (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Open'])
f = (self.data.iloc[-1]['Close'] > self.data.iloc[-2]['High']) and (self.data.iloc[-1]['Close'] < self.data.iloc[-3]['Low'])
if a and b and c and d and e and f:
return True
else:
return False
def falling_three_methods(self):
'''
Look for a series of five candles in a downward price trend.
The first day should be a tall black candle followed by three up trending small white candles
(except the middle of the three, which can be either black or white),
followed by another tall black candle with a close below the first day's close.
The three middle candles should remain within the high-low range of the first candle.
'''
a = self.down_price_trend(self.data.iloc[-5], self.data.iloc[-6], self.data.iloc[-8])
b = self.tall_black_candle(self.data.iloc[-5])
c = self.small_white_candle(self.data.iloc[-4]) and self.small_white_candle(self.data.iloc[-2]) and (self.small_black_candle(self.data.iloc[-3]) or self.small_white_candle(self.data.iloc[-3]))
d = self.tall_black_candle(self.data.iloc[-1]) and (self.data.iloc[-1]['Close'] < self.data.iloc[-5]['Close'])
e = (self.data.iloc[-4]['High'] < self.data.iloc[-5]['High']) and (self.data.iloc[-3]['High'] < self.data.iloc[-5]['High']) and (self.data.iloc[-2]['High'] < self.data.iloc[-5]['High'])
f = (self.data.iloc[-4]['Low'] > self.data.iloc[-5]['Low']) and (self.data.iloc[-3]['Low'] > self.data.iloc[-5]['Low']) and (self.data.iloc[-2]['Low'] > self.data.iloc[-5]['Low'])
if a and b and c and d and e and f:
return True
else:
return False
def falling_window(self):
'''
Find a pattern in which yesterday's low is above today's high.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.data.iloc[-2]['Low'] > self.data.iloc[-1]['High']
if a and b:
return True
else:
return False
def hammer(self):
'''
Look for the hammer to appear in a downward price trend and
have a long lower shadow at least two or three times the height of the body with little or no upper shadow.
'''
a = self.down_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
b = (min(self.data.iloc[-1]['Open'], self.data.iloc[-1]['Close']) - self.data.iloc[-1]['Low']) > 2 * self.body_candle(self.data.iloc[-1])
c = self.similar_price(self.data.iloc[-1]['High'], max(self.data.iloc[-1]['Open'], self.data.iloc[-1]['Close']))
if a and b and c:
return True
else:
return False
def inverted_hammer(self):
'''
Look for a tall black candle with a close near the day's low followed by a short candle with a tall upper shadow and little or no lower shadow.
The second candle cannot be a doji
(opening and closing prices cannot be within pennies of each other) and
the open on the second candle must be below the prior candle's close.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_black_candle(self.data.iloc[-2]) and self.similar_price(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Low'])
c = (not self.doji(self.data.iloc[-1])) and (self.small_white_candle(self.data.iloc[-1]) or self.small_black_candle(self.data.iloc[-1]))
d = self.similar_price(self.data.iloc[-1]['Low'], min(self.data.iloc[-1]['Open'], self.data.iloc[-1]['Close']))
e = (self.data.iloc[-1]['High'] - max(self.data.iloc[-1]['Open'], self.data.iloc[-1]['Close'])) > 2 * self.body_candle(self.data.iloc[-1])
f = self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Close']
if a and b and c and d and e and f:
return True
else:
return False
def hanging_man(self):
'''
Look for a small bodied candle atop a long lower shadow in an uptrend.
'''
a = self.up_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
b = self.small_white_candle(self.data.iloc[-1]) or self.small_black_candle(self.data.iloc[-1])
        # hammer() embeds its own downtrend test, so check the candle shape directly
        c = (min(self.data.iloc[-1]['Open'], self.data.iloc[-1]['Close']) - self.data.iloc[-1]['Low']) > 2 * self.body_candle(self.data.iloc[-1])
        d = self.similar_price(self.data.iloc[-1]['High'], max(self.data.iloc[-1]['Open'], self.data.iloc[-1]['Close']))
        if a and b and c and d:
return True
else:
return False
def high_wave(self):
'''
Look for tall upper and lower shadows attached to a small body.
The body is not a doji (meaning that the opening and closing prices must be more than a few pennies apart.
'''
a = self.small_white_candle(self.data.iloc[-1]) or self.small_black_candle(self.data.iloc[-1])
b = not self.doji(self.data.iloc[-1])
c = (self.data.iloc[-1]['High'] - max(self.data.iloc[-1]['Open'], self.data.iloc[-1]['Close'])) > 2 * self.body_candle(self.data.iloc[-1])
d = (min(self.data.iloc[-1]['Open'], self.data.iloc[-1]['Close']) - self.data.iloc[-1]['Low']) > 2 * self.body_candle(self.data.iloc[-1])
if a and b and c and d:
return True
else:
return False
def homing_pigeon(self):
'''
Look for a two line candle in a downward price trend.
The first day should be a tall black body followed by a small black body that fits inside the body of the prior day.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_black_candle(self.data.iloc[-2])
c = self.small_black_candle(self.data.iloc[-1])
d = self.data.iloc[-1]['Close'] > self.data.iloc[-2]['Close']
e = self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Open']
if a and b and c and d and e:
return True
else:
return False
def identical_three_crows(self):
'''
Look for three tall black candles, the last two opening near the prior candle's close.
Some sources require each candle to be similar in size, but this one is rare enough without that restriction.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = (self.tall_black_candle(self.data.iloc[-3])) and (self.tall_black_candle(self.data.iloc[-2])) and (self.tall_black_candle(self.data.iloc[-1]))
c = self.similar_price(self.data.iloc[-2]['Open'], self.data.iloc[-3]['Close']) and self.similar_price(self.data.iloc[-1]['Open'], self.data.iloc[-2]['Close'])
if a and b and c:
return True
else:
return False
def in_neck(self):
'''
Look for a tall black candle in a downward price trend.
The next day, a white candle opens below the black day's low, but closes just into the body of the black candle.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_black_candle(self.data.iloc[-2])
c = self.white_candle(self.data.iloc[-1])
d = (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Low']) and (self.data.iloc[-1]['Close'] > self.data.iloc[-2]['Close']) and (self.data.iloc[-1]['Close'] < (self.data.iloc[-2]['Close']+self.data.iloc[-2]['Open'])/2.)
if a and b and c and d:
return True
else:
return False
def ladder_bottom(self):
'''
Look for a series of 5 candles in a downward price trend.
The first three days should be tall black candles, each with a lower open and close.
The 4th day should be a black candle with an upper shadow,
and the last day should be a white candle that gaps open above the body of the prior day.
'''
a = self.down_price_trend(self.data.iloc[-5], self.data.iloc[-6], self.data.iloc[-8])
b = self.tall_black_candle(self.data.iloc[-5]) and self.tall_black_candle(self.data.iloc[-4]) and self.tall_black_candle(self.data.iloc[-3])
c = (self.data.iloc[-4]['Close'] < self.data.iloc[-5]['Close']) and (self.data.iloc[-3]['Close'] < self.data.iloc[-4]['Close'])
d = (self.data.iloc[-4]['Open'] < self.data.iloc[-5]['Open']) and (self.data.iloc[-3]['Open'] < self.data.iloc[-4]['Open'])
e = self.black_candle(self.data.iloc[-2]) and (self.data.iloc[-2]['High'] > self.data.iloc[-2]['Open'])
f = self.white_candle(self.data.iloc[-1]) and (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Open'])
if a and b and c and d and e and f:
return True
else:
return False
def last_engulfing_bottom(self):
'''
Look for a white candle on the first day in a downward price trend followed by a black candle that engulfs the body of the white candle.
That means the black candle has a body this is above the top and below the bottom of the white candle.
Ignore the shadows.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.white_candle(self.data.iloc[-2]) and self.black_candle(self.data.iloc[-1])
c = (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Close']) and (self.data.iloc[-1]['Close'] < self.data.iloc[-2]['Open'])
if a and b and c:
return True
else:
return False
def last_engulfing_top(self):
'''
Look for a black candle followed by a white candle that overlaps the prior black candle's body.
The white candle should have a body above the prior candle's top and below the prior candle's bottom.
'''
a = self.up_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.white_candle(self.data.iloc[-1]) and self.black_candle(self.data.iloc[-2])
c = (self.data.iloc[-2]['Low'] > self.data.iloc[-1]['Open']) and (self.data.iloc[-2]['High'] < self.data.iloc[-1]['Close'])
if a and b and c:
return True
else:
return False
def matching_low(self):
'''
Look for a black candle with a tall body.
Following that, find a black body with a close (not the low) that matches the prior close.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_black_candle(self.data.iloc[-2]) and self.black_candle(self.data.iloc[-1])
        c = self.similar_price(self.data.iloc[-1]['Close'], self.data.iloc[-2]['Close'])
if a and b and c:
return True
else:
return False
def mat_hold(self):
'''
Look for a tall white candle to start the pattern.
The next day a small black candle has a higher close.
The third day can be any color but it is also a small candle.
The fourth day is, again, a small black candle and all three candles (days 2 to 4)
show a downward price trend but their bodies remain above the low of the first day.
The last day is another tall white candle with a close above the high of the prior four candles.
'''
a = self.up_price_trend(self.data.iloc[-5], self.data.iloc[-6], self.data.iloc[-8])
b = self.tall_white_candle(self.data.iloc[-5])
c = self.small_black_candle(self.data.iloc[-4]) and (self.data.iloc[-4]['Close'] > self.data.iloc[-5]['Close'])
d = self.small_black_candle(self.data.iloc[-3]) or self.small_white_candle(self.data.iloc[-3])
e = self.small_black_candle(self.data.iloc[-2]) and self.down_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
        f = (self.data.iloc[-2]['Close'] > self.data.iloc[-5]['Low']) and (min(self.data.iloc[-3]['Close'], self.data.iloc[-3]['Open']) > self.data.iloc[-5]['Low']) \
and (self.data.iloc[-4]['Close'] > self.data.iloc[-5]['Low'])
g = self.tall_white_candle(self.data.iloc[-1]) and self.data.iloc[-1]['Close'] > max(self.data.iloc[-2]['High'], self.data.iloc[-3]['High'], self.data.iloc[-4]['High'], self.data.iloc[-5]['High'])
if a and b and c and d and e and f and g:
return True
else:
return False
def morning_doji_star(self):
'''
Look for a tall black candle in a downward price trend.
The next day, a doji appears and its body gaps below the prior candle's body.
The final day is a tall white candle whose body gaps above the doji's.
'''
a = self.down_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_black_candle(self.data.iloc[-3]) and self.doji(self.data.iloc[-2])
c = max(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open']) < self.data.iloc[-3]['Close']
d = self.tall_white_candle(self.data.iloc[-1]) and (self.data.iloc[-1]['Open'] > max(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open']))
if a and b and c and d:
return True
else:
return False
def morning_star(self):
'''
Look for a tall black candle in a downward price trend.
Following that, a small bodied candle of any color appears, one whose body gaps below the prior body.
The last day is a tall white candle that gaps above the body of the second candle and closes at least midway into the body of the first day.
'''
a = self.down_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_black_candle(self.data.iloc[-3]) and (self.small_black_candle(self.data.iloc[-2]) or self.small_white_candle(self.data.iloc[-2]))
c = max(self.data.iloc[-2]['Open'], self.data.iloc[-2]['Close']) < self.data.iloc[-3]['Close']
d = self.tall_white_candle(self.data.iloc[-1]) and (self.data.iloc[-1]['Open'] > max(self.data.iloc[-2]['Close'], self.data.iloc[-2]['Open']))
if a and b and c and d:
return True
else:
return False
def on_neck(self):
'''
Look for a tall black candle in a downward price trend. Following that, a white candle has a close that matches (or nearly matches) the prior low.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.tall_black_candle(self.data.iloc[-2])
        c = self.white_candle(self.data.iloc[-1]) and self.similar_price(self.data.iloc[-1]['Close'], self.data.iloc[-2]['Low'])
if a and b and c:
return True
else:
return False
def piercing_pattern(self):
'''
Look for a black candle followed by a white one that opens below the black candle's low and closes between the midpoint of the black body and opening price.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.black_candle(self.data.iloc[-2]) and self.white_candle(self.data.iloc[-1])
c = self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Low']
d = (self.data.iloc[-1]['Close'] < self.data.iloc[-2]['Open']) and (self.data.iloc[-1]['Close'] > (self.data.iloc[-2]['Open'] - self.body_candle(self.data.iloc[-2])/2.))
if a and b and c and d:
return True
else:
return False
def rickshaw_man(self):
'''
Look for the opening and closing prices to be within pennies of each other,
unusually tall upper and lower shadows, and the body to be near the middle of the candlestick.
'''
a = self.long_legged_doji(self.data.iloc[-1])
b = self.similar_price(self.data.iloc[-1]['Open'], (self.data.iloc[-1]['High'] + self.data.iloc[-1]['Low'])/2.) or self.similar_price(self.data.iloc[-1]['Close'], (self.data.iloc[-1]['High'] + self.data.iloc[-1]['Low'])/2.)
if a and b:
return True
else:
return False
def rising_three_methods(self):
'''
Look for a tall white candle followed by three small candles that trend lower but close within the high-low range of the first candle.
Candles 2 and 4 are black, but day 3 can be any color.
The final candle in the pattern is a tall white one that closes above the close of the first day.
'''
a = self.up_price_trend(self.data.iloc[-5], self.data.iloc[-6], self.data.iloc[-8])
b = self.tall_white_candle(self.data.iloc[-5])
c = self.down_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
d = self.small_black_candle(self.data.iloc[-4]) and (self.data.iloc[-4]['Close'] < self.data.iloc[-5]['High']) and (self.data.iloc[-4]['Close'] > self.data.iloc[-5]['Low'])
e = (self.small_black_candle(self.data.iloc[-3]) or self.small_white_candle(self.data.iloc[-3])) and (self.data.iloc[-3]['Close'] < self.data.iloc[-5]['High']) and (self.data.iloc[-3]['Close'] > self.data.iloc[-5]['Low'])
f = self.small_black_candle(self.data.iloc[-2]) and (self.data.iloc[-2]['Close'] < self.data.iloc[-5]['High']) and (self.data.iloc[-2]['Close'] > self.data.iloc[-5]['Low'])
g = self.tall_white_candle(self.data.iloc[-1]) and (self.data.iloc[-1]['Close'] > self.data.iloc[-5]['Close'])
if a and b and c and d and e and f and g:
return True
else:
return False
def rising_window(self):
'''
Find a pattern in which yesterday's high is below today's low.
'''
a = self.up_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.data.iloc[-2]['High'] < self.data.iloc[-1]['Low']
if a and b:
return True
else:
return False
def shooting_star_1(self):
'''
Look for a small bodied candle (but not a doji) with little or no lower shadow and
a tall upper shadow at least twice the height of the body.
'''
a = self.up_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
b = self.small_black_candle(self.data.iloc[-1]) or self.small_white_candle(self.data.iloc[-1])
c = self.similar_price(self.data.iloc[-1]['Low'], min(self.data.iloc[-1]['Close'], self.data.iloc[-1]['Open']))
d = (self.data.iloc[-1]['High'] - max(self.data.iloc[-1]['Close'], self.data.iloc[-1]['Open'])) > 2 * self.body_candle(self.data.iloc[-1])
if a and b and c and d:
return True
else:
return False
def shooting_star_2(self):
'''
Look for two candles in an upward price trend.
The first candle is white followed by a small bodied candle with an upper shadow at least three times the height of the body.
The candle has no lower shadow or a very small one and there is a gap between the prices of the two bodies.
The second candle can be any color.
'''
a = self.up_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.white_candle(self.data.iloc[-2])
c = self.small_black_candle(self.data.iloc[-1]) or self.small_white_candle(self.data.iloc[-1])
d = (self.data.iloc[-1]['High'] - max(self.data.iloc[-1]['Close'], self.data.iloc[-1]['Open'])) > 3 * self.body_candle(self.data.iloc[-1])
e = self.similar_price(self.data.iloc[-1]['Low'], min(self.data.iloc[-1]['Close'], self.data.iloc[-1]['Open']))
f = self.data.iloc[-1]['Low'] > self.data.iloc[-2]['Close']
if a and b and c and d and e and f:
return True
else:
return False
def stick_sandwich(self):
'''
Look for a black candle in a falling price trend.
The second candle is white and it trades above the close of the prior day.
The last candle is a black one that closes at or near the close of the first day.
'''
a = self.down_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.black_candle(self.data.iloc[-3]) and self.white_candle(self.data.iloc[-2]) and self.black_candle(self.data.iloc[-1])
c = (self.data.iloc[-2]['Low'] > self.data.iloc[-3]['Close'])
d = self.similar_price(self.data.iloc[-1]['Close'], self.data.iloc[-3]['Close'])
if a and b and c and d:
return True
else:
return False
def takuri_line(self):
'''
A small bodied candle with a lower shadow at least three times the height of the body and little or no upper shadow.
'''
a = self.down_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
b = self.small_black_candle(self.data.iloc[-1]) or self.small_white_candle(self.data.iloc[-1])
c = self.similar_price(self.data.iloc[-1]['High'], max(self.data.iloc[-1]['Close'], self.data.iloc[-1]['Open']))
d = abs(self.data.iloc[-1]['Low'] - min(self.data.iloc[-1]['Close'], self.data.iloc[-1]['Open'])) > 3 * self.body_candle(self.data.iloc[-1])
if a and b and c and d:
return True
else:
return False
def three_black_crows(self):
'''
Look for three tall black candles that appear in an upward price trend.
Candles 2 and 3 of the pattern should open within the body of the prior candle,
and all three should close near their lows, making new lows along the way.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_black_candle(self.data.iloc[-3]) and self.tall_black_candle(self.data.iloc[-2]) and self.tall_black_candle(self.data.iloc[-1])
c = (self.data.iloc[-2]['Open'] > self.data.iloc[-3]['Close']) and (self.data.iloc[-2]['Open'] < self.data.iloc[-3]['Open'])
d = (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Close']) and (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Open'])
e = self.similar_price(self.data.iloc[-3]['Low'], self.data.iloc[-3]['Close']) and self.similar_price(self.data.iloc[-2]['Low'], self.data.iloc[-2]['Close']) and self.similar_price(self.data.iloc[-1]['Low'], self.data.iloc[-1]['Close'])
f = (self.data.iloc[-3]['Low'] > self.data.iloc[-2]['Low']) and (self.data.iloc[-2]['Low'] > self.data.iloc[-1]['Low'])
if a and b and c and d and e and f:
return True
else:
return False
def three_inside_down(self):
'''
Look for a tall white candle in an upward price trend.
Following that, a small black candle appears with the open and close within the body of the first day.
The tops or bottoms of the two bodies can be the same price, but not both.
The last day must close lower, but can be any color.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_white_candle(self.data.iloc[-3])
c = self.small_black_candle(self.data.iloc[-2])
d = (self.data.iloc[-2]['Open'] < self.data.iloc[-3]['Close']) and (self.data.iloc[-2]['Close'] > self.data.iloc[-3]['Open'])
e = (self.data.iloc[-1]['Close'] < self.data.iloc[-2]['Close'])
if a and b and c and d and e:
return True
else:
return False
def three_inside_up(self):
'''
Look for a tall black candle in a downward price trend.
The next day, a small bodied white candle has a body that is within the body of the prior candle.
The tops or bottoms of the bodies can be the same price, but not both.
The last day is a white candle that closes above the prior close.
'''
a = self.down_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_black_candle(self.data.iloc[-3])
c = self.small_white_candle(self.data.iloc[-2])
d = (self.data.iloc[-2]['Open'] > self.data.iloc[-3]['Close']) and (self.data.iloc[-2]['Close'] < self.data.iloc[-3]['Open'])
e = self.white_candle(self.data.iloc[-1]) and (self.data.iloc[-1]['Close'] > self.data.iloc[-2]['Close'])
if a and b and c and d and e:
return True
else:
return False
def three_outside_down(self):
'''
Look for a white candle in an upward price trend.
Following that, a black candle opens higher and closes lower than the prior candle's body.
The last day is a candle with a lower close.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.white_candle(self.data.iloc[-3])
        c = self.black_candle(self.data.iloc[-2]) and (self.data.iloc[-2]['Open'] > self.data.iloc[-3]['Close']) and (self.data.iloc[-2]['Close'] < self.data.iloc[-3]['Open'])
d = self.data.iloc[-1]['Close'] < self.data.iloc[-2]['Close']
if a and b and c and d:
return True
else:
return False
def three_outside_up(self):
'''
Look for a black candle in a downward price trend.
Following that, a white candle opens below the prior body and closes above it, too.
        The last day is a candle in which price closes higher, according to Morris, who developed the pattern.
'''
a = self.down_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.black_candle(self.data.iloc[-3])
        c = self.white_candle(self.data.iloc[-2]) and (self.data.iloc[-2]['Open'] < self.data.iloc[-3]['Close']) and (self.data.iloc[-2]['Close'] > self.data.iloc[-3]['Open'])
d = self.data.iloc[-1]['Close'] > self.data.iloc[-2]['Close']
if a and b and c and d:
return True
else:
return False
def three_stars_in_south(self):
'''
Look for a tall black candle with a long lower shadow to appear in a downward price trend.
The second day should be similar to the first day, but smaller and with a higher low.
The last day is a black marubozu that squeezes inside the high-low range of the prior day.
Good luck finding one.
'''
a = self.down_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_black_candle(self.data.iloc[-3]) and ((self.data.iloc[-3]['Close']-self.data.iloc[-3]['Low']) > self.body_candle(self.data.iloc[-3]))
c = self.tall_black_candle(self.data.iloc[-2]) and ((self.data.iloc[-2]['Close']-self.data.iloc[-2]['Low']) > self.body_candle(self.data.iloc[-2]))
d = self.data.iloc[-2]['Low'] > self.data.iloc[-3]['Low']
e = self.black_marubozu_candle(self.data.iloc[-1]) and (self.data.iloc[-1]['High'] < self.data.iloc[-2]['High']) and (self.data.iloc[-1]['Low'] > self.data.iloc[-2]['Low'])
if a and b and c and d and e:
return True
else:
return False
def three_white_soldiers(self):
'''
Look for three tall white candles, each with a close near the high, higher closes, and
        bodies that overlap (an opening price within the prior candle's body).
'''
a = self.down_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_white_candle(self.data.iloc[-3]) and self.tall_white_candle(self.data.iloc[-2]) and self.tall_white_candle(self.data.iloc[-1])
c = self.similar_price(self.data.iloc[-3]['High'], self.data.iloc[-3]['Close']) and self.similar_price(self.data.iloc[-2]['High'], self.data.iloc[-2]['Close']) and self.similar_price(self.data.iloc[-1]['High'], self.data.iloc[-1]['Close'])
d = (self.data.iloc[-3]['High'] < self.data.iloc[-2]['High']) and (self.data.iloc[-2]['High'] < self.data.iloc[-1]['High'])
e = (self.data.iloc[-2]['Open'] > self.data.iloc[-3]['Open']) and (self.data.iloc[-2]['Open'] < self.data.iloc[-3]['Close'])
f = (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Open']) and (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Close'])
if a and b and c and d and e and f:
return True
else:
return False
def thrusting(self):
'''
Look for a black candle in a downward price trend followed by a white candle that
opens below the prior low but closes near but below the midpoint of the black candle's body.
'''
a = self.down_price_trend(self.data.iloc[-2], self.data.iloc[-3], self.data.iloc[-5])
b = self.black_candle(self.data.iloc[-2]) and self.white_candle(self.data.iloc[-1])
c = (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Low']) and (self.data.iloc[-1]['Close'] < (self.data.iloc[-2]['Open'] - self.body_candle(self.data.iloc[-2])/2.)) and \
(self.data.iloc[-1]['Close'] > (self.data.iloc[-2]['Close'] + self.body_candle(self.data.iloc[-2])/4.))
if a and b and c:
return True
else:
return False
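    # Close-depth guide for the related two-candle bullish patterns above,
    # as implemented (a white candle follows a black candle in a downtrend):
    #   on_neck   : white close ~= prior low
    #   thrusting : white close below the midpoint of the prior black body
    #   piercing  : white close between that midpoint and the prior open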
def tweezers_bottom(self):
'''
Look for two candles sharing the same low price.
'''
a = self.down_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
b = self.data.iloc[-1]['Low'] == self.data.iloc[-2]['Low']
if a and b:
return True
else:
return False
def tweezers_top(self):
'''
Look for two adjacent candlesticks with the same (or nearly the same) high price in an uptrend.
'''
a = self.up_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
b = self.similar_price(self.data.iloc[-1]['High'], self.data.iloc[-2]['High'])
if a and b:
return True
else:
return False
def two_black_gapping(self):
'''
Look for a price gap followed by two black candles.
The second black candle should have a high below the prior candle's high.
'''
a = self.down_price_trend(self.data.iloc[-1], self.data.iloc[-2], self.data.iloc[-4])
b = self.black_candle(self.data.iloc[-2]) and self.black_candle(self.data.iloc[-1])
c = self.data.iloc[-2]['High'] < self.data.iloc[-3]['Low']
d = self.data.iloc[-1]['High'] < self.data.iloc[-2]['High']
if a and b and c and d:
return True
else:
return False
def two_crows(self):
'''
Look for a tall white candle in an upward price trend.
Following that, a black candle has a body that gaps above the prior candle's body.
The last day is another black candle, but this one opens within the prior candle's body and closes within the body of the first candle in the pattern.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_white_candle(self.data.iloc[-3])
c = self.black_candle(self.data.iloc[-2]) and (self.data.iloc[-2]['Close'] > self.data.iloc[-3]['Close'])
d = self.black_candle(self.data.iloc[-1]) and (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Open']) and (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Close'])
        e = (self.data.iloc[-1]['Close'] > self.data.iloc[-3]['Open']) and (self.data.iloc[-1]['Close'] < self.data.iloc[-3]['Close'])
if a and b and c and d and e:
return True
else:
return False
def unique_three_river_bottom(self):
'''
Look for a tall bodied black candle in a downward price trend.
Following that, another black body rests inside the prior body, but the lower shadow is below the prior day's low.
The last day is a short bodied white candle that remains below the body of the prior candle.
'''
a = self.down_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_black_candle(self.data.iloc[-3])
c = self.black_candle(self.data.iloc[-2]) and (self.data.iloc[-2]['Low'] < self.data.iloc[-3]['Low'])
        d = (self.data.iloc[-2]['Open'] < self.data.iloc[-3]['Open']) and (self.data.iloc[-2]['Close'] > self.data.iloc[-3]['Close'])
e = self.small_white_candle(self.data.iloc[-1]) and (self.data.iloc[-1]['Close'] < self.data.iloc[-2]['Close'])
if a and b and c and d and e:
return True
else:
return False
def upside_gap_three_methods(self):
'''
Look for two tall white candles in an upward price trend.
There should be a gap between them, including between the shadows.
The last day is a black candle that fills the gap created by the first two days.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_white_candle(self.data.iloc[-3]) and self.tall_white_candle(self.data.iloc[-2])
c = self.data.iloc[-3]['High'] < self.data.iloc[-2]['Low']
d = self.black_candle(self.data.iloc[-1]) and (self.data.iloc[-1]['Close'] < self.data.iloc[-3]['Close']) and (self.data.iloc[-1]['Close'] > self.data.iloc[-3]['Open'])
e = (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Close']) and (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Open'])
if a and b and c and d and e:
return True
else:
return False
def upside_gap_two_crows(self):
'''
Look for a tall white candle in an upward price trend.
Then find a black candle with a body gapping above the prior candle's body.
The last day is another black candle that engulfs the body of the middle day with a close that
remains above the close of the first candle.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.tall_white_candle(self.data.iloc[-3])
c = self.black_candle(self.data.iloc[-2]) and (self.data.iloc[-3]['Close'] < self.data.iloc[-2]['Close'])
d = self.black_candle(self.data.iloc[-1]) and (self.data.iloc[-1]['Close'] < self.data.iloc[-2]['Close']) and (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Open'])
        e = self.data.iloc[-1]['Close'] > self.data.iloc[-3]['Close']
if a and b and c and d and e:
return True
else:
return False
def upside_tasuki_gap(self):
'''
Look for a white candle in an upward price trend.
Following that, find another white candle, but this one gaps higher and that includes a gap between the shadows of the two candles.
The last day is a black candle that opens in the body of the prior candle and closes within the gap created between the first two candles.
'''
a = self.up_price_trend(self.data.iloc[-3], self.data.iloc[-4], self.data.iloc[-6])
b = self.white_candle(self.data.iloc[-3])
c = self.white_candle(self.data.iloc[-2]) and (self.data.iloc[-2]['Low'] > self.data.iloc[-3]['High'])
d = self.black_candle(self.data.iloc[-1]) and (self.data.iloc[-1]['Open'] > self.data.iloc[-2]['Open']) and (self.data.iloc[-1]['Open'] < self.data.iloc[-2]['Close'])
e = (self.data.iloc[-1]['Close'] > self.data.iloc[-3]['Close']) and (self.data.iloc[-1]['Close'] < self.data.iloc[-2]['Open'])
if a and b and c and d and e:
return True
else:
return False
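    # A minimal usage sketch (hypothetical driver; the class name and the
    # shape of self.data -- a pandas OHLC DataFrame, newest row last -- are
    # assumptions based on the methods above):
    #
    #   patterns = CandlePatterns(ohlc_frame)
    #   if patterns.morning_star():
    #       print('bullish reversal candidate')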
|
mit
| 3,866,151,280,402,048,000
| 51.540932
| 247
| 0.576738
| false
| 3.292147
| false
| false
| false
|
amlight/ofp_sniffer
|
ofp_sniffer.py
|
1
|
7990
|
#!/usr/bin/env python3.6
"""
This code is the AmLight OpenFlow Sniffer
Author: AmLight Dev Team <dev@amlight.net>
"""
import sys
import logging.config
import time
import threading
import yaml
from libs.core.printing import PrintingOptions
from libs.core.sanitizer import Sanitizer
from libs.core.topo_reader import TopoReader
from libs.core.cli import get_params
from libs.core.save_to_file import save_to_file
from libs.core.custom_exceptions import *
from libs.gen.packet import Packet
from apps.oess_fvd import OessFvdTracer
from apps.ofp_stats import OFStats
from apps.ofp_proxies import OFProxy
from apps.influx_client import InfluxClient
from apps.notifications import Notifications
class RunSniffer(object):
"""
The RunSniffer class is the main class for the OpenFlow Sniffer.
This class instantiate all auxiliary classes, captures the packets,
instantiate new OpenFlow messages and triggers all applications.
"""
def __init__(self):
self.printing_options = PrintingOptions()
self.sanitizer = Sanitizer()
self.oft = None
self.stats = None
self.influx = None
self.notifications = None
self.trigger_event = threading.Event()
self.cap = None
self.packet_number = None
self.load_apps = dict()
self.packet_count = 1
self.topo_reader = TopoReader()
self.save_to_file = None
self.ofp_proxy = None
self.load_config()
def load_config(self):
"""
Parses the parameters received and instantiates the
apps requested.
"""
# Get CLI params and call the pcapy loop
self.cap, self.packet_number, \
self.load_apps, sanitizer, \
topo_file, is_to_save = get_params(sys.argv)
self.sanitizer.process_filters(sanitizer)
# Load TopologyReader
self.topo_reader.readfile(topo_file)
# Save to File
self.save_to_file = save_to_file(is_to_save)
# Start Apps
self.ofp_proxy = OFProxy()
if 'oess_fvd' in self.load_apps:
self.oft = OessFvdTracer(self.load_apps['oess_fvd'])
if 'statistics' in self.load_apps:
self.stats = OFStats()
if 'influx' in self.load_apps:
self.influx = InfluxClient(trigger_event=self.trigger_event)
if 'notifications' in self.load_apps:
self.notifications = Notifications(self.load_apps['notifications'])
def run(self):
"""
        cap.loop continuously captures packets with pcapy. For every
        captured packet, the self.process_packet method is called.
Exits:
0 - Normal, reached end of file
            1 - Normal, user requested with CTRL + C
2 - Error
3 - Interface or file not found
"""
exit_code = 0
# DEBUG:
# self.cap.loop(-1, self.process_packet)
try:
self.cap.loop(-1, self.process_packet)
except EndOfPcapFile:
exit_code = 3
except KeyboardInterrupt:
exit_code = 1
except Exception as exception:
print('Error on packet %s: %s ' % (self.packet_count, exception))
exit_code = 2
finally:
if 'statistics' in self.load_apps:
# If OFP_Stats is running, set a timer
# before closing the app. Useful in cases
# where the ofp_sniffer is reading from a
# pcap file instead of a NIC.
time.sleep(200)
# pass
print('Exiting with code: %s' % exit_code)
# gracefully shut down
if 'influx' in self.load_apps:
self.influx.stop_event.set()
sys.exit(exit_code)
def process_packet(self, header, packet):
"""
Every packet captured by cap.loop is then processed here.
        If a packet is at least 54 bytes long, we process it. If its
        length is 0, there are no more packets. Anything in between is
        a fragment, which we ignore for now.
Args:
header: header of the captured packet
packet: packet captured from file or interface
"""
if len(packet) >= 54:
# Verify if user asked for just one specific packet
if self.was_packet_number_defined():
if not self.is_the_packet_number_specified():
self.packet_count += 1
return
# DEBUG:
# print("Packet Number: %s" % self.packet_count)
pkt = Packet(packet, self.packet_count, header)
if pkt.reconnect_error:
if isinstance(self.stats, OFStats):
# OFStats counts reconnects
self.stats.process_packet(pkt)
if isinstance(self.notifications, Notifications):
# Send notifications via Slack
self.notifications.send_msg(pkt)
elif pkt.is_openflow_packet:
valid_result = pkt.process_openflow_messages()
if valid_result:
# Apps go here:
if isinstance(self.oft, OessFvdTracer):
# FVD_Tracer does not print the packets
self.oft.process_packet(pkt)
if isinstance(self.ofp_proxy, OFProxy):
# OFP_PROXY associates IP:PORT to DPID
self.ofp_proxy.process_packet(pkt)
if isinstance(self.stats, OFStats):
# OFStats print the packets
self.stats.process_packet(pkt)
if isinstance(self.notifications, Notifications):
# Send notifications via Slack
self.notifications.send_msg(pkt)
if not isinstance(self.oft, OessFvdTracer):
# Print Packets
pkt.print_packet()
if self.influx:
# tell influx to wake up and update immediately
self.trigger_event.set()
del pkt
if self.is_the_packet_number_specified():
# If a specific packet was selected, end here.
raise EndOfPcapFile
        elif len(packet) == 0:
return 3
self.packet_count += 1
def was_packet_number_defined(self):
"""
In case user wants to see a specific packet inside a
specific pcap file, provide file name with the specific
packet number after ":"
-r file.pcap:packet_number
Returns:
True if a packet number was specified
False: if a packet number was not specified
"""
if self.packet_number != 0:
return True
return False
def is_the_packet_number_specified(self):
"""
If user wants to see a specific packet inside a
specific pcap file and the packet_count is that
number, return True. Otherwise, return false
Returns:
True if packet_count matches
False: if packet_count does not match
"""
        return self.packet_count == self.packet_number
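    # Example (hypothetical invocation, matching the docstrings above):
    #   ./ofp_sniffer.py -r trace.pcap:42
    # reads trace.pcap and processes only packet number 42.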
def main():
"""
Main function.
Instantiates RunSniffer and run it
"""
try:
logging.config.dictConfig(yaml.safe_load(open('logging.yml', 'r')))
logger = logging.getLogger(__name__)
sniffer = RunSniffer()
logger.info("OFP_Sniffer started.")
sniffer.run()
except ErrorFilterFile as msg:
print(msg)
sys.exit(4)
except FileNotFoundError as msg:
print(msg)
sys.exit(5)
if __name__ == "__main__":
main()
|
apache-2.0
| -7,528,601,546,497,908,000
| 31.479675
| 79
| 0.561702
| false
| 4.380482
| false
| false
| false
|
kitchenbudapest/vr
|
hud.py
|
1
|
4413
|
## INFO ########################################################################
## ##
## plastey ##
## ======= ##
## ##
## Oculus Rift + Leap Motion + Python 3 + C + Blender + Arch Linux ##
## Version: 0.2.0.980 (20150510) ##
## File: hud.py ##
## ##
## For more information about the project, visit ##
## <http://plastey.kibu.hu>. ##
## Copyright (C) 2015 Peter Varo, Kitchen Budapest ##
## ##
## This program is free software: you can redistribute it and/or modify it ##
## under the terms of the GNU General Public License as published by the ##
## Free Software Foundation, either version 3 of the License, or ##
## (at your option) any later version. ##
## ##
## This program is distributed in the hope that it will be useful, but ##
## WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. ##
## See the GNU General Public License for more details. ##
## ##
## You should have received a copy of the GNU General Public License ##
## along with this program, most likely a file in the root directory, ##
## called 'LICENSE'. If not, see <http://www.gnu.org/licenses>. ##
## ##
######################################################################## INFO ##
# Import python modules
from collections import deque
#------------------------------------------------------------------------------#
class Text:
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def __init__(self, text_first_object,
text_other_object,
time_getter,
interval):
self._text_first = text_first_object
self._text_other = text_other_object
self._get_time = time_getter
self._interval = interval
self._last_time = time_getter()
self._messages = deque()
self._still_empty = True
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def _update(self):
# Write the changed and constructed messages to display
messages = iter(self._messages)
try:
self._text_first.text = next(messages)
self._text_other.text = '\n'.join(messages)
except StopIteration:
self._text_first.text = self._text_other.text = ''
# Update timer
self._last_time = self._get_time()
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def clear(self):
self._messages = deque()
self._update()
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def update(self):
# If there are any messages left
if len(self._messages):
# If interval passed
if (self._last_time + self._interval) <= self._get_time():
# Remove oldest item
self._messages.pop()
# Update display
self._update()
# If deque just become empty
elif not self._still_empty:
# Switch state flag and update display
self._still_empty = True
self._update()
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def write(self, message):
        # Add new message, flag the display as non-empty and update it
        # (otherwise the 'just became empty' branch in update() never fires)
        self._messages.appendleft(message)
        self._still_empty = False
        self._update()
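#------------------------------------------------------------------------------#
# A minimal usage sketch (hypothetical stand-ins: any objects exposing a
# `.text` attribute work as the two text objects, and any monotonic clock
# works as the time getter):
#
#   import time
#   class _Label:          # stand-in for a Blender font/text object
#       text = ''
#   hud = Text(_Label(), _Label(), time.time, interval=3.0)
#   hud.write('hello')     # newest message is shown on the first line
#   hud.update()           # call once per frame; old messages expire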
|
gpl-3.0
| 2,802,582,969,299,939,300
| 48.58427
| 80
| 0.363245
| false
| 4.765659
| false
| false
| false
|
SakuradaJun/django-rest-auth
|
rest_auth/registration/serializers.py
|
1
|
3313
|
from django.http import HttpRequest
from rest_framework import serializers
from requests.exceptions import HTTPError
from allauth.socialaccount.helpers import complete_social_login
class SocialLoginSerializer(serializers.Serializer):
access_token = serializers.CharField(required=False)
code = serializers.CharField(required=False)
def validate(self, attrs):
view = self.context.get('view')
request = self.context.get('request')
if not isinstance(request, HttpRequest):
request = request._request
if not view:
raise serializers.ValidationError(
'View is not defined, pass it as a context variable'
)
self.adapter_class = getattr(view, 'adapter_class', None)
if not self.adapter_class:
raise serializers.ValidationError(
'Define adapter_class in view'
)
self.adapter = self.adapter_class()
app = self.adapter.get_provider().get_app(request)
# More info on code vs access_token
# http://stackoverflow.com/questions/8666316/facebook-oauth-2-0-code-and-token
# We have the access_token straight
        if 'access_token' in attrs:
access_token = attrs.get('access_token')
# We did not get the access_token, but authorization code instead
        elif 'code' in attrs:
self.callback_url = getattr(view, 'callback_url', None)
self.client_class = getattr(view, 'client_class', None)
            if not self.callback_url:
                raise serializers.ValidationError(
                    'Define callback_url in view'
                )
            if not self.client_class:
                raise serializers.ValidationError(
                    'Define client_class in view'
                )
code = attrs.get('code')
provider = self.adapter.get_provider()
scope = provider.get_scope(request)
client = self.client_class(
request,
app.client_id,
app.secret,
self.adapter.access_token_method,
self.adapter.access_token_url,
self.callback_url,
scope
)
token = client.get_access_token(code)
access_token = token['access_token']
token = self.adapter.parse_token({'access_token': access_token})
token.app = app
try:
login = self.adapter.complete_login(
request,
app,
token,
response=access_token,
)
login.token = token
complete_social_login(request, login)
except HTTPError:
raise serializers.ValidationError('Incorrect value')
if not login.is_existing:
login.lookup()
login.save(request, connect=True)
attrs['user'] = login.account.user
return attrs
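# A minimal usage sketch (hypothetical view; assumes django-allauth's
# Facebook provider is installed and that a SocialLoginView-style view,
# as in the package docs, wires this serializer up):
#
#   from allauth.socialaccount.providers.facebook.views import FacebookOAuth2Adapter
#   from rest_auth.registration.views import SocialLoginView
#   class FacebookLogin(SocialLoginView):
#       adapter_class = FacebookOAuth2Adapter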
|
mit
| -789,387,178,647,441,900
| 32.464646
| 86
| 0.56233
| false
| 4.829446
| false
| false
| false
|
lord63/zhihudaily
|
zhihudaily/views/utils.py
|
1
|
1116
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from flask import send_file, g, Blueprint
from zhihudaily.configs import Config
from zhihudaily.crawler import Crawler
from zhihudaily._compat import StringIO
utils = Blueprint('utils', __name__)
@utils.before_app_request
def before_request():
g.db = Config.database
g.db.connect()
@utils.after_app_request
def after_request(response):
g.db.close()
return response
@utils.route('/img/<server>/<path:hash_string>')
def image(server, hash_string):
"""Handle image, use redis to cache image."""
image_url = 'https://{0}.zhimg.com/{1}'.format(server, hash_string)
cached = Config.redis_server.get(image_url)
if cached:
buffer_image = StringIO(cached)
buffer_image.seek(0)
else:
r = Crawler().send_request(image_url)
buffer_image = StringIO(r.content)
buffer_image.seek(0)
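        # cache the fetched bytes for 60*60*24*4 seconds, i.e. four days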
Config.redis_server.setex(image_url, (60*60*24*4),
buffer_image.getvalue())
return send_file(buffer_image, mimetype='image/jpeg')
|
mit
| 1,451,385,733,581,734,100
| 25.571429
| 71
| 0.653226
| false
| 3.392097
| false
| false
| false
|
geometalab/G4SE-Compass
|
compass-api/G4SE/api/migrations/0004_geoservicemetadata.py
|
1
|
3940
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3.dev20161004124613 on 2016-10-10 12:42
from __future__ import unicode_literals
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('api', '0003_recordtag'),
]
operations = [
migrations.CreateModel(
name='GeoServiceMetadata',
fields=[
('api_id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='uuid')),
('identifier', models.CharField(max_length=255, verbose_name='external identifier')),
('language', models.CharField(choices=[('de', 'de'), ('fr', 'fr'), ('en', 'en')], default='de', max_length=20, verbose_name='language')),
('title', models.CharField(max_length=255, verbose_name='title')),
('abstract', models.TextField(verbose_name='abstract')),
('publication_year', models.IntegerField(verbose_name='publication year')),
('publication_lineage', models.CharField(blank=True, max_length=255, null=True, verbose_name='history')),
('is_latest', models.BooleanField(default=False, max_length=255, verbose_name='latest of series')),
('geography', models.CharField(default='Schweiz', max_length=255, verbose_name='geography')),
                ('extent', models.CharField(blank=True, help_text='needs to follow the form `BOX(x1 y1,x2 y2)`', max_length=255, null=True, verbose_name='extent')),
('geodata_type', models.CharField(choices=[('raster', 'raster'), ('vector', 'vector'), ('other', 'other')], max_length=255, verbose_name='geodata type')),
('source', models.CharField(max_length=2083, verbose_name='source')),
('metadata_link', models.URLField(max_length=2083, verbose_name='metadata link')),
('access_link', models.URLField(max_length=2083, verbose_name='access link')),
('base_link', models.URLField(blank=True, max_length=2083, null=True, verbose_name='access to data')),
('collection', models.CharField(blank=True, max_length=255, null=True, verbose_name='group name')),
('dataset', models.CharField(blank=True, max_length=255, null=True, verbose_name='dataset name')),
('arcgis_layer_link', models.URLField(blank=True, max_length=2083, null=True, verbose_name='ArcGIS layer link')),
('qgis_layer_link', models.URLField(blank=True, max_length=2083, null=True, verbose_name='QGIS layer link')),
('arcgis_symbology_link', models.URLField(blank=True, max_length=2083, null=True, verbose_name='ArcGIS symbology link')),
('qgis_symbology_link', models.URLField(blank=True, max_length=2083, null=True, verbose_name='QGIS symbology link')),
('service_type', models.CharField(blank=True, max_length=255, null=True, verbose_name='service type')),
('crs', models.CharField(max_length=20, verbose_name='coordinate reference system')),
('term_link', models.URLField(max_length=2083, verbose_name='terms of use')),
('proved', models.DateField(blank=True, null=True, verbose_name='proving date')),
('visibility', models.CharField(choices=[('public', 'public'), ('test', 'test'), ('hsr-internal', 'hsr-internal')], default='public', max_length=255, verbose_name='access restriction')),
('login_name', models.CharField(max_length=255, verbose_name='login name')),
('modified', models.DateTimeField(auto_now=True, null=True, verbose_name='last modification')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='created on')),
('imported', models.BooleanField(default=False, editable=False, verbose_name='imported')),
],
),
]
|
mit
| 7,452,857,091,811,495,000
| 76.254902
| 202
| 0.624365
| false
| 3.943944
| false
| false
| false
|
teran/bootloader-web
|
bootloader/tools/api/__init__.py
|
1
|
1159
|
from django.contrib.contenttypes.models import ContentType
from rest_framework import viewsets
from rest_framework.permissions import IsAdminUser
from tools.models import Agent, Credential
from tools.serializers import AgentSerializer, CredentialSerializer
class CredentialViewSet(viewsets.ModelViewSet):
queryset = Credential.objects.all()
serializer_class = CredentialSerializer
permission_classes = (IsAdminUser,)
def get_queryset(self):
filterq = {}
        if 'object' in self.request.query_params:
            filterq['content_type'] = ContentType.objects.get(
                model=self.request.query_params['object']).pk
        if 'object_id' in self.request.query_params:
            filterq['object_id'] = self.request.query_params['object_id']
        if 'name' in self.request.query_params:
            filterq['name'] = self.request.query_params['name']
queryset = Credential.objects.filter(**filterq)
return queryset
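    # Example (hypothetical client call, assuming the default router URLs):
    #   GET /credentials/?object=agent&object_id=3&name=deploy-key
    # narrows the queryset to credentials attached to Agent #3 named
    # 'deploy-key', via the content-type lookup above.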
class AgentViewSet(viewsets.ModelViewSet):
queryset = Agent.objects.all()
serializer_class = AgentSerializer
permission_classes = (IsAdminUser,)
|
gpl-2.0
| 867,047,386,432,015,700
| 34.121212
| 73
| 0.704918
| false
| 4.184116
| false
| false
| false
|
euccas/CodingPuzzles-Python
|
leet/source/searchDFS/combinations.py
|
1
|
1336
|
class Solution:
"""
@param n: Given the range of numbers
@param k: Given the numbers of combinations
@return: All the combinations of k numbers out of 1..n
"""
def combine(self, n, k):
# write your code here
if n is None or k is None:
return []
self.result = []
self.dfs(n, k, 0, [])
return self.result
def dfs(self, n, k, startpos, combination):
if len(combination) == k:
            self.result.append(combination)
            return
for i in range(startpos, n):
#if len(combination) > k:
# return
self.dfs(n, k, i+1, combination+[i+1])
class Solution1():
"""
Faster than Solution, because no need calculate len(curr_result)
"""
def combine(self, n, k):
"""
:type n: int
:type k: int
:rtype: List[List[int]]
"""
if n is None or k is None or k == 0:
return []
result = []
self.dfs(n, k, 1, [], result)
return result
def dfs(self, n, k, start_pos, curr_result, result):
if k == 0:
result.append(curr_result[:])
return
for i in range(start_pos, n+1):
curr_result.append(i)
self.dfs(n, k-1, i+1, curr_result, result)
curr_result.pop()
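# A quick usage sketch (hypothetical driver, not part of the original file):
#
#   print(Solution().combine(4, 2))
#   print(Solution1().combine(4, 2))
#   # both print [[1, 2], [1, 3], [1, 4], [2, 3], [2, 4], [3, 4]]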
|
mit
| -4,516,779,564,583,097,300
| 25.72
| 68
| 0.505988
| false
| 3.660274
| false
| false
| false
|
gkoelln/youtube-dl
|
youtube_dl/extractor/svt.py
|
1
|
9890
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_parse_qs,
compat_urllib_parse_urlparse,
)
from ..utils import (
determine_ext,
dict_get,
int_or_none,
try_get,
urljoin,
compat_str,
)
class SVTBaseIE(InfoExtractor):
_GEO_COUNTRIES = ['SE']
def _extract_video(self, video_info, video_id):
formats = []
for vr in video_info['videoReferences']:
player_type = vr.get('playerType') or vr.get('format')
vurl = vr['url']
ext = determine_ext(vurl)
if ext == 'm3u8':
formats.extend(self._extract_m3u8_formats(
vurl, video_id,
ext='mp4', entry_protocol='m3u8_native',
m3u8_id=player_type, fatal=False))
elif ext == 'f4m':
formats.extend(self._extract_f4m_formats(
vurl + '?hdcore=3.3.0', video_id,
f4m_id=player_type, fatal=False))
elif ext == 'mpd':
if player_type == 'dashhbbtv':
formats.extend(self._extract_mpd_formats(
vurl, video_id, mpd_id=player_type, fatal=False))
else:
formats.append({
'format_id': player_type,
'url': vurl,
})
if not formats and video_info.get('rights', {}).get('geoBlockedSweden'):
self.raise_geo_restricted(
'This video is only available in Sweden',
countries=self._GEO_COUNTRIES)
self._sort_formats(formats)
subtitles = {}
subtitle_references = dict_get(video_info, ('subtitles', 'subtitleReferences'))
if isinstance(subtitle_references, list):
for sr in subtitle_references:
subtitle_url = sr.get('url')
subtitle_lang = sr.get('language', 'sv')
if subtitle_url:
if determine_ext(subtitle_url) == 'm3u8':
# TODO(yan12125): handle WebVTT in m3u8 manifests
continue
subtitles.setdefault(subtitle_lang, []).append({'url': subtitle_url})
title = video_info.get('title')
series = video_info.get('programTitle')
season_number = int_or_none(video_info.get('season'))
episode = video_info.get('episodeTitle')
episode_number = int_or_none(video_info.get('episodeNumber'))
duration = int_or_none(dict_get(video_info, ('materialLength', 'contentDuration')))
age_limit = None
adult = dict_get(
video_info, ('inappropriateForChildren', 'blockedForChildren'),
skip_false_values=False)
if adult is not None:
age_limit = 18 if adult else 0
return {
'id': video_id,
'title': title,
'formats': formats,
'subtitles': subtitles,
'duration': duration,
'age_limit': age_limit,
'series': series,
'season_number': season_number,
'episode': episode,
'episode_number': episode_number,
}
class SVTIE(SVTBaseIE):
_VALID_URL = r'https?://(?:www\.)?svt\.se/wd\?(?:.*?&)?widgetId=(?P<widget_id>\d+)&.*?\barticleId=(?P<id>\d+)'
_TEST = {
        'url': 'http://www.svt.se/wd?widgetId=23991&sectionId=541&articleId=2900353&type=embed&contextSectionId=123&autostart=false',
'md5': '33e9a5d8f646523ce0868ecfb0eed77d',
'info_dict': {
'id': '2900353',
'ext': 'mp4',
'title': 'Stjärnorna skojar till det - under SVT-intervjun',
'duration': 27,
'age_limit': 0,
},
}
@staticmethod
def _extract_url(webpage):
mobj = re.search(
r'(?:<iframe src|href)="(?P<url>%s[^"]*)"' % SVTIE._VALID_URL, webpage)
if mobj:
return mobj.group('url')
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
widget_id = mobj.group('widget_id')
article_id = mobj.group('id')
info = self._download_json(
'http://www.svt.se/wd?widgetId=%s&articleId=%s&format=json&type=embed&output=json' % (widget_id, article_id),
article_id)
info_dict = self._extract_video(info['video'], article_id)
info_dict['title'] = info['context']['title']
return info_dict
class SVTPlayBaseIE(SVTBaseIE):
_SVTPLAY_RE = r'root\s*\[\s*(["\'])_*svtplay\1\s*\]\s*=\s*(?P<json>{.+?})\s*;\s*\n'
class SVTPlayIE(SVTPlayBaseIE):
IE_DESC = 'SVT Play and Öppet arkiv'
_VALID_URL = r'https?://(?:www\.)?(?:svtplay|oppetarkiv)\.se/(?:video|klipp)/(?P<id>[0-9]+)'
_TESTS = [{
'url': 'http://www.svtplay.se/video/5996901/flygplan-till-haile-selassie/flygplan-till-haile-selassie-2',
'md5': '2b6704fe4a28801e1a098bbf3c5ac611',
'info_dict': {
'id': '5996901',
'ext': 'mp4',
'title': 'Flygplan till Haile Selassie',
'duration': 3527,
'thumbnail': r're:^https?://.*[\.-]jpg$',
'age_limit': 0,
'subtitles': {
'sv': [{
'ext': 'wsrt',
}]
},
},
}, {
# geo restricted to Sweden
'url': 'http://www.oppetarkiv.se/video/5219710/trollflojten',
'only_matching': True,
}, {
'url': 'http://www.svtplay.se/klipp/9023742/stopptid-om-bjorn-borg',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
data = self._parse_json(
self._search_regex(
self._SVTPLAY_RE, webpage, 'embedded data', default='{}',
group='json'),
video_id, fatal=False)
thumbnail = self._og_search_thumbnail(webpage)
if data:
video_info = try_get(
data, lambda x: x['context']['dispatcher']['stores']['VideoTitlePageStore']['data']['video'],
dict)
if video_info:
info_dict = self._extract_video(video_info, video_id)
info_dict.update({
'title': data['context']['dispatcher']['stores']['MetaStore']['title'],
'thumbnail': thumbnail,
})
return info_dict
video_id = self._search_regex(
r'<video[^>]+data-video-id=["\']([\da-zA-Z-]+)',
webpage, 'video id', default=None)
if video_id:
data = self._download_json(
'https://api.svt.se/videoplayer-api/video/%s' % video_id,
video_id, headers=self.geo_verification_headers())
info_dict = self._extract_video(data, video_id)
if not info_dict.get('title'):
info_dict['title'] = re.sub(
r'\s*\|\s*.+?$', '',
info_dict.get('episode') or self._og_search_title(webpage))
return info_dict
class SVTSeriesIE(SVTPlayBaseIE):
_VALID_URL = r'https?://(?:www\.)?svtplay\.se/(?P<id>[^/?&#]+)'
_TESTS = [{
'url': 'https://www.svtplay.se/rederiet',
'info_dict': {
'id': 'rederiet',
'title': 'Rederiet',
'description': 'md5:505d491a58f4fcf6eb418ecab947e69e',
},
'playlist_mincount': 318,
}, {
'url': 'https://www.svtplay.se/rederiet?tab=sasong2',
'info_dict': {
'id': 'rederiet-sasong2',
'title': 'Rederiet - Säsong 2',
'description': 'md5:505d491a58f4fcf6eb418ecab947e69e',
},
'playlist_count': 12,
}]
@classmethod
def suitable(cls, url):
return False if SVTIE.suitable(url) or SVTPlayIE.suitable(url) else super(SVTSeriesIE, cls).suitable(url)
def _real_extract(self, url):
series_id = self._match_id(url)
qs = compat_parse_qs(compat_urllib_parse_urlparse(url).query)
season_slug = qs.get('tab', [None])[0]
if season_slug:
series_id += '-%s' % season_slug
webpage = self._download_webpage(
url, series_id, 'Downloading series page')
root = self._parse_json(
self._search_regex(
self._SVTPLAY_RE, webpage, 'content', group='json'),
series_id)
season_name = None
entries = []
for season in root['relatedVideoContent']['relatedVideosAccordion']:
if not isinstance(season, dict):
continue
if season_slug:
if season.get('slug') != season_slug:
continue
season_name = season.get('name')
videos = season.get('videos')
if not isinstance(videos, list):
continue
for video in videos:
content_url = video.get('contentUrl')
if not content_url or not isinstance(content_url, compat_str):
continue
entries.append(
self.url_result(
urljoin(url, content_url),
ie=SVTPlayIE.ie_key(),
video_title=video.get('title')
))
metadata = root.get('metaData')
if not isinstance(metadata, dict):
metadata = {}
title = metadata.get('title')
season_name = season_name or season_slug
if title and season_name:
title = '%s - %s' % (title, season_name)
elif season_slug:
title = season_slug
return self.playlist_result(
entries, series_id, title, metadata.get('description'))
|
unlicense
| -2,224,861,653,834,999,800
| 34.060284
| 133
| 0.516132
| false
| 3.646994
| false
| false
| false
|
Cosiroc/bleau-database
|
BleauDataBase/GeoFormat/GPX.py
|
2
|
6322
|
####################################################################################################
#
# Bleau Database - A database of the bouldering area of Fontainebleau
# Copyright (C) 2015 Fabrice Salvaire
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
####################################################################################################
import logging
try:
from lxml import etree
except ImportError:
    logging.warning('lxml module is not available')
etree = None
####################################################################################################
from ..FieldObject import FromJsonMixin
####################################################################################################
class WayPoint(FromJsonMixin):
# < wpt lat="latitudeType [1] ?" lon="longitudeType [1] ?">
# <ele> xsd:decimal </ele> [0..1] ?
# <time> xsd:dateTime </time> [0..1] ?
# <magvar> degreesType </magvar> [0..1] ?
# <geoidheight> xsd:decimal </geoidheight> [0..1] ?
# <name> xsd:string </name> [0..1] ?
# <cmt> xsd:string </cmt> [0..1] ?
# <desc> xsd:string </desc> [0..1] ?
# <src> xsd:string </src> [0..1] ?
# <link> linkType </link> [0..*] ?
# <sym> xsd:string </sym> [0..1] ?
# <type> xsd:string </type> [0..1] ?
# <fix> fixType </fix> [0..1] ?
# <sat> xsd:nonNegativeInteger </sat> [0..1] ?
# <hdop> xsd:decimal </hdop> [0..1] ?
# <vdop> xsd:decimal </vdop> [0..1] ?
# <pdop> xsd:decimal </pdop> [0..1] ?
# <ageofdgpsdata> xsd:decimal </ageofdgpsdata> [0..1] ?
# <dgpsid> dgpsStationType </dgpsid> [0..1] ?
# <extensions> extensionsType </extensions> [0..1] ?
# </wpt>
lat = float
lon = float
ele = float
time = str
magvar = float
geoidheight = float
name = str
cmt = str
desc = str
src = str
link = str
sym = str
type = str
fix = str
sat = int
hdop = float
vdop = float
pdop = float
ageofdgpsdata = float
dgpsid = int
####################################################################################################
class GPX:
##############################################
def __init__(self, gpx_path=None, schema_path=None):
self._waypoints = []
if gpx_path is not None:
self._parse(gpx_path, schema_path)
##############################################
def _parse(self, gpx_path, schema_path=None):
if schema_path is not None:
schema = etree.XMLSchema(file=schema_path)
parser = etree.XMLParser(schema=schema)
else:
parser = None
namespaces = dict(topografix='http://www.topografix.com/GPX/1/1')
tree = etree.parse(gpx_path, parser=parser)
# root = tree.getroot()
# for item in root:
# print(item.tag, tree.getpath(item))
waypoints = []
for waypoint_element in tree.xpath('topografix:wpt', namespaces=namespaces):
d = self._attribute_to_dict(waypoint_element, ('lat', 'lon'))
for element in waypoint_element:
field = etree.QName(element.tag).localname
d[field] = element.text
waypoint = WayPoint(**d)
waypoints.append(waypoint)
self._waypoints = waypoints
##############################################
@staticmethod
def _attribute_to_dict(node, fields):
attributes = node.attrib
return {field:attributes[field] for field in fields}
##############################################
@property
def waypoints(self):
return self._waypoints
##############################################
def add_waypoint(self, waypoint):
self._waypoints.append(waypoint)
##############################################
def add_waypoints(self, waypoints):
self._waypoints.extend(waypoints)
##############################################
def add_new_waypoint(self, **kwargs):
        self.add_waypoint(WayPoint(**kwargs))
##############################################
def write(self, path, encoding='utf-8'):
with etree.xmlfile(path,
encoding=encoding,
compression=None,
close=True,
buffered=True) as xf:
xf.write_declaration() # standalone=True
attributes = {
'version': '1.1',
'creator': 'BleauDataBase',
'xmlns': 'http://www.topografix.com/GPX/1/1',
'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance',
'xsi:schemaLocation': 'http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd',
}
with xf.element('gpx', **attributes):
for waypoint in self._waypoints:
d = waypoint.to_json(only_defined=True)
attributes = {field:str(d[field]) for field in ('lon', 'lat')}
del d['lon']
del d['lat']
with xf.element('wpt', **attributes):
# Fixme: iter ?
# for field in waypoint.__field_names__:
# value = getattr(waypoint, field)
# if value is not None:
for field, value in d.items():
with xf.element(field):
xf.write(str(value))
xf.flush()
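####################################################################################################
# A minimal usage sketch (hypothetical file names; requires lxml):
#
#   gpx = GPX('boulders.gpx')                     # parse an existing file
#   for wp in gpx.waypoints:
#       print(wp.name, wp.lat, wp.lon)
#   gpx.add_waypoint(WayPoint(lat=48.447, lon=2.637, name='Bas Cuvier'))
#   gpx.write('boulders-out.gpx')                 # serialise back to GPX 1.1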
|
agpl-3.0
| 4,304,305,302,924,749,300
| 33.546448
| 116
| 0.465201
| false
| 4.070831
| false
| false
| false
|
hugosenari/simplui
|
simplui/container.py
|
1
|
5994
|
# ----------------------------------------------------------------------
# Copyright (c) 2009 Tristam MacDonald
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of DarkCoda nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------
from .widget import Widget
from .geometry import Rect
class Container(Widget):
"""Base class for all GUI containers, also usable by itself"""
def __init__(self, **kwargs):
"""Create a container
Keyword arguments:
name -- unique widget identifier
children -- list of child elements to be added to this container
"""
Widget.__init__(self, **kwargs)
self.children = []
children = kwargs.get('children', [])
for c in children:
self.add(c)
def _get_visible(self):
return self._visible
def _set_visible(self, visible):
Widget._set_visible(self, visible)
for c in self.children:
c.visible = visible
visible = property(_get_visible, _set_visible)
def update_global_coords(self):
Widget.update_global_coords(self)
for c in self.children:
c.update_global_coords()
def update_elements(self):
Widget.update_elements(self)
for c in self.children:
c.update_elements()
def update_theme(self, theme):
Widget.update_theme(self, theme)
for c in self.children:
c.update_theme(theme)
def update_batch(self, batch, group):
Widget.update_batch(self, batch, group)
for c in self.children:
c.update_batch(batch, group)
def update_names(self, oldname=None):
Widget.update_names(self, oldname)
for c in self.children:
c.update_names(oldname)
def remove_names(self):
Widget.remove_names(self)
for c in self.children:
c.remove_names()
def add(self, child):
self.children.append(child)
child.parent = self
        if self.theme:
            child.update_theme(self.theme)
child.update_batch(self._batch, self._group)
self.find_root().update_layout()
child.update_names()
def remove(self, child):
child.remove_names()
self.children.remove(child)
child.parent = None
child.update_batch(None, None)
self.find_root().update_layout()
def on_mouse_press(self, x, y, button, modifiers):
Widget.on_mouse_press(self, x, y, button, modifiers)
r = self.clip_rect()
for c in self.children:
if r.intersect(c.bounds()).hit_test(x, y):
c.on_mouse_press(x, y, button, modifiers)
def on_mouse_drag(self, x, y, dx, dy, button, modifiers):
Widget.on_mouse_drag(self, x, y, dx, dy, button, modifiers)
for c in self.children:
c.on_mouse_drag(x, y, dx, dy, button, modifiers)
def on_mouse_release(self, x, y, button, modifiers):
Widget.on_mouse_release(self, x, y, button, modifiers)
r = self.clip_rect()
for c in self.children:
if r.intersect(c.bounds()).hit_test(x, y):
c.on_mouse_release(x, y, button, modifiers)
def on_mouse_scroll(self, x, y, scroll_x, scroll_y):
Widget.on_mouse_scroll(self, x, y, scroll_x, scroll_y)
r = self.clip_rect()
for c in self.children:
if r.intersect(c.bounds()).hit_test(x, y):
c.on_mouse_scroll(x, y, scroll_x, scroll_y)
def on_key_press(self, symbol, modifiers):
Widget.on_key_press(self, symbol, modifiers)
for c in self.children:
c.on_key_press(symbol, modifiers)
def on_text(self, text):
Widget.on_text(self, text)
for c in self.children:
c.on_text(text)
def on_text_motion(self, motion, select=False):
Widget.on_text_motion(self, motion, select)
for c in self.children:
c.on_text_motion(motion, select)
def clip_rect(self):
return Rect(self._gx, self._gy, self.w, self.h)
class SingleContainer(Container):
"""Utility base class for containers restricted to a single child"""
def __init__(self, **kwargs):
if 'children' in kwargs:
del kwargs['children']
Container.__init__(self, **kwargs)
self._content = None
def _get_content(self):
return self._content
def _set_content(self, content):
if self._content:
Container.remove(self, self._content)
self._content = content
if self._content:
Container.add(self, self._content)
self.find_root().update_layout()
content = property(_get_content, _set_content)
def add(self, other):
raise UserWarning('add to the content element')
def remove(self, other):
raise UserWarning('remove from the content element')
def determine_size(self):
if self._content:
self._content.determine_size()
self._pref_size = self._content._pref_size
def reset_size(self, size):
Widget.reset_size(self, size)
if self._content:
self._content.reset_size(size)
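
A minimal usage sketch for the container classes above (illustrative only,
not part of the original file; it assumes a widget tree whose root has no
parent, so find_root() resolves to the container itself):

    root = Container(name='root')
    root.add(Container(name='toolbar'))   # add() wires parent, theme and batch
    root.add(Container(name='body'))
    root.remove(root.children[0])         # remove() detaches names and batch

    panel = SingleContainer(name='panel')
    panel.content = Container(name='inner')  # single-child slot; panel.add()
                                             # would raise UserWarning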
|
bsd-3-clause
| -8,756,964,571,835,446,000
| 28.239024
| 72
| 0.685352
| false
| 3.36175
| false
| false
| false
|
dwavesystems/dimod
|
tests/test_variables.py
|
1
|
6327
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections.abc as abc
import decimal
import fractions
import itertools
import unittest
import numpy as np
from parameterized import parameterized_class
from dimod.variables import Variables
class TestAppend(unittest.TestCase):
def test_conflict(self):
variables = Variables()
variables._append(1)
variables._append() # should take the label 0
variables._append()
self.assertEqual(variables, [1, 0, 2])
class TestDuplicates(unittest.TestCase):
def test_duplicates(self):
# should have no duplicates
variables = Variables(['a', 'b', 'c', 'b'])
self.assertEqual(list(variables), ['a', 'b', 'c'])
def test_count(self):
variables = Variables([1, 1, 1, 4, 5])
self.assertEqual(list(variables), [1, 4, 5])
for v in range(10):
if v in variables:
self.assertEqual(variables.count(v), 1)
else:
self.assertEqual(variables.count(v), 0)
def test_len(self):
variables = Variables('aaaaa')
self.assertEqual(len(variables), 1)
def test_unlike_types_eq_hash(self):
zeros = [0, 0.0, np.int8(0), np.float64(0),
fractions.Fraction(0), decimal.Decimal(0)]
for perm in itertools.permutations(zeros, len(zeros)):
variables = Variables(perm)
self.assertEqual(len(variables), len(set(zeros)))
class TestIndex(unittest.TestCase):
def test_permissive(self):
variables = Variables()
with self.assertRaises(ValueError):
variables.index(0)
self.assertEqual(variables.index(0, permissive=True), 0)
self.assertEqual(variables.index(0, permissive=True), 0)
self.assertEqual(variables.index('a', permissive=True), 1)
class TestPop(unittest.TestCase):
def test_empty(self):
with self.assertRaises(IndexError):
Variables()._pop()
def test_simple(self):
variables = Variables('abc')
self.assertEqual(variables._pop(), 'c')
self.assertEqual(variables, 'ab')
class TestPrint(unittest.TestCase):
def test_pprint(self):
import pprint
variables = Variables(range(10))
        variables._append('a')  # make it not a range
string = pprint.pformat(variables, width=20)
target = '\n'.join(
["Variables([0,",
" 1,",
" 2,",
" 3,",
" 4,",
" 5,",
" 6,",
" 7,",
" 8,",
" 9,",
" 'a'])"])
self.assertEqual(string, target)
def test_repr_empty(self):
variables = Variables()
self.assertEqual(repr(variables), 'Variables()')
def test_repr_mixed(self):
variables = Variables('abc')
self.assertEqual(repr(variables), "Variables(['a', 'b', 'c'])")
def test_repr_range(self):
self.assertEqual(repr(Variables(range(10))),
'Variables({!r})'.format(list(range(10))))
self.assertEqual(repr(Variables(range(11))), 'Variables(range(0, 11))')
class TestRelabel(unittest.TestCase):
def test_permissive(self):
variables = Variables([0, 1])
        # relabels a non-existent variable 2
variables._relabel({0: 'a', 1: 'b', 2: 'c'})
self.assertEqual(variables, Variables('ab'))
def test_swap(self):
variables = Variables([1, 0, 3, 4, 5])
variables._relabel({5: 3, 3: 5})
self.assertEqual(variables, [1, 0, 5, 4, 3])
@parameterized_class(
[dict(name='list', iterable=list(range(5))),
dict(name='string', iterable='abcde'),
dict(name='range', iterable=range(5)),
dict(name='range_reversed', iterable=range(4, -1, -1)),
dict(name='range_start', iterable=range(2, 7)),
dict(name='range_step', iterable=range(0, 10, 2)),
dict(name='mixed', iterable=[0, ('b',), 2.1, 'c', frozenset('d')]),
dict(name='floats', iterable=[0., 1., 2., 3., 4.]),
],
class_name_func=lambda cls, i, inpt: '%s_%s' % (cls.__name__, inpt['name'])
)
class TestIterable(unittest.TestCase):
def test_contains_unhashable(self):
variables = Variables(self.iterable)
self.assertFalse([] in variables)
def test_count_unhashable(self):
variables = Variables(self.iterable)
self.assertEqual(variables.count([]), 0)
def test_index(self):
variables = Variables(self.iterable)
for idx, v in enumerate(self.iterable):
self.assertEqual(variables.index(v), idx)
def test_iterable(self):
variables = Variables(self.iterable)
self.assertEqual(list(variables), list(self.iterable))
def test_equality(self):
variables = Variables(self.iterable)
self.assertEqual(variables, self.iterable)
def test_len(self):
variables = Variables(self.iterable)
self.assertEqual(len(variables), len(self.iterable))
def test_relabel_conflict(self):
variables = Variables(self.iterable)
iterable = self.iterable
        # we want a relabelling that includes identity relabels and that
        # maps onto the same set of labels as the original
target = [iterable[-i] for i in range(len(iterable))]
mapping = dict(zip(iterable, target))
variables._relabel(mapping)
self.assertEqual(variables, target)
def test_relabel_not_hashable(self):
variables = Variables(self.iterable)
mapping = {v: [v] for v in variables}
with self.assertRaises(ValueError):
variables._relabel(mapping)
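
A condensed sketch of the behaviour these tests exercise (illustrative
only; assumes dimod is installed):

    from dimod.variables import Variables

    v = Variables(['a', 'b', 'a'])             # duplicates collapse: ['a', 'b']
    assert v.index('b') == 1
    assert v.index('c', permissive=True) == 2  # unseen label appended, no error
    assert list(v) == ['a', 'b', 'c']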
|
apache-2.0
| 2,359,141,378,282,986,000
| 31.446154
| 79
| 0.599178
| false
| 3.954375
| true
| false
| false
|
micolous/cfsprinter
|
src/pagerprinter/plugins/skypesms.py
|
1
|
2611
|
#!/usr/bin/env python
"""
Skype SMS plugin for pagerprinter.
Copyright 2011-2013 Michael Farrell <http://micolous.id.au/>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Skype documentation that isn't behind a wall (Skype have some funny ideas
about the definition of "open source projects", demanding that you sign up
for a developer programme when you really don't need to):
<http://skype4py.sourceforge.net/doc/html/>
Library download:
<http://sourceforge.net/projects/skype4py/files/>
Note that the API is entirely un-pythonic, so be careful. It seems
like .NET coding conventions.
If you're finding yourself sending a lot of messages, sign up for a
proper SMS gateway. It's cheaper and doesn't require Skype to be
running.
"""
from __future__ import absolute_import
from . import BasePlugin
try:
from Skype4Py import Skype, smsMessageTypeOutgoing
except ImportError:
print "NOTICE: skypesms plugin requires Skype4Py to be installed"
print "http://sourceforge.net/projects/skype4py/"
PLUGIN = None
else:
# make our plugin!
class SkypePlugin(BasePlugin):
def __init__(self):
print "Attempting to connect to Skype API. If Python crashes, this"
print "could mean that Skype isn't running at the moment."
print ""
print "(There's no way around this at present -- Skype's Python"
print "libraries suck. It also this seems to crash all the time"
print "on OSX.)"
# connect to skype
self.skype = Skype()
			# skype4py is quite un-pythonic with its method names.
self.skype.Attach()
# by now skype4py would crash if skype isn't running.
def configure(self, c):
# read in phone numbers we need
self.numbers = [
x.strip()
for x
in c.get('skypesms', 'to').lower().split(',')
]
def execute(self, msg, unit, address, when, printer, print_copies):
			# let's just send the whole message verbatim.
sms = self.skype.CreateSms(smsMessageTypeOutgoing, *self.numbers)
sms.Body = "%s: %s - %s" % (when.ctime(), msg, unit)
sms.Send()
PLUGIN = SkypePlugin
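
A hypothetical driver for the plugin above (illustrative only; the config
file name and the message values are invented, and the code is Python 2 to
match the plugin):

    from ConfigParser import ConfigParser
    from datetime import datetime
    from pagerprinter.plugins.skypesms import PLUGIN

    if PLUGIN is not None:     # PLUGIN is None when Skype4Py is missing
        plugin = PLUGIN()      # attaches to a running Skype instance
        cfg = ConfigParser()
        cfg.read('pager.cfg')  # needs a [skypesms] section with to=...
        plugin.configure(cfg)
        plugin.execute('MVA', 'RESCUE 1', '1 Example St',
                       datetime.now(), None, 0)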
|
gpl-3.0
| 4,111,728,540,101,125,000
| 32.474359
| 71
| 0.733435
| false
| 3.472074
| false
| false
| false
|