repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
TrackDR/dx | refs/heads/master | dx/dx_models.py | 2 | #
# DX Analytics
# Base Classes and Model Classes for Simulation
# dx_models.py
#
# DX Analytics is a financial analytics library, mainly for
# derviatives modeling and pricing by Monte Carlo simulation
#
# (c) Dr. Yves J. Hilpisch
# The Python Quants GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
from dx_frame import *
class simulation_class(object):
    ''' Providing base methods for simulation classes.

    Attributes
    ==========
    name : string
        name of the object
    mar_env : instance of market_environment
        market environment data for simulation
    corr : boolean
        True if correlated with other model object

    Methods
    =======
    generate_time_grid :
        returns time grid for simulation
    get_instrument_values :
        returns the current instrument values (array)
    '''

    def __init__(self, name, mar_env, corr):
        # pull all generic simulation parameters from the market environment;
        # NOTE(review): the outer bare except swallows any missing key and
        # only prints a message, leaving the object half-initialized
        try:
            self.name = name
            self.pricing_date = mar_env.pricing_date
            self.initial_value = mar_env.get_constant('initial_value')
            self.volatility = mar_env.get_constant('volatility')
            self.final_date = mar_env.get_constant('final_date')
            self.currency = mar_env.get_constant('currency')
            self.frequency = mar_env.get_constant('frequency')
            self.paths = mar_env.get_constant('paths')
            self.discount_curve = mar_env.get_curve('discount_curve')
            try:
                # if time_grid in mar_env take this
                # (for portfolio valuation)
                self.time_grid = mar_env.get_list('time_grid')
            except:
                # optional entry -- grid is generated lazily later
                self.time_grid = None
            try:
                # if there are special dates, then add these
                self.special_dates = mar_env.get_list('special_dates')
            except:
                self.special_dates = []
            # simulated paths cache; None until generate_paths() has run
            self.instrument_values = None
            self.correlated = corr
            if corr is True:
                # only needed in a portfolio context when
                # risk factors are correlated
                self.cholesky_matrix = mar_env.get_list('cholesky_matrix')
                self.rn_set = mar_env.get_list('rn_set')[self.name]
                self.random_numbers = mar_env.get_list('random_numbers')
        except:
            print "Error parsing market environment."

    def generate_time_grid(self):
        ''' Builds self.time_grid as a sorted, de-duplicated array of
        datetimes from pricing_date to final_date at self.frequency. '''
        start = self.pricing_date
        end = self.final_date
        # pandas date_range function
        # freq = e.g. 'B' for Business Day,
        # 'W' for Weekly, 'M' for Monthly
        time_grid = pd.date_range(start=start, end=end,
                                  freq=self.frequency).to_pydatetime()
        time_grid = list(time_grid)
        # enhance time_grid by start, end and special_dates
        if start not in time_grid:
            time_grid.insert(0, start)
            # insert start date if not in list
        if end not in time_grid:
            time_grid.append(end)
            # insert end date if not in list
        if len(self.special_dates) > 0:
            # add all special dates
            time_grid.extend(self.special_dates)
            # delete duplicates and sort
            time_grid = sorted(set(time_grid))
        self.time_grid = np.array(time_grid)

    def get_instrument_values(self, fixed_seed=True):
        ''' Returns the simulated paths, running the (subclass-provided)
        generate_paths() on first access or when fixed_seed is False. '''
        if self.instrument_values is None:
            # only initiate simulation if there are no instrument values
            self.generate_paths(fixed_seed=fixed_seed, day_count=365.)
        elif fixed_seed is False:
            # also initiate re-simulation when fixed_seed is False
            self.generate_paths(fixed_seed=fixed_seed, day_count=365.)
        return self.instrument_values
class geometric_brownian_motion(simulation_class):
    ''' Simulation class for the Black-Scholes-Merton
    geometric Brownian motion model.

    Attributes
    ==========
    name : string
        name of the object
    mar_env : instance of market_environment
        market environment data for simulation
    corr : boolean
        True if correlated with other model simulation object

    Methods
    =======
    update :
        updates parameters
    generate_paths :
        returns Monte Carlo paths given the market environment
    '''

    def __init__(self, name, mar_env, corr=False):
        super(geometric_brownian_motion, self).__init__(name, mar_env, corr)

    def update(self, initial_value=None, volatility=None, final_date=None):
        ''' Updates any of the given parameters and invalidates the
        cached simulation results. '''
        new_values = (('initial_value', initial_value),
                      ('volatility', volatility),
                      ('final_date', final_date))
        for attr, value in new_values:
            if value is not None:
                setattr(self, attr, value)
        self.instrument_values = None

    def generate_paths(self, fixed_seed=False, day_count=365.):
        ''' Simulates GBM paths over self.time_grid and stores them in
        self.instrument_values (shape: dates x paths). '''
        if self.time_grid is None:
            # grid is built lazily by the generic base-class method
            self.generate_time_grid()
        no_dates = len(self.time_grid)
        no_paths = self.paths
        paths = np.zeros((no_dates, no_paths))
        # all paths start at the initial value
        paths[0] = self.initial_value
        if self.correlated is False:
            # stand-alone use: draw standard normal numbers here
            randoms = sn_random_numbers((1, no_dates, no_paths),
                                        fixed_seed=fixed_seed)
        else:
            # portfolio use: random numbers come from the market environment
            randoms = self.random_numbers
        # forward rates drive the (risk-neutral) drift of the process
        forward_rates = self.discount_curve.get_forward_rates(
            self.time_grid, self.paths, dtobjects=True)[1]
        for t in range(1, no_dates):
            # pick the time slice from the relevant random number set
            if self.correlated is False:
                ran = randoms[t]
            else:
                ran = np.dot(self.cholesky_matrix, randoms[:, t, :])
                ran = ran[self.rn_set]
            # year fraction between the two grid dates
            dt = (self.time_grid[t] - self.time_grid[t - 1]).days / day_count
            # average forward rate over the interval
            rt = (forward_rates[t - 1] + forward_rates[t]) / 2
            drift = (rt - 0.5
                     * self.volatility ** 2) * dt
            diffusion = self.volatility * np.sqrt(dt) * ran
            # exact log-normal stepping scheme
            paths[t] = paths[t - 1] * np.exp(drift + diffusion)
        self.instrument_values = paths
class jump_diffusion(simulation_class):
    ''' Class to generate simulated paths based on
    the Merton (1976) jump diffusion model.

    Attributes
    ==========
    name : string
        name of the object
    mar_env : instance of market_environment
        market environment data for simulation
    corr : boolean
        True if correlated with other model object

    Methods
    =======
    update :
        updates parameters
    generate_paths :
        returns Monte Carlo paths given the market environment
    '''

    def __init__(self, name, mar_env, corr=False):
        super(jump_diffusion, self).__init__(name, mar_env, corr)
        # jump-component parameters on top of the generic ones
        try:
            self.lamb = mar_env.get_constant('lambda')  # jump intensity
            self.mu = mar_env.get_constant('mu')  # mean of log jump size
            self.delt = mar_env.get_constant('delta')  # jump size volatility
        except:
            print "Error parsing market environment."

    def update(self, initial_value=None, volatility=None, lamb=None,
               mu=None, delta=None, final_date=None):
        ''' Updates any of the given parameters and invalidates the
        cached simulation results. '''
        if initial_value is not None:
            self.initial_value = initial_value
        if volatility is not None:
            self.volatility = volatility
        if lamb is not None:
            self.lamb = lamb
        if mu is not None:
            self.mu = mu
        if delta is not None:
            self.delt = delta
        if final_date is not None:
            self.final_date = final_date
        self.instrument_values = None

    def generate_paths(self, fixed_seed=False, day_count=365.):
        ''' Simulates Merton jump-diffusion paths over self.time_grid and
        stores them in self.instrument_values (dates x paths). '''
        if self.time_grid is None:
            self.generate_time_grid()
            # method from generic model simulation class
        # number of dates for time grid
        M = len(self.time_grid)
        # number of paths
        I = self.paths
        # array initialization for path simulation
        paths = np.zeros((M, I))
        # initialize first date with initial_value
        paths[0] = self.initial_value
        if self.correlated is False:
            # if not correlated generate random numbers
            sn1 = sn_random_numbers((1, M, I),
                                    fixed_seed=fixed_seed)
        else:
            # if correlated use random number object as provided
            # in market environment
            sn1 = self.random_numbers
        # standard normally distributed pseudo-random numbers
        # for the jump component
        sn2 = sn_random_numbers((1, M, I),
                                fixed_seed=fixed_seed)
        forward_rates = self.discount_curve.get_forward_rates(
            self.time_grid, self.paths, dtobjects=True)[1]
        # drift correction so the jump component is a martingale
        rj = self.lamb * (np.exp(self.mu + 0.5 * self.delt ** 2) - 1)
        for t in range(1, len(self.time_grid)):
            # select the right time slice from the relevant
            # random number set
            if self.correlated is False:
                ran = sn1[t]
            else:
                # only with correlation in portfolio context
                ran = np.dot(self.cholesky_matrix, sn1[:, t, :])
                ran = ran[self.rn_set]
            dt = (self.time_grid[t] - self.time_grid[t - 1]).days / day_count
            # difference between two dates as year fraction
            poi = np.random.poisson(self.lamb * dt, I)
            # Poisson distributed pseudo-random numbers for jump component
            rt = (forward_rates[t - 1] + forward_rates[t]) / 2
            # diffusion step times (1 + jump contribution)
            paths[t] = paths[t - 1] * (np.exp((rt - rj -
                       0.5 * self.volatility ** 2) * dt
                       + self.volatility * np.sqrt(dt) * ran)
                       + (np.exp(self.mu + self.delt *
                          sn2[t]) - 1) * poi)
        self.instrument_values = paths
class stochastic_volatility(simulation_class):
''' Class to generate simulated paths based on
the Heston (1993) stochastic volatility model.
Attributes
==========
name : string
name of the object
mar_env : instance of market_environment
market environment data for simulation
corr : boolean
True if correlated with other model object
Methods
=======
update :
updates parameters
generate_paths :
returns Monte Carlo paths for the market environment
get_volatility_values :
returns array with simulated volatility paths
'''
def __init__(self, name, mar_env, corr=False):
super(stochastic_volatility, self).__init__(name, mar_env, corr)
try:
self.kappa = mar_env.get_constant('kappa')
self.theta = mar_env.get_constant('theta')
self.vol_vol = mar_env.get_constant('vol_vol')
self.rho = mar_env.get_constant('rho')
self.leverage = np.linalg.cholesky(
np.array([[1.0, self.rho], [self.rho, 1.0]]))
self.volatility_values = None
except:
print "Error parsing market environment."
def update(self, initial_value=None, volatility=None,
vol_vol=None, kappa=None, theta=None,
final_date=None):
if initial_value is not None:
self.initial_value = initial_value
if volatility is not None:
self.volatility = volatility
if vol_vol is not None:
self.vol_vol = vol_vol
if kappa is not None:
self.kappa = kappa
if theta is not None:
self.theta = theta
if final_date is not None:
self.final_date = final_date
self.time_grid = None
self.instrument_values = None
self.volatility_values = None
def generate_paths(self, fixed_seed=True, day_count=365.):
if self.time_grid is None:
self.generate_time_grid()
M = len(self.time_grid)
I = self.paths
paths = np.zeros((M, I))
va = np.zeros_like(paths)
va_ = np.zeros_like(paths)
paths[0] = self.initial_value
va[0] = self.volatility ** 2
va_[0] = self.volatility ** 2
if self.correlated is False:
sn1 = sn_random_numbers((1, M, I),
fixed_seed=fixed_seed)
else:
sn1 = self.random_numbers
# Pseudo-random numbers for the stochastic volatility
sn2 = sn_random_numbers((1, M, I), fixed_seed=fixed_seed)
forward_rates = self.discount_curve.get_forward_rates(
self.time_grid, self.paths, dtobjects=True)[1]
for t in range(1, len(self.time_grid)):
dt = (self.time_grid[t] - self.time_grid[t - 1]).days / day_count
if self.correlated is False:
ran = sn1[t]
else:
ran = np.dot(self.cholesky_matrix, sn1[:, t, :])
ran = ran[self.rn_set]
rat = np.array([ran, sn2[t]])
rat = np.dot(self.leverage, rat)
va_[t] = (va_[t - 1] + self.kappa
* (self.theta - np.maximum(0, va_[t - 1])) * dt
+ np.sqrt(np.maximum(0, va_[t - 1]))
* self.vol_vol * np.sqrt(dt) * rat[1])
va[t] = np.maximum(0, va_[t])
rt = (forward_rates[t - 1] + forward_rates[t]) / 2
paths[t] = paths[t - 1] * (np.exp((rt - 0.5 * va[t]) * dt
+ np.sqrt(va[t]) * np.sqrt(dt) * rat[0]))
# moment matching stoch vol part
paths[t] -= np.mean(paths[t - 1] * np.sqrt(va[t])
* math.sqrt(dt) * rat[0])
self.instrument_values = paths
self.volatility_values = np.sqrt(va)
def get_volatility_values(self):
if self.volatility_values is None:
self.generate_paths(self)
return self.volatility_values
class stoch_vol_jump_diffusion(simulation_class):
''' Class to generate simulated paths based on
the Bates (1996) stochastic volatility jump-diffusion model.
Attributes
==========
name : string
name of the object
mar_env : instance of market_environment
market environment data for simulation
corr : boolean
True if correlated with other model object
Methods
=======
update :
updates parameters
generate_paths :
returns Monte Carlo paths for the market environment
get_volatility_values :
returns array with simulated volatility paths
'''
def __init__(self, name, mar_env, corr=False):
super(stoch_vol_jump_diffusion, self).__init__(name, mar_env, corr)
try:
self.lamb = mar_env.get_constant('lambda')
self.mu = mar_env.get_constant('mu')
self.delt = mar_env.get_constant('delta')
self.rho = mar_env.get_constant('rho')
self.leverage = np.linalg.cholesky(
np.array([[1.0, self.rho], [self.rho, 1.0]]))
self.kappa = mar_env.get_constant('kappa')
self.theta = mar_env.get_constant('theta')
self.vol_vol = mar_env.get_constant('vol_vol')
self.volatility_values = None
except:
print "Error parsing market environment."
def update(self, initial_value=None, volatility=None,
vol_vol=None, kappa=None, theta=None, lamb=None,
mu=None, delta=None, final_date=None):
if initial_value is not None:
self.initial_value = initial_value
if volatility is not None:
self.volatility = volatility
if vol_vol is not None:
self.vol_vol = vol_vol
if kappa is not None:
self.kappa = kappa
if theta is not None:
self.theta = theta
if lamb is not None:
self.lamb = lamb
if mu is not None:
self.mu = mu
if delta is not None:
self.delt = delta
if final_date is not None:
self.final_date = final_date
self.time_grid = None
self.instrument_values = None
self.volatility_values = None
def generate_paths(self, fixed_seed=True, day_count=365.):
if self.time_grid is None:
self.generate_time_grid()
M = len(self.time_grid)
I = self.paths
paths = np.zeros((M, I))
va = np.zeros_like(paths)
va_ = np.zeros_like(paths)
paths[0] = self.initial_value
va[0] = self.volatility ** 2
va_[0] = self.volatility ** 2
if self.correlated is False:
sn1 = sn_random_numbers((1, M, I),
fixed_seed=fixed_seed)
else:
sn1 = self.random_numbers
# Pseudo-random numbers for the jump component
sn2 = sn_random_numbers((1, M, I),
fixed_seed=fixed_seed)
# Pseudo-random numbers for the stochastic volatility
sn3 = sn_random_numbers((1, M, I),
fixed_seed=fixed_seed)
forward_rates = self.discount_curve.get_forward_rates(
self.time_grid, self.paths, dtobjects=True)[1]
rj = self.lamb * (np.exp(self.mu + 0.5 * self.delt ** 2) - 1)
for t in range(1, len(self.time_grid)):
dt = (self.time_grid[t] - self.time_grid[t - 1]).days / day_count
if self.correlated is False:
ran = sn1[t]
else:
ran = np.dot(self.cholesky_matrix, sn1[:, t, :])
ran = ran[self.rn_set]
rat = np.array([ran, sn3[t]])
rat = np.dot(self.leverage, rat)
va_[t] = (va_[t - 1] + self.kappa
* (self.theta - np.maximum(0, va_[t - 1])) * dt
+ np.sqrt(np.maximum(0, va_[t - 1]))
* self.vol_vol * np.sqrt(dt) * rat[1])
va[t] = np.maximum(0, va_[t])
poi = np.random.poisson(self.lamb * dt, I)
rt = (forward_rates[t - 1] + forward_rates[t]) / 2
paths[t] = paths[t - 1] * (np.exp((rt - rj - 0.5 * va[t]) * dt
+ np.sqrt(va[t]) * np.sqrt(dt) * rat[0])
+ (np.exp(self.mu + self.delt *
sn2[t]) - 1) * poi)
# moment matching stoch vol part
paths[t] -= np.mean(paths[t - 1] * np.sqrt(va[t])
* math.sqrt(dt) * rat[0])
self.instrument_values = paths
self.volatility_values = np.sqrt(va)
def get_volatility_values(self):
if self.volatility_values is None:
self.generate_paths(self)
return self.volatility_values
class square_root_diffusion(simulation_class):
    ''' Class to generate simulated paths based on
    the Cox-Ingersoll-Ross (1985) square-root diffusion.

    Attributes
    ==========
    name : string
        name of the object
    mar_env : instance of market_environment
        market environment data for simulation
    corr : boolean
        True if correlated with other model object

    Methods
    =======
    update :
        updates parameters
    generate_paths :
        returns Monte Carlo paths given the market environment
    '''

    def __init__(self, name, mar_env, corr=False):
        super(square_root_diffusion, self).__init__(name, mar_env, corr)
        # CIR-specific parameters on top of the generic ones
        try:
            self.kappa = mar_env.get_constant('kappa')  # mean-reversion speed
            self.theta = mar_env.get_constant('theta')  # long-run mean
        except:
            print "Error parsing market environment."

    def update(self, initial_value=None, volatility=None, kappa=None,
               theta=None, final_date=None):
        ''' Updates any of the given parameters and invalidates the
        cached simulation results. '''
        if initial_value is not None:
            self.initial_value = initial_value
        if volatility is not None:
            self.volatility = volatility
        if kappa is not None:
            self.kappa = kappa
        if theta is not None:
            self.theta = theta
        if final_date is not None:
            self.final_date = final_date
        self.instrument_values = None

    def generate_paths(self, fixed_seed=True, day_count=365.):
        ''' Simulates CIR paths over self.time_grid and stores them in
        self.instrument_values (dates x paths). '''
        if self.time_grid is None:
            self.generate_time_grid()
        M = len(self.time_grid)
        I = self.paths
        paths = np.zeros((M, I))   # truncated (non-negative) process
        paths_ = np.zeros_like(paths)  # untruncated auxiliary process
        paths[0] = self.initial_value
        paths_[0] = self.initial_value
        if self.correlated is False:
            rand = sn_random_numbers((1, M, I),
                                     fixed_seed=fixed_seed)
        else:
            # portfolio context: random numbers from the market environment
            rand = self.random_numbers
        for t in range(1, len(self.time_grid)):
            # year fraction between grid dates
            dt = (self.time_grid[t] - self.time_grid[t - 1]).days / day_count
            if self.correlated is False:
                ran = rand[t]
            else:
                ran = np.dot(self.cholesky_matrix, rand[:, t, :])
                ran = ran[self.rn_set]
            # full truncation Euler discretization
            paths_[t] = (paths_[t - 1] + self.kappa
                         * (self.theta - np.maximum(0, paths_[t - 1])) * dt
                         + np.sqrt(np.maximum(0, paths_[t - 1]))
                         * self.volatility * np.sqrt(dt) * ran)
            paths[t] = np.maximum(0, paths_[t])
        self.instrument_values = paths
class stochastic_short_rate(object):
    ''' Class for discounting based on stochastic short rates
    based on square-root diffusion process.

    Attributes
    ==========
    name : string
        name of the object
    mar_env : market_environment object
        containing all relevant parameters

    Methods
    =======
    get_forward_rates :
        return forward rates given a time list/array
    get_discount_factors :
        return discount factors given a time list/array
    '''

    def __init__(self, name, mar_env):
        self.name = name
        try:
            # the wrapped square_root_diffusion requires these entries;
            # add dummies if the caller did not provide them
            try:
                mar_env.get_curve('discount_curve')
            except:
                mar_env.add_curve('discount_curve', 0.0)  # dummy
            try:
                mar_env.get_constant('currency')
            except:
                mar_env.add_constant('currency', 'CUR')  # dummy
            self.process = square_root_diffusion('process', mar_env)
            # simulate short-rate paths up front
            self.process.generate_paths()
        except:
            raise ValueError('Error parsing market environment.')

    def get_forward_rates(self, time_list, paths, dtobjects=True):
        ''' Returns (time_list, rates) with simulated short-rate paths,
        re-simulating if grid length or path count differ from the cache. '''
        if len(self.process.time_grid) != len(time_list) \
                or self.process.paths != paths:
            self.process.paths = paths
            self.process.time_grid = time_list
            # force re-simulation on the new grid/path count
            self.process.instrument_values = None
        rates = self.process.get_instrument_values()
        return time_list, rates

    def get_discount_factors(self, time_list, paths, dtobjects=True):
        ''' Returns (time_list, discount_factors): for each date, the
        path-wise discount factor from that date to the final date. '''
        discount_factors = []
        if dtobjects is True:
            # convert datetimes to year fractions
            dlist = get_year_deltas(time_list)
        else:
            dlist = time_list
        forward_rate = self.get_forward_rates(time_list, paths, dtobjects)[1]
        for no in range(len(dlist)):
            factor = np.zeros_like(forward_rate[0, :])
            # trapezoidal integration of the simulated rates from
            # date no to the final date
            for d in range(no, len(dlist) - 1):
                factor += ((dlist[d + 1] - dlist[d])
                           * (0.5 * (forward_rate[d + 1] + forward_rate[d])))
            discount_factors.append(np.exp(-factor))
        return time_list, np.array(discount_factors)
def srd_forwards(initial_value, p0, time_grid):
    ''' Function for forward vols/rates in the square-root diffusion model.

    Parameters
    ==========
    initial_value : float
        initial value of the process
    p0 : sequence of three floats (kappa, theta, sigma)
        kappa : mean-reversion factor
        theta : long-run mean
        sigma : volatility factor (vol-vol)
    time_grid : list/array of datetime objects
        dates to generate forwards for

    Returns
    =======
    forwards : array
        forward vols/rates
    '''
    # tuple parameters in the signature are Python-2-only syntax
    # (removed by PEP 3113); unpack explicitly instead -- the call
    # syntax for existing callers is unchanged
    kappa, theta, sigma = p0
    t = get_year_deltas(time_grid)
    g = math.sqrt(kappa ** 2 + 2 * sigma ** 2)
    # closed-form CIR forward curve, split into two summands
    sum1 = ((kappa * theta * (np.exp(g * t) - 1)) /
            (2 * g + (kappa + g) * (np.exp(g * t) - 1)))
    sum2 = initial_value * ((4 * g ** 2 * np.exp(g * t)) /
                            (2 * g + (kappa + g) * (np.exp(g * t) - 1)) ** 2)
    forwards = sum1 + sum2
    return forwards
class square_root_jump_diffusion(simulation_class):
    ''' Class to generate simulated paths based on
    the square-root jump diffusion model.

    Attributes
    ==========
    name : string
        name of the object
    mar_env : instance of market_environment
        market environment data for simulation
    corr : boolean
        True if correlated with other model object

    Methods
    =======
    update :
        updates parameters
    generate_paths :
        returns Monte Carlo paths for the market environment
    '''

    def __init__(self, name, mar_env, corr=False):
        super(square_root_jump_diffusion, self).__init__(name, mar_env, corr)
        try:
            self.kappa = mar_env.get_constant('kappa')  # mean-reversion speed
            self.theta = mar_env.get_constant('theta')  # long-run mean
            self.lamb = mar_env.get_constant('lambda')  # jump intensity
            self.mu = mar_env.get_constant('mu')  # mean of log jump size
            self.delt = mar_env.get_constant('delta')  # jump size volatility
        except:
            print "Error parsing market environment."

    def update(self, initial_value=None, volatility=None, kappa=None,
               theta=None, lamb=None, mu=None, delt=None, final_date=None):
        ''' Updates any of the given parameters; invalidates the cached
        time grid and simulation results. '''
        if initial_value is not None:
            self.initial_value = initial_value
        if volatility is not None:
            self.volatility = volatility
        if kappa is not None:
            self.kappa = kappa
        if theta is not None:
            self.theta = theta
        if lamb is not None:
            self.lamb = lamb
        if mu is not None:
            self.mu = mu
        if delt is not None:
            self.delt = delt
        if final_date is not None:
            self.final_date = final_date
        self.instrument_values = None
        self.time_grid = None

    def generate_paths(self, fixed_seed=True, day_count=365.):
        ''' Simulates square-root jump-diffusion paths over self.time_grid
        and stores them in self.instrument_values (dates x paths). '''
        if self.time_grid is None:
            self.generate_time_grid()
        M = len(self.time_grid)
        I = self.paths
        paths = np.zeros((M, I))   # truncated (non-negative) process
        paths_ = np.zeros_like(paths)  # untruncated auxiliary process
        paths[0] = self.initial_value
        paths_[0] = self.initial_value
        if self.correlated is False:
            rand = sn_random_numbers((1, M, I),
                                     fixed_seed=fixed_seed)
        else:
            rand = self.random_numbers
        # standard normals for the jump size component
        snr = sn_random_numbers((1, M, I),
                                fixed_seed=fixed_seed)
        # drift correction so the jump component is a martingale
        rj = self.lamb * (np.exp(self.mu + 0.5 * self.delt ** 2) - 1)
        for t in range(1, len(self.time_grid)):
            # year fraction between grid dates
            dt = (self.time_grid[t] - self.time_grid[t - 1]).days / day_count
            if self.correlated is False:
                ran = rand[t]
            else:
                ran = np.dot(self.cholesky_matrix, rand[:, t, :])
                ran = ran[self.rn_set]
            # Poisson distributed pseudo-random numbers for jump component
            poi = np.random.poisson(self.lamb * dt, I)
            # full truncation Euler discretization
            paths_[t, :] = (paths_[t - 1, :] + self.kappa
                            * (self.theta - np.maximum(0, paths_[t - 1, :])) * dt
                            + np.sqrt(np.maximum(0, paths_[t - 1, :]))
                            * self.volatility * np.sqrt(dt) * ran
                            + ((np.exp(self.mu + self.delt * snr[t]) - 1) * poi)
                            * np.maximum(0, paths_[t - 1, :]) - rj * dt)
            paths[t, :] = np.maximum(0, paths_[t, :])
        self.instrument_values = paths
class square_root_jump_diffusion_plus(square_root_jump_diffusion):
    ''' Class to generate simulated paths based on
    the square-root jump diffusion model with term structure.

    Attributes
    ==========
    name : string
        name of the object
    mar_env : instance of market_environment
        market environment data for simulation
    corr : boolean
        True if correlated with other model object

    Methods
    =======
    srd_forward_error :
        error function for forward rate/vols calibration
    generate_shift_base :
        generates a shift base to take term structure into account
    update :
        updates parameters
    update_shift_values :
        updates shift values for term structure
    generate_paths :
        returns Monte Carlo paths for the market environment
    update_forward_rates :
        updates forward rates (vol, int. rates) for given time grid
    '''

    def __init__(self, name, mar_env, corr=False):
        super(square_root_jump_diffusion_plus, self).__init__(name, mar_env, corr)
        try:
            # term structure as (date, forward value) pairs; optional
            self.term_structure = mar_env.get_curve('term_structure')
        except:
            self.term_structure = None
            print "Missing Term Structure."
        self.forward_rates = []
        self.shift_base = None
        self.shift_values = []

    def srd_forward_error(self, p0):
        ''' MSE between model forwards for parameters p0 = (kappa, theta,
        sigma) and the market term structure; penalizes negative params. '''
        if p0[0] < 0 or p0[1] < 0 or p0[2] < 0:
            # penalty value to keep the optimizer in the positive domain
            return 100
        f_model = srd_forwards(self.initial_value, p0,
                               self.term_structure[:, 0])
        MSE = np.sum((self.term_structure[:, 1]
                      - f_model) ** 2) / len(f_model)
        return MSE

    def generate_shift_base(self, p0):
        ''' Calibrates (kappa, theta, sigma) to the term structure and
        stores per-date shifts (market minus model) in self.shift_base. '''
        # calibration
        opt = sco.fmin(self.srd_forward_error, p0)
        # shift_calculation
        f_model = srd_forwards(self.initial_value, opt,
                               self.term_structure[:, 0])
        shifts = self.term_structure[:, 1] - f_model
        self.shift_base = np.array((self.term_structure[:, 0], shifts)).T

    def update_shift_values(self, k=1):
        ''' Interpolates the shift base onto the simulation time grid
        (spline of order k); zeros if no shift base is available. '''
        if self.shift_base is not None:
            t = get_year_deltas(self.shift_base[:, 0])
            tck = sci.splrep(t, self.shift_base[:, 1], k=k)
            self.generate_time_grid()
            st = get_year_deltas(self.time_grid)
            # NOTE(review): np.array(zip(...)) relies on Python 2 zip
            # returning a list; under Python 3 this wraps a zip object
            self.shift_values = np.array(zip(self.time_grid,
                                             sci.splev(st, tck, der=0)))
        else:
            # NOTE(review): assumes self.time_grid is already set in this
            # branch -- fails if it is still None; verify call order
            self.shift_values = np.array(zip(self.time_grid,
                                             np.zeros(len(self.time_grid))))

    def generate_paths(self, fixed_seed=True, day_count=365.):
        ''' Simulates square-root jump-diffusion paths and adds the
        term-structure shift values date by date. '''
        if self.time_grid is None:
            self.generate_time_grid()
        self.update_shift_values()
        M = len(self.time_grid)
        I = self.paths
        paths = np.zeros((M, I))
        paths_ = np.zeros_like(paths)
        paths[0] = self.initial_value
        paths_[0] = self.initial_value
        if self.correlated is False:
            rand = sn_random_numbers((1, M, I),
                                     fixed_seed=fixed_seed)
        else:
            rand = self.random_numbers
        # standard normals for the jump size component
        snr = sn_random_numbers((1, M, I),
                                fixed_seed=fixed_seed)
        # NOTE(review): unlike the sibling classes this call omits the
        # paths argument and the [1] indexing, and forward_rates is never
        # used below -- looks like dead/inconsistent code; verify
        forward_rates = self.discount_curve.get_forward_rates(
            self.time_grid, dtobjects=True)
        # drift correction so the jump component is a martingale
        rj = self.lamb * (np.exp(self.mu + 0.5 * self.delt ** 2) - 1)
        for t in range(1, len(self.time_grid)):
            dt = (self.time_grid[t] - self.time_grid[t - 1]).days / day_count
            if self.correlated is False:
                ran = rand[t]
            else:
                ran = np.dot(self.cholesky_matrix, rand[:, t, :])
                ran = ran[self.rn_set]
            # Poisson distributed pseudo-random numbers for jump component
            poi = np.random.poisson(self.lamb * dt, I)
            # full truncation Euler discretization
            paths_[t] = (paths_[t - 1] + self.kappa
                         * (self.theta - np.maximum(0, paths_[t - 1])) * dt
                         + np.sqrt(np.maximum(0, paths_[t - 1]))
                         * self.volatility * np.sqrt(dt) * ran
                         + ((np.exp(self.mu + self.delt * snr[t]) - 1) * poi)
                         * np.maximum(0, paths_[t - 1]) - rj * dt)
            # truncate and add the term-structure shift for this date
            paths[t] = np.maximum(0, paths_[t]) + self.shift_values[t, 1]
        self.instrument_values = paths

    def update_forward_rates(self, time_grid=None):
        ''' Stores the closed-form CIR forward curve for the current
        parameters as (date, forward) pairs in self.forward_rates. '''
        if time_grid is None:
            self.generate_time_grid()
            time_grid = self.time_grid
        t = get_year_deltas(time_grid)
        g = np.sqrt(self.kappa ** 2 + 2 * self.volatility ** 2)
        sum1 = ((self.kappa * self.theta * (np.exp(g * t) - 1)) /
                (2 * g + (self.kappa + g) * (np.exp(g * t) - 1)))
        sum2 = self.initial_value * ((4 * g ** 2 * np.exp(g * t)) /
                                     (2 * g + (self.kappa + g) * (np.exp(g * t) - 1)) ** 2)
        self.forward_rates = np.array(zip(time_grid, sum1 + sum2))
class general_underlying(object):
    ''' Wraps historical market data so that it can stand in for a
    simulation object in VaR-based portfolio modeling and valuation. '''

    def __init__(self, name, data, val_env):
        self.name = name
        self.data = data
        self.fit_model = None
        self.special_dates = []
        # business-day frequency, matching the historical data
        self.frequency = 'B'
        self.paths = val_env.get_constant('paths')
        self.discount_curve = val_env.get_curve('discount_curve')
        self.time_grid = val_env.get_list('time_grid')

    def get_instrument_values(self, fixed_seed=False):
        ''' Returns the historical values; fixed_seed exists only for
        interface compatibility with the simulation classes. '''
        return self.data.values
r-o-b-b-i-e/pootle | refs/heads/master | pootle/apps/pootle_fs/state.py | 1 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from collections import OrderedDict
from django.db.models import Q
from django.utils.functional import cached_property
from django.utils.lru_cache import lru_cache
from pootle.core.state import ItemState, State
from pootle_store.constants import POOTLE_WINS, SOURCE_WINS
from .resources import FSProjectStateResources
# Registry of all recognized filesystem/Pootle sync states; an OrderedDict
# because the declaration order is the canonical display/iteration order.
FS_STATE = OrderedDict()
FS_STATE["conflict"] = {
    "title": "Conflicts",
    "description": "Both Pootle Store and file in filesystem have changed"}
FS_STATE["conflict_untracked"] = {
    "title": "Untracked conflicts",
    "description": (
        "Newly created files in the filesystem matching newly created Stores "
        "in Pootle")}
FS_STATE["pootle_ahead"] = {
    "title": "Changed in Pootle",
    "description": "Stores that have changed in Pootle"}
FS_STATE["pootle_untracked"] = {
    "title": "Untracked Stores",
    "description": "Newly created Stores in Pootle"}
FS_STATE["pootle_staged"] = {
    "title": "Added in Pootle",
    "description": (
        "Stores that have been added in Pootle and are now being tracked")}
FS_STATE["pootle_removed"] = {
    "title": "Removed from Pootle",
    "description": "Stores that have been removed from Pootle"}
FS_STATE["fs_ahead"] = {
    "title": "Changed in filesystem",
    "description": "A file has been changed in the filesystem"}
FS_STATE["fs_untracked"] = {
    "title": "Untracked files",
    "description": "Newly created files in the filesystem"}
FS_STATE["fs_staged"] = {
    "title": "Fetched from filesystem",
    "description": (
        "Files that have been fetched from the filesystem and are now being "
        "tracked")}
FS_STATE["fs_removed"] = {
    "title": "Removed from filesystem",
    "description": "Files that have been removed from the filesystem"}
FS_STATE["merge_pootle_wins"] = {
    "title": "Staged for merge (Pootle wins)",
    "description": (
        "Files or Stores that have been staged for merging on sync - pootle "
        "wins where units are both updated")}
FS_STATE["merge_fs_wins"] = {
    "title": "Staged for merge (FS wins)",
    "description": (
        "Files or Stores that have been staged for merging on sync - FS "
        "wins where units are both updated")}
FS_STATE["remove"] = {
    "title": "Staged for removal",
    # typo fix: was "staged or removal"
    "description": "Files or Stores that have been staged for removal on sync"}
FS_STATE["both_removed"] = {
    "title": "Removed from Pootle and filesystem",
    "description": (
        "Files or Stores that were previously tracked but have now "
        "disappeared")}
class FSItemState(ItemState):
    ''' State item for a single file/store pair within a project's
    filesystem sync state. '''

    @property
    def pootle_path(self):
        # explicit kwarg wins; otherwise derive the path from the tracked
        # store_fs or the (untracked) store; implicitly None if absent
        if "pootle_path" in self.kwargs:
            return self.kwargs["pootle_path"]
        elif "store_fs" in self.kwargs:
            return self.kwargs["store_fs"].pootle_path
        elif "store" in self.kwargs:
            return self.kwargs["store"].pootle_path

    @property
    def fs_path(self):
        # explicit kwarg wins; otherwise the tracked store_fs path;
        # implicitly None if absent
        if "fs_path" in self.kwargs:
            return self.kwargs["fs_path"]
        elif "store_fs" in self.kwargs:
            return self.kwargs["store_fs"].path

    @property
    def project(self):
        return self.plugin.project

    @property
    def plugin(self):
        # the state's context is the FS plugin
        return self.state.context

    @property
    def store_fs(self):
        return self.kwargs.get("store_fs")

    @property
    def store(self):
        return self.kwargs.get("store")

    def __gt__(self, other):
        ''' Order items of the same class by pootle_path. '''
        if isinstance(other, self.__class__):
            return self.pootle_path > other.pootle_path
        # bug fix: was `return object.__gt__(other)`, which raises
        # TypeError (unbound call with a single argument); returning
        # NotImplemented lets Python fall back to the reflected operation
        return NotImplemented
class ProjectFSState(State):
item_state_class = FSItemState
def __init__(self, context, fs_path=None, pootle_path=None):
self.fs_path = fs_path
self.pootle_path = pootle_path
super(ProjectFSState, self).__init__(
context, fs_path=fs_path, pootle_path=pootle_path)
@property
def project(self):
return self.context.project
    @property
    def states(self):
        # state names in the canonical order declared by the FS_STATE
        # registry (a list under Python 2's dict.keys())
        return FS_STATE.keys()
@cached_property
def resources(self):
return FSProjectStateResources(
self.context,
pootle_path=self.pootle_path,
fs_path=self.fs_path)
    @property
    def state_conflict(self):
        # stores changed on the Pootle side with no conflict resolution
        # recorded yet
        conflict = self.resources.pootle_changed.exclude(
            resolve_conflict__gt=0)
        for store_fs in conflict.iterator():
            store_fs.project = self.project
            # _get_changes presumably returns (pootle_changed, fs_changed)
            # flags -- not visible here; only the fs flag is needed since
            # the queryset already implies a Pootle-side change
            pootle_changed_, fs_changed = self._get_changes(store_fs.file)
            if fs_changed:
                yield dict(store_fs=store_fs)
@property
def state_fs_untracked(self):
tracked_fs_paths = self.resources.tracked_paths.keys()
tracked_pootle_paths = self.resources.tracked_paths.values()
trackable_fs_paths = self.resources.trackable_store_paths.values()
trackable_pootle_paths = self.resources.trackable_store_paths.keys()
for pootle_path, fs_path in self.resources.found_file_matches:
fs_untracked = (
fs_path not in tracked_fs_paths
and pootle_path not in tracked_pootle_paths
and fs_path not in trackable_fs_paths
and pootle_path not in trackable_pootle_paths)
if fs_untracked:
yield dict(
pootle_path=pootle_path,
fs_path=fs_path)
@property
def state_pootle_untracked(self):
for store, path in self.resources.trackable_stores:
if path not in self.resources.found_file_paths:
yield dict(
store=store,
fs_path=path)
@property
def state_conflict_untracked(self):
for store, path in self.resources.trackable_stores:
if path in self.resources.found_file_paths:
yield dict(
store=store,
fs_path=path)
@property
def state_remove(self):
to_remove = self.resources.tracked.filter(staged_for_removal=True)
for store_fs in to_remove.iterator():
yield dict(store_fs=store_fs)
@property
def state_unchanged(self):
has_changes = []
for v in self.__state__.values():
if v:
has_changes.extend([p.pootle_path for p in v])
return self.resources.synced.exclude(pootle_path__in=has_changes)
@property
def state_fs_staged(self):
staged = (
self.resources.unsynced
.exclude(path__in=self.resources.missing_file_paths)
.exclude(resolve_conflict=POOTLE_WINS)
| self.resources.synced
.filter(Q(store__isnull=True) | Q(store__obsolete=True))
.exclude(path__in=self.resources.missing_file_paths)
.filter(resolve_conflict=SOURCE_WINS))
for store_fs in staged.iterator():
store_fs.project = self.project
yield dict(store_fs=store_fs)
@property
def state_fs_ahead(self):
fs_changed = (
self.resources.synced
.exclude(path__in=self.resources.missing_file_paths))
for store_fs in fs_changed.iterator():
store_fs.project = self.project
pootle_changed, fs_changed = self._get_changes(store_fs.file)
fs_ahead = (
fs_changed
and (
not pootle_changed
or store_fs.resolve_conflict == SOURCE_WINS))
if fs_ahead:
yield dict(store_fs=store_fs)
@property
def state_fs_removed(self):
removed = (
self.resources.synced
.filter(path__in=self.resources.missing_file_paths)
.exclude(resolve_conflict=POOTLE_WINS)
.exclude(store_id__isnull=True)
.exclude(store__obsolete=True))
for store_fs in removed.iterator():
store_fs.project = self.project
yield dict(store_fs=store_fs)
@property
def state_merge_pootle_wins(self):
to_merge = self.resources.tracked.filter(
staged_for_merge=True,
resolve_conflict=POOTLE_WINS)
for store_fs in to_merge.iterator():
store_fs.project = self.project
yield dict(store_fs=store_fs)
@property
def state_merge_fs_wins(self):
to_merge = self.resources.tracked.filter(
staged_for_merge=True,
resolve_conflict=SOURCE_WINS)
for store_fs in to_merge.iterator():
store_fs.project = self.project
yield dict(store_fs=store_fs)
@property
def state_pootle_ahead(self):
for store_fs in self.resources.pootle_changed.iterator():
store_fs.project = self.project
pootle_changed_, fs_changed = self._get_changes(store_fs.file)
pootle_ahead = (
not fs_changed
or store_fs.resolve_conflict == POOTLE_WINS)
if pootle_ahead:
yield dict(store_fs=store_fs)
@property
def state_pootle_staged(self):
staged = (
self.resources.unsynced
.exclude(resolve_conflict=SOURCE_WINS)
.exclude(store__isnull=True)
.exclude(store__obsolete=True)
| self.resources.synced
.exclude(store__obsolete=True)
.exclude(store__isnull=True)
.filter(path__in=self.resources.missing_file_paths)
.filter(resolve_conflict=POOTLE_WINS))
for store_fs in staged.iterator():
store_fs.project = self.project
yield dict(store_fs=store_fs)
@property
def state_both_removed(self):
removed = (
self.resources.synced
.filter(Q(store__obsolete=True) | Q(store__isnull=True))
.filter(path__in=self.resources.missing_file_paths))
for store_fs in removed.iterator():
store_fs.project = self.project
yield dict(store_fs=store_fs)
@property
def state_pootle_removed(self):
synced = (
self.resources.synced
.exclude(resolve_conflict=SOURCE_WINS)
.exclude(path__in=self.resources.missing_file_paths)
.filter(Q(store__isnull=True) | Q(store__obsolete=True)))
for store_fs in synced.iterator():
store_fs.project = self.project
yield dict(store_fs=store_fs)
@lru_cache()
def _get_changes(self, fs_file):
return fs_file.pootle_changed, fs_file.fs_changed
def clear_cache(self):
for x in dir(self):
x = getattr(self, x)
if callable(x) and hasattr(x, "cache_clear"):
x.cache_clear()
if "resources" in self.__dict__:
del self.__dict__["resources"]
return super(ProjectFSState, self).clear_cache()
|
nvoron23/avos | refs/heads/master | openstack_dashboard/dashboards/identity/groups/urls.py | 64 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.identity.groups import views
# URL routes for the identity "Groups" dashboard panel.
# NOTE: order matters for URL resolution; keep the catch-all-free
# patterns as declared.
urlpatterns = patterns(
    '',
    # Panel index: list all groups.
    url(r'^$', views.IndexView.as_view(), name='index'),
    # Create a new group.
    url(r'^create$', views.CreateView.as_view(), name='create'),
    # Update an existing group, identified by group_id.
    url(r'^(?P<group_id>[^/]+)/update/$',
        views.UpdateView.as_view(), name='update'),
    # Manage (view/remove) the group's current members.
    url(r'^(?P<group_id>[^/]+)/manage_members/$',
        views.ManageMembersView.as_view(), name='manage_members'),
    # Add users that are not yet members of the group.
    url(r'^(?P<group_id>[^/]+)/add_members/$',
        views.NonMembersView.as_view(), name='add_members'),
)
|
rue89-tech/edx-platform | refs/heads/master | common/test/data/capa/prog2.py | 270 | # prog2
# Make this file long, since that seems to affect how uploaded files are
# handled in webob or cgi.FieldStorage.
moby_dick_ten_chapters = """
CHAPTER 1. Loomings.
Call me Ishmael. Some years ago--never mind how long precisely--having
little or no money in my purse, and nothing particular to interest me on
shore, I thought I would sail about a little and see the watery part of
the world. It is a way I have of driving off the spleen and regulating
the circulation. Whenever I find myself growing grim about the mouth;
whenever it is a damp, drizzly November in my soul; whenever I find
myself involuntarily pausing before coffin warehouses, and bringing up
the rear of every funeral I meet; and especially whenever my hypos get
such an upper hand of me, that it requires a strong moral principle to
prevent me from deliberately stepping into the street, and methodically
knocking people's hats off--then, I account it high time to get to sea
as soon as I can. This is my substitute for pistol and ball. With a
philosophical flourish Cato throws himself upon his sword; I quietly
take to the ship. There is nothing surprising in this. If they but knew
it, almost all men in their degree, some time or other, cherish very
nearly the same feelings towards the ocean with me.
There now is your insular city of the Manhattoes, belted round by
wharves as Indian isles by coral reefs--commerce surrounds it with
her surf. Right and left, the streets take you waterward. Its extreme
downtown is the battery, where that noble mole is washed by waves, and
cooled by breezes, which a few hours previous were out of sight of land.
Look at the crowds of water-gazers there.
Circumambulate the city of a dreamy Sabbath afternoon. Go from Corlears
Hook to Coenties Slip, and from thence, by Whitehall, northward. What
do you see?--Posted like silent sentinels all around the town, stand
thousands upon thousands of mortal men fixed in ocean reveries. Some
leaning against the spiles; some seated upon the pier-heads; some
looking over the bulwarks of ships from China; some high aloft in the
rigging, as if striving to get a still better seaward peep. But these
are all landsmen; of week days pent up in lath and plaster--tied to
counters, nailed to benches, clinched to desks. How then is this? Are
the green fields gone? What do they here?
But look! here come more crowds, pacing straight for the water, and
seemingly bound for a dive. Strange! Nothing will content them but the
extremest limit of the land; loitering under the shady lee of yonder
warehouses will not suffice. No. They must get just as nigh the water
as they possibly can without falling in. And there they stand--miles of
them--leagues. Inlanders all, they come from lanes and alleys, streets
and avenues--north, east, south, and west. Yet here they all unite.
Tell me, does the magnetic virtue of the needles of the compasses of all
those ships attract them thither?
Once more. Say you are in the country; in some high land of lakes. Take
almost any path you please, and ten to one it carries you down in a
dale, and leaves you there by a pool in the stream. There is magic
in it. Let the most absent-minded of men be plunged in his deepest
reveries--stand that man on his legs, set his feet a-going, and he will
infallibly lead you to water, if water there be in all that region.
Should you ever be athirst in the great American desert, try this
experiment, if your caravan happen to be supplied with a metaphysical
professor. Yes, as every one knows, meditation and water are wedded for
ever.
But here is an artist. He desires to paint you the dreamiest, shadiest,
quietest, most enchanting bit of romantic landscape in all the valley of
the Saco. What is the chief element he employs? There stand his trees,
each with a hollow trunk, as if a hermit and a crucifix were within; and
here sleeps his meadow, and there sleep his cattle; and up from yonder
cottage goes a sleepy smoke. Deep into distant woodlands winds a
mazy way, reaching to overlapping spurs of mountains bathed in their
hill-side blue. But though the picture lies thus tranced, and though
this pine-tree shakes down its sighs like leaves upon this shepherd's
head, yet all were vain, unless the shepherd's eye were fixed upon the
magic stream before him. Go visit the Prairies in June, when for scores
on scores of miles you wade knee-deep among Tiger-lilies--what is the
one charm wanting?--Water--there is not a drop of water there! Were
Niagara but a cataract of sand, would you travel your thousand miles to
see it? Why did the poor poet of Tennessee, upon suddenly receiving two
handfuls of silver, deliberate whether to buy him a coat, which he sadly
needed, or invest his money in a pedestrian trip to Rockaway Beach? Why
is almost every robust healthy boy with a robust healthy soul in him, at
some time or other crazy to go to sea? Why upon your first voyage as a
passenger, did you yourself feel such a mystical vibration, when first
told that you and your ship were now out of sight of land? Why did the
old Persians hold the sea holy? Why did the Greeks give it a separate
deity, and own brother of Jove? Surely all this is not without meaning.
And still deeper the meaning of that story of Narcissus, who because
he could not grasp the tormenting, mild image he saw in the fountain,
plunged into it and was drowned. But that same image, we ourselves see
in all rivers and oceans. It is the image of the ungraspable phantom of
life; and this is the key to it all.
Now, when I say that I am in the habit of going to sea whenever I begin
to grow hazy about the eyes, and begin to be over conscious of my lungs,
I do not mean to have it inferred that I ever go to sea as a passenger.
For to go as a passenger you must needs have a purse, and a purse is
but a rag unless you have something in it. Besides, passengers get
sea-sick--grow quarrelsome--don't sleep of nights--do not enjoy
themselves much, as a general thing;--no, I never go as a passenger;
nor, though I am something of a salt, do I ever go to sea as a
Commodore, or a Captain, or a Cook. I abandon the glory and distinction
of such offices to those who like them. For my part, I abominate all
honourable respectable toils, trials, and tribulations of every kind
whatsoever. It is quite as much as I can do to take care of myself,
without taking care of ships, barques, brigs, schooners, and what not.
And as for going as cook,--though I confess there is considerable glory
in that, a cook being a sort of officer on ship-board--yet, somehow,
I never fancied broiling fowls;--though once broiled, judiciously
buttered, and judgmatically salted and peppered, there is no one who
will speak more respectfully, not to say reverentially, of a broiled
fowl than I will. It is out of the idolatrous dotings of the old
Egyptians upon broiled ibis and roasted river horse, that you see the
mummies of those creatures in their huge bake-houses the pyramids.
No, when I go to sea, I go as a simple sailor, right before the mast,
plumb down into the forecastle, aloft there to the royal mast-head.
True, they rather order me about some, and make me jump from spar to
spar, like a grasshopper in a May meadow. And at first, this sort
of thing is unpleasant enough. It touches one's sense of honour,
particularly if you come of an old established family in the land, the
Van Rensselaers, or Randolphs, or Hardicanutes. And more than all,
if just previous to putting your hand into the tar-pot, you have been
lording it as a country schoolmaster, making the tallest boys stand
in awe of you. The transition is a keen one, I assure you, from a
schoolmaster to a sailor, and requires a strong decoction of Seneca and
the Stoics to enable you to grin and bear it. But even this wears off in
time.
What of it, if some old hunks of a sea-captain orders me to get a broom
and sweep down the decks? What does that indignity amount to, weighed,
I mean, in the scales of the New Testament? Do you think the archangel
Gabriel thinks anything the less of me, because I promptly and
respectfully obey that old hunks in that particular instance? Who ain't
a slave? Tell me that. Well, then, however the old sea-captains may
order me about--however they may thump and punch me about, I have the
satisfaction of knowing that it is all right; that everybody else is
one way or other served in much the same way--either in a physical
or metaphysical point of view, that is; and so the universal thump is
passed round, and all hands should rub each other's shoulder-blades, and
be content.
Again, I always go to sea as a sailor, because they make a point of
paying me for my trouble, whereas they never pay passengers a single
penny that I ever heard of. On the contrary, passengers themselves must
pay. And there is all the difference in the world between paying
and being paid. The act of paying is perhaps the most uncomfortable
infliction that the two orchard thieves entailed upon us. But BEING
PAID,--what will compare with it? The urbane activity with which a man
receives money is really marvellous, considering that we so earnestly
believe money to be the root of all earthly ills, and that on no account
can a monied man enter heaven. Ah! how cheerfully we consign ourselves
to perdition!
Finally, I always go to sea as a sailor, because of the wholesome
exercise and pure air of the fore-castle deck. For as in this world,
head winds are far more prevalent than winds from astern (that is,
if you never violate the Pythagorean maxim), so for the most part the
Commodore on the quarter-deck gets his atmosphere at second hand from
the sailors on the forecastle. He thinks he breathes it first; but not
so. In much the same way do the commonalty lead their leaders in many
other things, at the same time that the leaders little suspect it.
But wherefore it was that after having repeatedly smelt the sea as a
merchant sailor, I should now take it into my head to go on a whaling
voyage; this the invisible police officer of the Fates, who has the
constant surveillance of me, and secretly dogs me, and influences me
in some unaccountable way--he can better answer than any one else. And,
doubtless, my going on this whaling voyage, formed part of the grand
programme of Providence that was drawn up a long time ago. It came in as
a sort of brief interlude and solo between more extensive performances.
I take it that this part of the bill must have run something like this:
"GRAND CONTESTED ELECTION FOR THE PRESIDENCY OF THE UNITED STATES.
"WHALING VOYAGE BY ONE ISHMAEL.
"BLOODY BATTLE IN AFFGHANISTAN."
Though I cannot tell why it was exactly that those stage managers, the
Fates, put me down for this shabby part of a whaling voyage, when others
were set down for magnificent parts in high tragedies, and short and
easy parts in genteel comedies, and jolly parts in farces--though
I cannot tell why this was exactly; yet, now that I recall all the
circumstances, I think I can see a little into the springs and motives
which being cunningly presented to me under various disguises, induced
me to set about performing the part I did, besides cajoling me into the
delusion that it was a choice resulting from my own unbiased freewill
and discriminating judgment.
Chief among these motives was the overwhelming idea of the great
whale himself. Such a portentous and mysterious monster roused all my
curiosity. Then the wild and distant seas where he rolled his island
bulk; the undeliverable, nameless perils of the whale; these, with all
the attending marvels of a thousand Patagonian sights and sounds, helped
to sway me to my wish. With other men, perhaps, such things would not
have been inducements; but as for me, I am tormented with an everlasting
itch for things remote. I love to sail forbidden seas, and land on
barbarous coasts. Not ignoring what is good, I am quick to perceive a
horror, and could still be social with it--would they let me--since it
is but well to be on friendly terms with all the inmates of the place
one lodges in.
By reason of these things, then, the whaling voyage was welcome; the
great flood-gates of the wonder-world swung open, and in the wild
conceits that swayed me to my purpose, two and two there floated into
my inmost soul, endless processions of the whale, and, mid most of them
all, one grand hooded phantom, like a snow hill in the air.
CHAPTER 2. The Carpet-Bag.
I stuffed a shirt or two into my old carpet-bag, tucked it under my arm,
and started for Cape Horn and the Pacific. Quitting the good city of
old Manhatto, I duly arrived in New Bedford. It was a Saturday night in
December. Much was I disappointed upon learning that the little packet
for Nantucket had already sailed, and that no way of reaching that place
would offer, till the following Monday.
As most young candidates for the pains and penalties of whaling stop at
this same New Bedford, thence to embark on their voyage, it may as well
be related that I, for one, had no idea of so doing. For my mind was
made up to sail in no other than a Nantucket craft, because there was a
fine, boisterous something about everything connected with that famous
old island, which amazingly pleased me. Besides though New Bedford has
of late been gradually monopolising the business of whaling, and though
in this matter poor old Nantucket is now much behind her, yet Nantucket
was her great original--the Tyre of this Carthage;--the place where the
first dead American whale was stranded. Where else but from Nantucket
did those aboriginal whalemen, the Red-Men, first sally out in canoes to
give chase to the Leviathan? And where but from Nantucket, too, did that
first adventurous little sloop put forth, partly laden with imported
cobblestones--so goes the story--to throw at the whales, in order to
discover when they were nigh enough to risk a harpoon from the bowsprit?
Now having a night, a day, and still another night following before me
in New Bedford, ere I could embark for my destined port, it became a
matter of concernment where I was to eat and sleep meanwhile. It was a
very dubious-looking, nay, a very dark and dismal night, bitingly cold
and cheerless. I knew no one in the place. With anxious grapnels I had
sounded my pocket, and only brought up a few pieces of silver,--So,
wherever you go, Ishmael, said I to myself, as I stood in the middle of
a dreary street shouldering my bag, and comparing the gloom towards the
north with the darkness towards the south--wherever in your wisdom you
may conclude to lodge for the night, my dear Ishmael, be sure to inquire
the price, and don't be too particular.
With halting steps I paced the streets, and passed the sign of "The
Crossed Harpoons"--but it looked too expensive and jolly there. Further
on, from the bright red windows of the "Sword-Fish Inn," there came such
fervent rays, that it seemed to have melted the packed snow and ice from
before the house, for everywhere else the congealed frost lay ten inches
thick in a hard, asphaltic pavement,--rather weary for me, when I struck
my foot against the flinty projections, because from hard, remorseless
service the soles of my boots were in a most miserable plight. Too
expensive and jolly, again thought I, pausing one moment to watch the
broad glare in the street, and hear the sounds of the tinkling glasses
within. But go on, Ishmael, said I at last; don't you hear? get away
from before the door; your patched boots are stopping the way. So on I
went. I now by instinct followed the streets that took me waterward, for
there, doubtless, were the cheapest, if not the cheeriest inns.
Such dreary streets! blocks of blackness, not houses, on either hand,
and here and there a candle, like a candle moving about in a tomb. At
this hour of the night, of the last day of the week, that quarter of
the town proved all but deserted. But presently I came to a smoky light
proceeding from a low, wide building, the door of which stood invitingly
open. It had a careless look, as if it were meant for the uses of the
public; so, entering, the first thing I did was to stumble over an
ash-box in the porch. Ha! thought I, ha, as the flying particles almost
choked me, are these ashes from that destroyed city, Gomorrah? But "The
Crossed Harpoons," and "The Sword-Fish?"--this, then must needs be the
sign of "The Trap." However, I picked myself up and hearing a loud voice
within, pushed on and opened a second, interior door.
It seemed the great Black Parliament sitting in Tophet. A hundred black
faces turned round in their rows to peer; and beyond, a black Angel
of Doom was beating a book in a pulpit. It was a negro church; and the
preacher's text was about the blackness of darkness, and the weeping and
wailing and teeth-gnashing there. Ha, Ishmael, muttered I, backing out,
Wretched entertainment at the sign of 'The Trap!'
Moving on, I at last came to a dim sort of light not far from the docks,
and heard a forlorn creaking in the air; and looking up, saw a swinging
sign over the door with a white painting upon it, faintly representing
a tall straight jet of misty spray, and these words underneath--"The
Spouter Inn:--Peter Coffin."
Coffin?--Spouter?--Rather ominous in that particular connexion, thought
I. But it is a common name in Nantucket, they say, and I suppose this
Peter here is an emigrant from there. As the light looked so dim, and
the place, for the time, looked quiet enough, and the dilapidated little
wooden house itself looked as if it might have been carted here from
the ruins of some burnt district, and as the swinging sign had a
poverty-stricken sort of creak to it, I thought that here was the very
spot for cheap lodgings, and the best of pea coffee.
It was a queer sort of place--a gable-ended old house, one side palsied
as it were, and leaning over sadly. It stood on a sharp bleak corner,
where that tempestuous wind Euroclydon kept up a worse howling than ever
it did about poor Paul's tossed craft. Euroclydon, nevertheless, is a
mighty pleasant zephyr to any one in-doors, with his feet on the hob
quietly toasting for bed. "In judging of that tempestuous wind called
Euroclydon," says an old writer--of whose works I possess the only copy
extant--"it maketh a marvellous difference, whether thou lookest out at
it from a glass window where the frost is all on the outside, or whether
thou observest it from that sashless window, where the frost is on both
sides, and of which the wight Death is the only glazier." True enough,
thought I, as this passage occurred to my mind--old black-letter, thou
reasonest well. Yes, these eyes are windows, and this body of mine is
the house. What a pity they didn't stop up the chinks and the crannies
though, and thrust in a little lint here and there. But it's too late
to make any improvements now. The universe is finished; the copestone
is on, and the chips were carted off a million years ago. Poor Lazarus
there, chattering his teeth against the curbstone for his pillow, and
shaking off his tatters with his shiverings, he might plug up both ears
with rags, and put a corn-cob into his mouth, and yet that would not
keep out the tempestuous Euroclydon. Euroclydon! says old Dives, in his
red silken wrapper--(he had a redder one afterwards) pooh, pooh! What
a fine frosty night; how Orion glitters; what northern lights! Let them
talk of their oriental summer climes of everlasting conservatories; give
me the privilege of making my own summer with my own coals.
But what thinks Lazarus? Can he warm his blue hands by holding them up
to the grand northern lights? Would not Lazarus rather be in Sumatra
than here? Would he not far rather lay him down lengthwise along the
line of the equator; yea, ye gods! go down to the fiery pit itself, in
order to keep out this frost?
Now, that Lazarus should lie stranded there on the curbstone before the
door of Dives, this is more wonderful than that an iceberg should be
moored to one of the Moluccas. Yet Dives himself, he too lives like a
Czar in an ice palace made of frozen sighs, and being a president of a
temperance society, he only drinks the tepid tears of orphans.
But no more of this blubbering now, we are going a-whaling, and there is
plenty of that yet to come. Let us scrape the ice from our frosted feet,
and see what sort of a place this "Spouter" may be.
CHAPTER 3. The Spouter-Inn.
Entering that gable-ended Spouter-Inn, you found yourself in a wide,
low, straggling entry with old-fashioned wainscots, reminding one of
the bulwarks of some condemned old craft. On one side hung a very large
oilpainting so thoroughly besmoked, and every way defaced, that in the
unequal crosslights by which you viewed it, it was only by diligent
study and a series of systematic visits to it, and careful inquiry of
the neighbors, that you could any way arrive at an understanding of its
purpose. Such unaccountable masses of shades and shadows, that at first
you almost thought some ambitious young artist, in the time of the New
England hags, had endeavored to delineate chaos bewitched. But by dint
of much and earnest contemplation, and oft repeated ponderings, and
especially by throwing open the little window towards the back of the
entry, you at last come to the conclusion that such an idea, however
wild, might not be altogether unwarranted.
But what most puzzled and confounded you was a long, limber, portentous,
black mass of something hovering in the centre of the picture over three
blue, dim, perpendicular lines floating in a nameless yeast. A boggy,
soggy, squitchy picture truly, enough to drive a nervous man distracted.
Yet was there a sort of indefinite, half-attained, unimaginable
sublimity about it that fairly froze you to it, till you involuntarily
took an oath with yourself to find out what that marvellous painting
meant. Ever and anon a bright, but, alas, deceptive idea would dart you
through.--It's the Black Sea in a midnight gale.--It's the unnatural
combat of the four primal elements.--It's a blasted heath.--It's a
Hyperborean winter scene.--It's the breaking-up of the icebound stream
of Time. But at last all these fancies yielded to that one portentous
something in the picture's midst. THAT once found out, and all the rest
were plain. But stop; does it not bear a faint resemblance to a gigantic
fish? even the great leviathan himself?
In fact, the artist's design seemed this: a final theory of my own,
partly based upon the aggregated opinions of many aged persons with whom
I conversed upon the subject. The picture represents a Cape-Horner in a
great hurricane; the half-foundered ship weltering there with its three
dismantled masts alone visible; and an exasperated whale, purposing to
spring clean over the craft, is in the enormous act of impaling himself
upon the three mast-heads.
The opposite wall of this entry was hung all over with a heathenish
array of monstrous clubs and spears. Some were thickly set with
glittering teeth resembling ivory saws; others were tufted with knots of
human hair; and one was sickle-shaped, with a vast handle sweeping round
like the segment made in the new-mown grass by a long-armed mower. You
shuddered as you gazed, and wondered what monstrous cannibal and savage
could ever have gone a death-harvesting with such a hacking, horrifying
implement. Mixed with these were rusty old whaling lances and harpoons
all broken and deformed. Some were storied weapons. With this once long
lance, now wildly elbowed, fifty years ago did Nathan Swain kill fifteen
whales between a sunrise and a sunset. And that harpoon--so like a
corkscrew now--was flung in Javan seas, and run away with by a whale,
years afterwards slain off the Cape of Blanco. The original iron entered
nigh the tail, and, like a restless needle sojourning in the body of a
man, travelled full forty feet, and at last was found imbedded in the
hump.
Crossing this dusky entry, and on through yon low-arched way--cut
through what in old times must have been a great central chimney with
fireplaces all round--you enter the public room. A still duskier place
is this, with such low ponderous beams above, and such old wrinkled
planks beneath, that you would almost fancy you trod some old craft's
cockpits, especially of such a howling night, when this corner-anchored
old ark rocked so furiously. On one side stood a long, low, shelf-like
table covered with cracked glass cases, filled with dusty rarities
gathered from this wide world's remotest nooks. Projecting from the
further angle of the room stands a dark-looking den--the bar--a rude
attempt at a right whale's head. Be that how it may, there stands the
vast arched bone of the whale's jaw, so wide, a coach might almost drive
beneath it. Within are shabby shelves, ranged round with old decanters,
bottles, flasks; and in those jaws of swift destruction, like another
cursed Jonah (by which name indeed they called him), bustles a little
withered old man, who, for their money, dearly sells the sailors
deliriums and death.
Abominable are the tumblers into which he pours his poison. Though
true cylinders without--within, the villanous green goggling glasses
deceitfully tapered downwards to a cheating bottom. Parallel meridians
rudely pecked into the glass, surround these footpads' goblets. Fill to
THIS mark, and your charge is but a penny; to THIS a penny more; and so
on to the full glass--the Cape Horn measure, which you may gulp down for
a shilling.
Upon entering the place I found a number of young seamen gathered about
a table, examining by a dim light divers specimens of SKRIMSHANDER. I
sought the landlord, and telling him I desired to be accommodated with a
room, received for answer that his house was full--not a bed unoccupied.
"But avast," he added, tapping his forehead, "you haint no objections
to sharing a harpooneer's blanket, have ye? I s'pose you are goin'
a-whalin', so you'd better get used to that sort of thing."
I told him that I never liked to sleep two in a bed; that if I should
ever do so, it would depend upon who the harpooneer might be, and
that if he (the landlord) really had no other place for me, and the
harpooneer was not decidedly objectionable, why rather than wander
further about a strange town on so bitter a night, I would put up with
the half of any decent man's blanket.
"I thought so. All right; take a seat. Supper?--you want supper?
Supper'll be ready directly."
I sat down on an old wooden settle, carved all over like a bench on the
Battery. At one end a ruminating tar was still further adorning it with
his jack-knife, stooping over and diligently working away at the space
between his legs. He was trying his hand at a ship under full sail, but
he didn't make much headway, I thought.
At last some four or five of us were summoned to our meal in an
adjoining room. It was cold as Iceland--no fire at all--the landlord
said he couldn't afford it. Nothing but two dismal tallow candles, each
in a winding sheet. We were fain to button up our monkey jackets, and
hold to our lips cups of scalding tea with our half frozen fingers. But
the fare was of the most substantial kind--not only meat and potatoes,
but dumplings; good heavens! dumplings for supper! One young fellow in
a green box coat, addressed himself to these dumplings in a most direful
manner.
"My boy," said the landlord, "you'll have the nightmare to a dead
sartainty."
"Landlord," I whispered, "that aint the harpooneer is it?"
"Oh, no," said he, looking a sort of diabolically funny, "the harpooneer
is a dark complexioned chap. He never eats dumplings, he don't--he eats
nothing but steaks, and he likes 'em rare."
"The devil he does," says I. "Where is that harpooneer? Is he here?"
"He'll be here afore long," was the answer.
I could not help it, but I began to feel suspicious of this "dark
complexioned" harpooneer. At any rate, I made up my mind that if it so
turned out that we should sleep together, he must undress and get into
bed before I did.
Supper over, the company went back to the bar-room, when, knowing not
what else to do with myself, I resolved to spend the rest of the evening
as a looker on.
Presently a rioting noise was heard without. Starting up, the landlord
cried, "That's the Grampus's crew. I seed her reported in the offing
this morning; a three years' voyage, and a full ship. Hurrah, boys; now
we'll have the latest news from the Feegees."
A tramping of sea boots was heard in the entry; the door was flung open,
and in rolled a wild set of mariners enough. Enveloped in their shaggy
watch coats, and with their heads muffled in woollen comforters, all
bedarned and ragged, and their beards stiff with icicles, they seemed an
eruption of bears from Labrador. They had just landed from their boat,
and this was the first house they entered. No wonder, then, that they
made a straight wake for the whale's mouth--the bar--when the wrinkled
little old Jonah, there officiating, soon poured them out brimmers all
round. One complained of a bad cold in his head, upon which Jonah
mixed him a pitch-like potion of gin and molasses, which he swore was a
sovereign cure for all colds and catarrhs whatsoever, never mind of how
long standing, or whether caught off the coast of Labrador, or on the
weather side of an ice-island.
The liquor soon mounted into their heads, as it generally does even
with the arrantest topers newly landed from sea, and they began capering
about most obstreperously.
I observed, however, that one of them held somewhat aloof, and though
he seemed desirous not to spoil the hilarity of his shipmates by his own
sober face, yet upon the whole he refrained from making as much noise
as the rest. This man interested me at once; and since the sea-gods
had ordained that he should soon become my shipmate (though but a
sleeping-partner one, so far as this narrative is concerned), I will
here venture upon a little description of him. He stood full six feet
in height, with noble shoulders, and a chest like a coffer-dam. I have
seldom seen such brawn in a man. His face was deeply brown and burnt,
making his white teeth dazzling by the contrast; while in the deep
shadows of his eyes floated some reminiscences that did not seem to give
him much joy. His voice at once announced that he was a Southerner,
and from his fine stature, I thought he must be one of those tall
mountaineers from the Alleghanian Ridge in Virginia. When the revelry
of his companions had mounted to its height, this man slipped away
unobserved, and I saw no more of him till he became my comrade on the
sea. In a few minutes, however, he was missed by his shipmates, and
being, it seems, for some reason a huge favourite with them, they raised
a cry of "Bulkington! Bulkington! where's Bulkington?" and darted out of
the house in pursuit of him.
It was now about nine o'clock, and the room seeming almost
supernaturally quiet after these orgies, I began to congratulate myself
upon a little plan that had occurred to me just previous to the entrance
of the seamen.
No man prefers to sleep two in a bed. In fact, you would a good deal
rather not sleep with your own brother. I don't know how it is, but
people like to be private when they are sleeping. And when it comes to
sleeping with an unknown stranger, in a strange inn, in a strange
town, and that stranger a harpooneer, then your objections indefinitely
multiply. Nor was there any earthly reason why I as a sailor should
sleep two in a bed, more than anybody else; for sailors no more sleep
two in a bed at sea, than bachelor Kings do ashore. To be sure they
all sleep together in one apartment, but you have your own hammock, and
cover yourself with your own blanket, and sleep in your own skin.
The more I pondered over this harpooneer, the more I abominated the
thought of sleeping with him. It was fair to presume that being a
harpooneer, his linen or woollen, as the case might be, would not be of
the tidiest, certainly none of the finest. I began to twitch all over.
Besides, it was getting late, and my decent harpooneer ought to be
home and going bedwards. Suppose now, he should tumble in upon me at
midnight--how could I tell from what vile hole he had been coming?
"Landlord! I've changed my mind about that harpooneer.--I shan't sleep
with him. I'll try the bench here."
"Just as you please; I'm sorry I can't spare ye a tablecloth for a
mattress, and it's a plaguy rough board here"--feeling of the knots and
notches. "But wait a bit, Skrimshander; I've got a carpenter's plane
there in the bar--wait, I say, and I'll make ye snug enough." So saying
he procured the plane; and with his old silk handkerchief first dusting
the bench, vigorously set to planing away at my bed, the while grinning
like an ape. The shavings flew right and left; till at last the
plane-iron came bump against an indestructible knot. The landlord was
near spraining his wrist, and I told him for heaven's sake to quit--the
bed was soft enough to suit me, and I did not know how all the planing
in the world could make eider down of a pine plank. So gathering up the
shavings with another grin, and throwing them into the great stove in
the middle of the room, he went about his business, and left me in a
brown study.
I now took the measure of the bench, and found that it was a foot too
short; but that could be mended with a chair. But it was a foot too
narrow, and the other bench in the room was about four inches higher
than the planed one--so there was no yoking them. I then placed the
first bench lengthwise along the only clear space against the wall,
leaving a little interval between, for my back to settle down in. But I
soon found that there came such a draught of cold air over me from under
the sill of the window, that this plan would never do at all, especially
as another current from the rickety door met the one from the window,
and both together formed a series of small whirlwinds in the immediate
vicinity of the spot where I had thought to spend the night.
The devil fetch that harpooneer, thought I, but stop, couldn't I steal
a march on him--bolt his door inside, and jump into his bed, not to be
wakened by the most violent knockings? It seemed no bad idea; but upon
second thoughts I dismissed it. For who could tell but what the next
morning, so soon as I popped out of the room, the harpooneer might be
standing in the entry, all ready to knock me down!
Still, looking round me again, and seeing no possible chance of spending
a sufferable night unless in some other person's bed, I began to think
that after all I might be cherishing unwarrantable prejudices against
this unknown harpooneer. Thinks I, I'll wait awhile; he must be dropping
in before long. I'll have a good look at him then, and perhaps we may
become jolly good bedfellows after all--there's no telling.
But though the other boarders kept coming in by ones, twos, and threes,
and going to bed, yet no sign of my harpooneer.
"Landlord!" said I, "what sort of a chap is he--does he always keep such
late hours?" It was now hard upon twelve o'clock.
The landlord chuckled again with his lean chuckle, and seemed to
be mightily tickled at something beyond my comprehension. "No," he
answered, "generally he's an early bird--airley to bed and airley to
rise--yes, he's the bird what catches the worm. But to-night he went out
a peddling, you see, and I don't see what on airth keeps him so late,
unless, may be, he can't sell his head."
"Can't sell his head?--What sort of a bamboozingly story is this you
are telling me?" getting into a towering rage. "Do you pretend to say,
landlord, that this harpooneer is actually engaged this blessed Saturday
night, or rather Sunday morning, in peddling his head around this town?"
"That's precisely it," said the landlord, "and I told him he couldn't
sell it here, the market's overstocked."
"With what?" shouted I.
"With heads to be sure; ain't there too many heads in the world?"
"I tell you what it is, landlord," said I quite calmly, "you'd better
stop spinning that yarn to me--I'm not green."
"May be not," taking out a stick and whittling a toothpick, "but I
rayther guess you'll be done BROWN if that ere harpooneer hears you a
slanderin' his head."
"I'll break it for him," said I, now flying into a passion again at this
unaccountable farrago of the landlord's.
"It's broke a'ready," said he.
"Broke," said I--"BROKE, do you mean?"
"Sartain, and that's the very reason he can't sell it, I guess."
"Landlord," said I, going up to him as cool as Mt. Hecla in a
snow-storm--"landlord, stop whittling. You and I must understand one
another, and that too without delay. I come to your house and want a
bed; you tell me you can only give me half a one; that the other half
belongs to a certain harpooneer. And about this harpooneer, whom I
have not yet seen, you persist in telling me the most mystifying and
exasperating stories tending to beget in me an uncomfortable feeling
towards the man whom you design for my bedfellow--a sort of connexion,
landlord, which is an intimate and confidential one in the highest
degree. I now demand of you to speak out and tell me who and what this
harpooneer is, and whether I shall be in all respects safe to spend the
night with him. And in the first place, you will be so good as to unsay
that story about selling his head, which if true I take to be good
evidence that this harpooneer is stark mad, and I've no idea of sleeping
with a madman; and you, sir, YOU I mean, landlord, YOU, sir, by trying
to induce me to do so knowingly, would thereby render yourself liable to
a criminal prosecution."
"Wall," said the landlord, fetching a long breath, "that's a purty long
sarmon for a chap that rips a little now and then. But be easy, be easy,
this here harpooneer I have been tellin' you of has just arrived from
the south seas, where he bought up a lot of 'balmed New Zealand heads
(great curios, you know), and he's sold all on 'em but one, and that one
he's trying to sell to-night, cause to-morrow's Sunday, and it would not
do to be sellin' human heads about the streets when folks is goin' to
churches. He wanted to, last Sunday, but I stopped him just as he was
goin' out of the door with four heads strung on a string, for all the
airth like a string of inions."
This account cleared up the otherwise unaccountable mystery, and showed
that the landlord, after all, had had no idea of fooling me--but at
the same time what could I think of a harpooneer who stayed out of a
Saturday night clean into the holy Sabbath, engaged in such a cannibal
business as selling the heads of dead idolators?
"Depend upon it, landlord, that harpooneer is a dangerous man."
"He pays reg'lar," was the rejoinder. "But come, it's getting dreadful
late, you had better be turning flukes--it's a nice bed; Sal and me
slept in that ere bed the night we were spliced. There's plenty of room
for two to kick about in that bed; it's an almighty big bed that. Why,
afore we give it up, Sal used to put our Sam and little Johnny in the
foot of it. But I got a dreaming and sprawling about one night, and
somehow, Sam got pitched on the floor, and came near breaking his arm.
Arter that, Sal said it wouldn't do. Come along here, I'll give ye a
glim in a jiffy;" and so saying he lighted a candle and held it towards
me, offering to lead the way. But I stood irresolute; when looking at a
clock in the corner, he exclaimed "I vum it's Sunday--you won't see that
harpooneer to-night; he's come to anchor somewhere--come along then; DO
come; WON'T ye come?"
I considered the matter a moment, and then up stairs we went, and I was
ushered into a small room, cold as a clam, and furnished, sure enough,
with a prodigious bed, almost big enough indeed for any four harpooneers
to sleep abreast.
"There," said the landlord, placing the candle on a crazy old sea chest
that did double duty as a wash-stand and centre table; "there, make
yourself comfortable now, and good night to ye." I turned round from
eyeing the bed, but he had disappeared.
Folding back the counterpane, I stooped over the bed. Though none of the
most elegant, it yet stood the scrutiny tolerably well. I then glanced
round the room; and besides the bedstead and centre table, could see
no other furniture belonging to the place, but a rude shelf, the four
walls, and a papered fireboard representing a man striking a whale. Of
things not properly belonging to the room, there was a hammock lashed
up, and thrown upon the floor in one corner; also a large seaman's bag,
containing the harpooneer's wardrobe, no doubt in lieu of a land trunk.
Likewise, there was a parcel of outlandish bone fish hooks on the shelf
over the fire-place, and a tall harpoon standing at the head of the bed.
But what is this on the chest? I took it up, and held it close to the
light, and felt it, and smelt it, and tried every way possible to arrive
at some satisfactory conclusion concerning it. I can compare it to
nothing but a large door mat, ornamented at the edges with little
tinkling tags something like the stained porcupine quills round an
Indian moccasin. There was a hole or slit in the middle of this mat,
as you see the same in South American ponchos. But could it be possible
that any sober harpooneer would get into a door mat, and parade the
streets of any Christian town in that sort of guise? I put it on, to try
it, and it weighed me down like a hamper, being uncommonly shaggy and
thick, and I thought a little damp, as though this mysterious harpooneer
had been wearing it of a rainy day. I went up in it to a bit of glass
stuck against the wall, and I never saw such a sight in my life. I tore
myself out of it in such a hurry that I gave myself a kink in the neck.
I sat down on the side of the bed, and commenced thinking about this
head-peddling harpooneer, and his door mat. After thinking some time on
the bed-side, I got up and took off my monkey jacket, and then stood in
the middle of the room thinking. I then took off my coat, and thought
a little more in my shirt sleeves. But beginning to feel very cold now,
half undressed as I was, and remembering what the landlord said about
the harpooneer's not coming home at all that night, it being so very
late, I made no more ado, but jumped out of my pantaloons and boots, and
then blowing out the light tumbled into bed, and commended myself to the
care of heaven.
Whether that mattress was stuffed with corn-cobs or broken crockery,
there is no telling, but I rolled about a good deal, and could not sleep
for a long time. At last I slid off into a light doze, and had pretty
nearly made a good offing towards the land of Nod, when I heard a heavy
footfall in the passage, and saw a glimmer of light come into the room
from under the door.
Lord save me, thinks I, that must be the harpooneer, the infernal
head-peddler. But I lay perfectly still, and resolved not to say a word
till spoken to. Holding a light in one hand, and that identical New
Zealand head in the other, the stranger entered the room, and without
looking towards the bed, placed his candle a good way off from me on the
floor in one corner, and then began working away at the knotted cords
of the large bag I before spoke of as being in the room. I was all
eagerness to see his face, but he kept it averted for some time while
employed in unlacing the bag's mouth. This accomplished, however, he
turned round--when, good heavens! what a sight! Such a face! It was of
a dark, purplish, yellow colour, here and there stuck over with large
blackish looking squares. Yes, it's just as I thought, he's a terrible
bedfellow; he's been in a fight, got dreadfully cut, and here he is,
just from the surgeon. But at that moment he chanced to turn his face
so towards the light, that I plainly saw they could not be
sticking-plasters at all, those black squares on his cheeks. They were
stains of some sort or other. At first I knew not what to make of this;
but soon an inkling of the truth occurred to me. I remembered a story of
a white man--a whaleman too--who, falling among the cannibals, had been
tattooed by them. I concluded that this harpooneer, in the course of his
distant voyages, must have met with a similar adventure. And what is it,
thought I, after all! It's only his outside; a man can be honest in any
sort of skin. But then, what to make of his unearthly complexion, that
part of it, I mean, lying round about, and completely independent of the
squares of tattooing. To be sure, it might be nothing but a good coat of
tropical tanning; but I never heard of a hot sun's tanning a white man
into a purplish yellow one. However, I had never been in the South Seas;
and perhaps the sun there produced these extraordinary effects upon the
skin. Now, while all these ideas were passing through me like lightning,
this harpooneer never noticed me at all. But, after some difficulty
having opened his bag, he commenced fumbling in it, and presently pulled
out a sort of tomahawk, and a seal-skin wallet with the hair on. Placing
these on the old chest in the middle of the room, he then took the New
Zealand head--a ghastly thing enough--and crammed it down into the bag.
He now took off his hat--a new beaver hat--when I came nigh singing out
with fresh surprise. There was no hair on his head--none to speak of at
least--nothing but a small scalp-knot twisted up on his forehead. His
bald purplish head now looked for all the world like a mildewed skull.
Had not the stranger stood between me and the door, I would have bolted
out of it quicker than ever I bolted a dinner.
Even as it was, I thought something of slipping out of the window, but
it was the second floor back. I am no coward, but what to make of
this head-peddling purple rascal altogether passed my comprehension.
Ignorance is the parent of fear, and being completely nonplussed and
confounded about the stranger, I confess I was now as much afraid of him
as if it was the devil himself who had thus broken into my room at
the dead of night. In fact, I was so afraid of him that I was not
game enough just then to address him, and demand a satisfactory answer
concerning what seemed inexplicable in him.
Meanwhile, he continued the business of undressing, and at last showed
his chest and arms. As I live, these covered parts of him were checkered
with the same squares as his face; his back, too, was all over the same
dark squares; he seemed to have been in a Thirty Years' War, and just
escaped from it with a sticking-plaster shirt. Still more, his very
legs were marked, as if a parcel of dark green frogs were running up
the trunks of young palms. It was now quite plain that he must be some
abominable savage or other shipped aboard of a whaleman in the South
Seas, and so landed in this Christian country. I quaked to think of it.
A peddler of heads too--perhaps the heads of his own brothers. He might
take a fancy to mine--heavens! look at that tomahawk!
But there was no time for shuddering, for now the savage went about
something that completely fascinated my attention, and convinced me that
he must indeed be a heathen. Going to his heavy grego, or wrapall, or
dreadnaught, which he had previously hung on a chair, he fumbled in the
pockets, and produced at length a curious little deformed image with
a hunch on its back, and exactly the colour of a three days' old Congo
baby. Remembering the embalmed head, at first I almost thought that
this black manikin was a real baby preserved in some similar manner. But
seeing that it was not at all limber, and that it glistened a good deal
like polished ebony, I concluded that it must be nothing but a wooden
idol, which indeed it proved to be. For now the savage goes up to the
empty fire-place, and removing the papered fire-board, sets up this
little hunch-backed image, like a tenpin, between the andirons. The
chimney jambs and all the bricks inside were very sooty, so that I
thought this fire-place made a very appropriate little shrine or chapel
for his Congo idol.
I now screwed my eyes hard towards the half hidden image, feeling but
ill at ease meantime--to see what was next to follow. First he takes
about a double handful of shavings out of his grego pocket, and places
them carefully before the idol; then laying a bit of ship biscuit on
top and applying the flame from the lamp, he kindled the shavings into
a sacrificial blaze. Presently, after many hasty snatches into the fire,
and still hastier withdrawals of his fingers (whereby he seemed to be
scorching them badly), he at last succeeded in drawing out the biscuit;
then blowing off the heat and ashes a little, he made a polite offer of
it to the little negro. But the little devil did not seem to fancy such
dry sort of fare at all; he never moved his lips. All these strange
antics were accompanied by still stranger guttural noises from the
devotee, who seemed to be praying in a sing-song or else singing some
pagan psalmody or other, during which his face twitched about in the
most unnatural manner. At last extinguishing the fire, he took the idol
up very unceremoniously, and bagged it again in his grego pocket as
carelessly as if he were a sportsman bagging a dead woodcock.
All these queer proceedings increased my uncomfortableness, and
seeing him now exhibiting strong symptoms of concluding his business
operations, and jumping into bed with me, I thought it was high time,
now or never, before the light was put out, to break the spell in which
I had so long been bound.
But the interval I spent in deliberating what to say, was a fatal one.
Taking up his tomahawk from the table, he examined the head of it for an
instant, and then holding it to the light, with his mouth at the handle,
he puffed out great clouds of tobacco smoke. The next moment the light
was extinguished, and this wild cannibal, tomahawk between his teeth,
sprang into bed with me. I sang out, I could not help it now; and giving
a sudden grunt of astonishment he began feeling me.
Stammering out something, I knew not what, I rolled away from him
against the wall, and then conjured him, whoever or whatever he might
be, to keep quiet, and let me get up and light the lamp again. But his
guttural responses satisfied me at once that he but ill comprehended my
meaning.
"Who-e debel you?"--he at last said--"you no speak-e, dam-me, I kill-e."
And so saying the lighted tomahawk began flourishing about me in the
dark.
"Landlord, for God's sake, Peter Coffin!" shouted I. "Landlord! Watch!
Coffin! Angels! save me!"
"Speak-e! tell-ee me who-ee be, or dam-me, I kill-e!" again growled the
cannibal, while his horrid flourishings of the tomahawk scattered the
hot tobacco ashes about me till I thought my linen would get on fire.
But thank heaven, at that moment the landlord came into the room light
in hand, and leaping from the bed I ran up to him.
"Don't be afraid now," said he, grinning again, "Queequeg here wouldn't
harm a hair of your head."
"Stop your grinning," shouted I, "and why didn't you tell me that that
infernal harpooneer was a cannibal?"
"I thought ye know'd it;--didn't I tell ye, he was a peddlin' heads
around town?--but turn flukes again and go to sleep. Queequeg, look
here--you sabbee me, I sabbee--you this man sleepe you--you sabbee?"
"Me sabbee plenty"--grunted Queequeg, puffing away at his pipe and
sitting up in bed.
"You gettee in," he added, motioning to me with his tomahawk, and
throwing the clothes to one side. He really did this in not only a civil
but a really kind and charitable way. I stood looking at him a moment.
For all his tattooings he was on the whole a clean, comely looking
cannibal. What's all this fuss I have been making about, thought I to
myself--the man's a human being just as I am: he has just as much reason
to fear me, as I have to be afraid of him. Better sleep with a sober
cannibal than a drunken Christian.
"Landlord," said I, "tell him to stash his tomahawk there, or pipe, or
whatever you call it; tell him to stop smoking, in short, and I will
turn in with him. But I don't fancy having a man smoking in bed with me.
It's dangerous. Besides, I ain't insured."
This being told to Queequeg, he at once complied, and again politely
motioned me to get into bed--rolling over to one side as much as to
say--"I won't touch a leg of ye."
"Good night, landlord," said I, "you may go."
I turned in, and never slept better in my life.
CHAPTER 4. The Counterpane.
Upon waking next morning about daylight, I found Queequeg's arm thrown
over me in the most loving and affectionate manner. You had almost
thought I had been his wife. The counterpane was of patchwork, full of
odd little parti-coloured squares and triangles; and this arm of his
tattooed all over with an interminable Cretan labyrinth of a figure,
no two parts of which were of one precise shade--owing I suppose to
his keeping his arm at sea unmethodically in sun and shade, his shirt
sleeves irregularly rolled up at various times--this same arm of his, I
say, looked for all the world like a strip of that same patchwork quilt.
Indeed, partly lying on it as the arm did when I first awoke, I could
hardly tell it from the quilt, they so blended their hues together; and
it was only by the sense of weight and pressure that I could tell that
Queequeg was hugging me.
My sensations were strange. Let me try to explain them. When I was a
child, I well remember a somewhat similar circumstance that befell me;
whether it was a reality or a dream, I never could entirely settle.
The circumstance was this. I had been cutting up some caper or other--I
think it was trying to crawl up the chimney, as I had seen a little
sweep do a few days previous; and my stepmother who, somehow or other,
was all the time whipping me, or sending me to bed supperless,--my
mother dragged me by the legs out of the chimney and packed me off to
bed, though it was only two o'clock in the afternoon of the 21st June,
the longest day in the year in our hemisphere. I felt dreadfully. But
there was no help for it, so up stairs I went to my little room in the
third floor, undressed myself as slowly as possible so as to kill time,
and with a bitter sigh got between the sheets.
I lay there dismally calculating that sixteen entire hours must elapse
before I could hope for a resurrection. Sixteen hours in bed! the
small of my back ached to think of it. And it was so light too; the
sun shining in at the window, and a great rattling of coaches in the
streets, and the sound of gay voices all over the house. I felt worse
and worse--at last I got up, dressed, and softly going down in my
stockinged feet, sought out my stepmother, and suddenly threw myself
at her feet, beseeching her as a particular favour to give me a good
slippering for my misbehaviour; anything indeed but condemning me to lie
abed such an unendurable length of time. But she was the best and most
conscientious of stepmothers, and back I had to go to my room. For
several hours I lay there broad awake, feeling a great deal worse than I
have ever done since, even from the greatest subsequent misfortunes. At
last I must have fallen into a troubled nightmare of a doze; and slowly
waking from it--half steeped in dreams--I opened my eyes, and the before
sun-lit room was now wrapped in outer darkness. Instantly I felt a shock
running through all my frame; nothing was to be seen, and nothing was
to be heard; but a supernatural hand seemed placed in mine. My arm hung
over the counterpane, and the nameless, unimaginable, silent form
or phantom, to which the hand belonged, seemed closely seated by my
bed-side. For what seemed ages piled on ages, I lay there, frozen with
the most awful fears, not daring to drag away my hand; yet ever thinking
that if I could but stir it one single inch, the horrid spell would be
broken. I knew not how this consciousness at last glided away from me;
but waking in the morning, I shudderingly remembered it all, and for
days and weeks and months afterwards I lost myself in confounding
attempts to explain the mystery. Nay, to this very hour, I often puzzle
myself with it.
Now, take away the awful fear, and my sensations at feeling the
supernatural hand in mine were very similar, in their strangeness, to
those which I experienced on waking up and seeing Queequeg's pagan
arm thrown round me. But at length all the past night's events soberly
recurred, one by one, in fixed reality, and then I lay only alive to
the comical predicament. For though I tried to move his arm--unlock his
bridegroom clasp--yet, sleeping as he was, he still hugged me tightly,
as though naught but death should part us twain. I now strove to rouse
him--"Queequeg!"--but his only answer was a snore. I then rolled over,
my neck feeling as if it were in a horse-collar; and suddenly felt a
slight scratch. Throwing aside the counterpane, there lay the tomahawk
sleeping by the savage's side, as if it were a hatchet-faced baby. A
pretty pickle, truly, thought I; abed here in a strange house in the
broad day, with a cannibal and a tomahawk! "Queequeg!--in the name of
goodness, Queequeg, wake!" At length, by dint of much wriggling, and
loud and incessant expostulations upon the unbecomingness of his
hugging a fellow male in that matrimonial sort of style, I succeeded in
extracting a grunt; and presently, he drew back his arm, shook himself
all over like a Newfoundland dog just from the water, and sat up in bed,
stiff as a pike-staff, looking at me, and rubbing his eyes as if he
did not altogether remember how I came to be there, though a dim
consciousness of knowing something about me seemed slowly dawning over
him. Meanwhile, I lay quietly eyeing him, having no serious misgivings
now, and bent upon narrowly observing so curious a creature. When, at
last, his mind seemed made up touching the character of his bedfellow,
and he became, as it were, reconciled to the fact; he jumped out upon
the floor, and by certain signs and sounds gave me to understand that,
if it pleased me, he would dress first and then leave me to dress
afterwards, leaving the whole apartment to myself. Thinks I, Queequeg,
under the circumstances, this is a very civilized overture; but, the
truth is, these savages have an innate sense of delicacy, say what
you will; it is marvellous how essentially polite they are. I pay this
particular compliment to Queequeg, because he treated me with so much
civility and consideration, while I was guilty of great rudeness;
staring at him from the bed, and watching all his toilette motions; for
the time my curiosity getting the better of my breeding. Nevertheless,
a man like Queequeg you don't see every day, he and his ways were well
worth unusual regarding.
He commenced dressing at top by donning his beaver hat, a very tall one,
by the by, and then--still minus his trowsers--he hunted up his boots.
What under the heavens he did it for, I cannot tell, but his next
movement was to crush himself--boots in hand, and hat on--under the bed;
when, from sundry violent gaspings and strainings, I inferred he was
hard at work booting himself; though by no law of propriety that I ever
heard of, is any man required to be private when putting on his
boots. But Queequeg, do you see, was a creature in the transition
stage--neither caterpillar nor butterfly. He was just enough civilized
to show off his outlandishness in the strangest possible manners. His
education was not yet completed. He was an undergraduate. If he had not
been a small degree civilized, he very probably would not have troubled
himself with boots at all; but then, if he had not been still a savage,
he never would have dreamt of getting under the bed to put them on. At
last, he emerged with his hat very much dented and crushed down over his
eyes, and began creaking and limping about the room, as if, not
being much accustomed to boots, his pair of damp, wrinkled cowhide
ones--probably not made to order either--rather pinched and tormented
him at the first go off of a bitter cold morning.
Seeing, now, that there were no curtains to the window, and that the
street being very narrow, the house opposite commanded a plain view
into the room, and observing more and more the indecorous figure that
Queequeg made, staving about with little else but his hat and boots on;
I begged him as well as I could, to accelerate his toilet somewhat,
and particularly to get into his pantaloons as soon as possible. He
complied, and then proceeded to wash himself. At that time in the
morning any Christian would have washed his face; but Queequeg, to
my amazement, contented himself with restricting his ablutions to his
chest, arms, and hands. He then donned his waistcoat, and taking up a
piece of hard soap on the wash-stand centre table, dipped it into water
and commenced lathering his face. I was watching to see where he kept
his razor, when lo and behold, he takes the harpoon from the bed corner,
slips out the long wooden stock, unsheathes the head, whets it a little
on his boot, and striding up to the bit of mirror against the wall,
begins a vigorous scraping, or rather harpooning of his cheeks. Thinks
I, Queequeg, this is using Rogers's best cutlery with a vengeance.
Afterwards I wondered the less at this operation when I came to know of
what fine steel the head of a harpoon is made, and how exceedingly sharp
the long straight edges are always kept.
The rest of his toilet was soon achieved, and he proudly marched out of
the room, wrapped up in his great pilot monkey jacket, and sporting his
harpoon like a marshal's baton.
CHAPTER 5. Breakfast.
I quickly followed suit, and descending into the bar-room accosted the
grinning landlord very pleasantly. I cherished no malice towards him,
though he had been skylarking with me not a little in the matter of my
bedfellow.
However, a good laugh is a mighty good thing, and rather too scarce a
good thing; the more's the pity. So, if any one man, in his own
proper person, afford stuff for a good joke to anybody, let him not be
backward, but let him cheerfully allow himself to spend and be spent in
that way. And the man that has anything bountifully laughable about him,
be sure there is more in that man than you perhaps think for.
The bar-room was now full of the boarders who had been dropping in the
night previous, and whom I had not as yet had a good look at. They were
nearly all whalemen; chief mates, and second mates, and third mates, and
sea carpenters, and sea coopers, and sea blacksmiths, and harpooneers,
and ship keepers; a brown and brawny company, with bosky beards; an
unshorn, shaggy set, all wearing monkey jackets for morning gowns.
You could pretty plainly tell how long each one had been ashore. This
young fellow's healthy cheek is like a sun-toasted pear in hue, and
would seem to smell almost as musky; he cannot have been three days
landed from his Indian voyage. That man next him looks a few shades
lighter; you might say a touch of satin wood is in him. In the
complexion of a third still lingers a tropic tawn, but slightly bleached
withal; HE doubtless has tarried whole weeks ashore. But who could show
a cheek like Queequeg? which, barred with various tints, seemed like the
Andes' western slope, to show forth in one array, contrasting climates,
zone by zone.
"Grub, ho!" now cried the landlord, flinging open a door, and in we went
to breakfast.
They say that men who have seen the world, thereby become quite at ease
in manner, quite self-possessed in company. Not always, though: Ledyard,
the great New England traveller, and Mungo Park, the Scotch one; of all
men, they possessed the least assurance in the parlor. But perhaps the
mere crossing of Siberia in a sledge drawn by dogs as Ledyard did, or
the taking a long solitary walk on an empty stomach, in the negro heart
of Africa, which was the sum of poor Mungo's performances--this kind of
travel, I say, may not be the very best mode of attaining a high social
polish. Still, for the most part, that sort of thing is to be had
anywhere.
These reflections just here are occasioned by the circumstance that
after we were all seated at the table, and I was preparing to hear some
good stories about whaling; to my no small surprise, nearly every
man maintained a profound silence. And not only that, but they looked
embarrassed. Yes, here were a set of sea-dogs, many of whom without the
slightest bashfulness had boarded great whales on the high seas--entire
strangers to them--and duelled them dead without winking; and yet, here
they sat at a social breakfast table--all of the same calling, all of
kindred tastes--looking round as sheepishly at each other as though they
had never been out of sight of some sheepfold among the Green Mountains.
A curious sight; these bashful bears, these timid warrior whalemen!
But as for Queequeg--why, Queequeg sat there among them--at the head of
the table, too, it so chanced; as cool as an icicle. To be sure I cannot
say much for his breeding. His greatest admirer could not have cordially
justified his bringing his harpoon into breakfast with him, and using it
there without ceremony; reaching over the table with it, to the imminent
jeopardy of many heads, and grappling the beefsteaks towards him. But
THAT was certainly very coolly done by him, and every one knows that in
most people's estimation, to do anything coolly is to do it genteelly.
We will not speak of all Queequeg's peculiarities here; how he eschewed
coffee and hot rolls, and applied his undivided attention to beefsteaks,
done rare. Enough, that when breakfast was over he withdrew like the
rest into the public room, lighted his tomahawk-pipe, and was sitting
there quietly digesting and smoking with his inseparable hat on, when I
sallied out for a stroll.
CHAPTER 6. The Street.
If I had been astonished at first catching a glimpse of so outlandish
an individual as Queequeg circulating among the polite society of a
civilized town, that astonishment soon departed upon taking my first
daylight stroll through the streets of New Bedford.
In thoroughfares nigh the docks, any considerable seaport will
frequently offer to view the queerest looking nondescripts from foreign
parts. Even in Broadway and Chestnut streets, Mediterranean mariners
will sometimes jostle the affrighted ladies. Regent Street is not
unknown to Lascars and Malays; and at Bombay, in the Apollo Green, live
Yankees have often scared the natives. But New Bedford beats all Water
Street and Wapping. In these last-mentioned haunts you see only sailors;
but in New Bedford, actual cannibals stand chatting at street corners;
savages outright; many of whom yet carry on their bones unholy flesh. It
makes a stranger stare.
But, besides the Feegeeans, Tongatobooarrs, Erromanggoans, Pannangians,
and Brighggians, and, besides the wild specimens of the whaling-craft
which unheeded reel about the streets, you will see other sights still
more curious, certainly more comical. There weekly arrive in this town
scores of green Vermonters and New Hampshire men, all athirst for gain
and glory in the fishery. They are mostly young, of stalwart frames;
fellows who have felled forests, and now seek to drop the axe and snatch
the whale-lance. Many are as green as the Green Mountains whence they
came. In some things you would think them but a few hours old. Look
there! that chap strutting round the corner. He wears a beaver hat and
swallow-tailed coat, girdled with a sailor-belt and sheath-knife. Here
comes another with a sou'-wester and a bombazine cloak.
No town-bred dandy will compare with a country-bred one--I mean a
downright bumpkin dandy--a fellow that, in the dog-days, will mow his
two acres in buckskin gloves for fear of tanning his hands. Now when a
country dandy like this takes it into his head to make a distinguished
reputation, and joins the great whale-fishery, you should see the
comical things he does upon reaching the seaport. In bespeaking his
sea-outfit, he orders bell-buttons to his waistcoats; straps to his
canvas trowsers. Ah, poor Hay-Seed! how bitterly will burst those straps
in the first howling gale, when thou art driven, straps, buttons, and
all, down the throat of the tempest.
But think not that this famous town has only harpooneers, cannibals, and
bumpkins to show her visitors. Not at all. Still New Bedford is a queer
place. Had it not been for us whalemen, that tract of land would this
day perhaps have been in as howling condition as the coast of Labrador.
As it is, parts of her back country are enough to frighten one, they
look so bony. The town itself is perhaps the dearest place to live
in, in all New England. It is a land of oil, true enough: but not like
Canaan; a land, also, of corn and wine. The streets do not run with
milk; nor in the spring-time do they pave them with fresh eggs. Yet, in
spite of this, nowhere in all America will you find more patrician-like
houses; parks and gardens more opulent, than in New Bedford. Whence came
they? how planted upon this once scraggy scoria of a country?
Go and gaze upon the iron emblematical harpoons round yonder lofty
mansion, and your question will be answered. Yes; all these brave houses
and flowery gardens came from the Atlantic, Pacific, and Indian oceans.
One and all, they were harpooned and dragged up hither from the bottom
of the sea. Can Herr Alexander perform a feat like that?
In New Bedford, fathers, they say, give whales for dowers to their
daughters, and portion off their nieces with a few porpoises a-piece.
You must go to New Bedford to see a brilliant wedding; for, they say,
they have reservoirs of oil in every house, and every night recklessly
burn their lengths in spermaceti candles.
In summer time, the town is sweet to see; full of fine maples--long
avenues of green and gold. And in August, high in air, the beautiful and
bountiful horse-chestnuts, candelabra-wise, proffer the passer-by their
tapering upright cones of congregated blossoms. So omnipotent is art;
which in many a district of New Bedford has superinduced bright terraces
of flowers upon the barren refuse rocks thrown aside at creation's final
day.
And the women of New Bedford, they bloom like their own red roses. But
roses only bloom in summer; whereas the fine carnation of their cheeks
is perennial as sunlight in the seventh heavens. Elsewhere match that
bloom of theirs, ye cannot, save in Salem, where they tell me the young
girls breathe such musk, their sailor sweethearts smell them miles off
shore, as though they were drawing nigh the odorous Moluccas instead of
the Puritanic sands.
CHAPTER 7. The Chapel.
In this same New Bedford there stands a Whaleman's Chapel, and few are
the moody fishermen, shortly bound for the Indian Ocean or Pacific, who
fail to make a Sunday visit to the spot. I am sure that I did not.
Returning from my first morning stroll, I again sallied out upon this
special errand. The sky had changed from clear, sunny cold, to driving
sleet and mist. Wrapping myself in my shaggy jacket of the cloth called
bearskin, I fought my way against the stubborn storm. Entering, I
found a small scattered congregation of sailors, and sailors' wives and
widows. A muffled silence reigned, only broken at times by the shrieks
of the storm. Each silent worshipper seemed purposely sitting apart from
the other, as if each silent grief were insular and incommunicable. The
chaplain had not yet arrived; and there these silent islands of men and
women sat steadfastly eyeing several marble tablets, with black borders,
masoned into the wall on either side the pulpit. Three of them ran
something like the following, but I do not pretend to quote:--
SACRED TO THE MEMORY OF JOHN TALBOT, Who, at the age of eighteen, was
lost overboard, Near the Isle of Desolation, off Patagonia, November
1st, 1836. THIS TABLET Is erected to his Memory BY HIS SISTER.
SACRED TO THE MEMORY OF ROBERT LONG, WILLIS ELLERY, NATHAN COLEMAN,
WALTER CANNY, SETH MACY, AND SAMUEL GLEIG, Forming one of the boats'
crews OF THE SHIP ELIZA Who were towed out of sight by a Whale, On the
Off-shore Ground in the PACIFIC, December 31st, 1839. THIS MARBLE Is
here placed by their surviving SHIPMATES.
SACRED TO THE MEMORY OF The late CAPTAIN EZEKIEL HARDY, Who in the bows
of his boat was killed by a Sperm Whale on the coast of Japan, AUGUST
3d, 1833. THIS TABLET Is erected to his Memory BY HIS WIDOW.
Shaking off the sleet from my ice-glazed hat and jacket, I seated myself
near the door, and turning sideways was surprised to see Queequeg near
me. Affected by the solemnity of the scene, there was a wondering gaze
of incredulous curiosity in his countenance. This savage was the only
person present who seemed to notice my entrance; because he was the only
one who could not read, and, therefore, was not reading those frigid
inscriptions on the wall. Whether any of the relatives of the seamen
whose names appeared there were now among the congregation, I knew not;
but so many are the unrecorded accidents in the fishery, and so plainly
did several women present wear the countenance if not the trappings
of some unceasing grief, that I feel sure that here before me were
assembled those, in whose unhealing hearts the sight of those bleak
tablets sympathetically caused the old wounds to bleed afresh.
Oh! ye whose dead lie buried beneath the green grass; who standing among
flowers can say--here, HERE lies my beloved; ye know not the desolation
that broods in bosoms like these. What bitter blanks in those
black-bordered marbles which cover no ashes! What despair in those
immovable inscriptions! What deadly voids and unbidden infidelities in
the lines that seem to gnaw upon all Faith, and refuse resurrections to
the beings who have placelessly perished without a grave. As well might
those tablets stand in the cave of Elephanta as here.
In what census of living creatures, the dead of mankind are included;
why it is that a universal proverb says of them, that they tell no
tales, though containing more secrets than the Goodwin Sands; how it is
that to his name who yesterday departed for the other world, we prefix
so significant and infidel a word, and yet do not thus entitle him, if
he but embarks for the remotest Indies of this living earth; why the
Life Insurance Companies pay death-forfeitures upon immortals; in what
eternal, unstirring paralysis, and deadly, hopeless trance, yet lies
antique Adam who died sixty round centuries ago; how it is that we
still refuse to be comforted for those who we nevertheless maintain are
dwelling in unspeakable bliss; why all the living so strive to hush all
the dead; wherefore but the rumor of a knocking in a tomb will terrify a
whole city. All these things are not without their meanings.
But Faith, like a jackal, feeds among the tombs, and even from these
dead doubts she gathers her most vital hope.
It needs scarcely to be told, with what feelings, on the eve of a
Nantucket voyage, I regarded those marble tablets, and by the murky
light of that darkened, doleful day read the fate of the whalemen
who had gone before me. Yes, Ishmael, the same fate may be thine. But
somehow I grew merry again. Delightful inducements to embark, fine
chance for promotion, it seems--aye, a stove boat will make me an
immortal by brevet. Yes, there is death in this business of whaling--a
speechlessly quick chaotic bundling of a man into Eternity. But what
then? Methinks we have hugely mistaken this matter of Life and Death.
Methinks that what they call my shadow here on earth is my true
substance. Methinks that in looking at things spiritual, we are too
much like oysters observing the sun through the water, and thinking that
thick water the thinnest of air. Methinks my body is but the lees of my
better being. In fact take my body who will, take it I say, it is not
me. And therefore three cheers for Nantucket; and come a stove boat and
stove body when they will, for stave my soul, Jove himself cannot.
CHAPTER 8. The Pulpit.
I had not been seated very long ere a man of a certain venerable
robustness entered; immediately as the storm-pelted door flew back upon
admitting him, a quick regardful eyeing of him by all the congregation,
sufficiently attested that this fine old man was the chaplain. Yes, it
was the famous Father Mapple, so called by the whalemen, among whom he
was a very great favourite. He had been a sailor and a harpooneer in his
youth, but for many years past had dedicated his life to the ministry.
At the time I now write of, Father Mapple was in the hardy winter of a
healthy old age; that sort of old age which seems merging into a second
flowering youth, for among all the fissures of his wrinkles, there shone
certain mild gleams of a newly developing bloom--the spring verdure
peeping forth even beneath February's snow. No one having previously
heard his history, could for the first time behold Father Mapple without
the utmost interest, because there were certain engrafted clerical
peculiarities about him, imputable to that adventurous maritime life
he had led. When he entered I observed that he carried no umbrella, and
certainly had not come in his carriage, for his tarpaulin hat ran down
with melting sleet, and his great pilot cloth jacket seemed almost to
drag him to the floor with the weight of the water it had absorbed.
However, hat and coat and overshoes were one by one removed, and hung up
in a little space in an adjacent corner; when, arrayed in a decent suit,
he quietly approached the pulpit.
Like most old fashioned pulpits, it was a very lofty one, and since a
regular stairs to such a height would, by its long angle with the floor,
seriously contract the already small area of the chapel, the architect,
it seemed, had acted upon the hint of Father Mapple, and finished the
pulpit without a stairs, substituting a perpendicular side ladder, like
those used in mounting a ship from a boat at sea. The wife of a whaling
captain had provided the chapel with a handsome pair of red worsted
man-ropes for this ladder, which, being itself nicely headed, and
stained with a mahogany colour, the whole contrivance, considering what
manner of chapel it was, seemed by no means in bad taste. Halting for
an instant at the foot of the ladder, and with both hands grasping the
ornamental knobs of the man-ropes, Father Mapple cast a look upwards,
and then with a truly sailor-like but still reverential dexterity, hand
over hand, mounted the steps as if ascending the main-top of his vessel.
The perpendicular parts of this side ladder, as is usually the case with
swinging ones, were of cloth-covered rope, only the rounds were of wood,
so that at every step there was a joint. At my first glimpse of the
pulpit, it had not escaped me that however convenient for a ship,
these joints in the present instance seemed unnecessary. For I was not
prepared to see Father Mapple after gaining the height, slowly turn
round, and stooping over the pulpit, deliberately drag up the ladder
step by step, till the whole was deposited within, leaving him
impregnable in his little Quebec.
I pondered some time without fully comprehending the reason for this.
Father Mapple enjoyed such a wide reputation for sincerity and sanctity,
that I could not suspect him of courting notoriety by any mere tricks
of the stage. No, thought I, there must be some sober reason for this
thing; furthermore, it must symbolize something unseen. Can it be,
then, that by that act of physical isolation, he signifies his spiritual
withdrawal for the time, from all outward worldly ties and connexions?
Yes, for replenished with the meat and wine of the word, to the faithful
man of God, this pulpit, I see, is a self-containing stronghold--a lofty
Ehrenbreitstein, with a perennial well of water within the walls.
But the side ladder was not the only strange feature of the place,
borrowed from the chaplain's former sea-farings. Between the marble
cenotaphs on either hand of the pulpit, the wall which formed its back
was adorned with a large painting representing a gallant ship beating
against a terrible storm off a lee coast of black rocks and snowy
breakers. But high above the flying scud and dark-rolling clouds, there
floated a little isle of sunlight, from which beamed forth an angel's
face; and this bright face shed a distinct spot of radiance upon the
ship's tossed deck, something like that silver plate now inserted into
the Victory's plank where Nelson fell. "Ah, noble ship," the angel
seemed to say, "beat on, beat on, thou noble ship, and bear a hardy
helm; for lo! the sun is breaking through; the clouds are rolling
off--serenest azure is at hand."
Nor was the pulpit itself without a trace of the same sea-taste that
had achieved the ladder and the picture. Its panelled front was in
the likeness of a ship's bluff bows, and the Holy Bible rested on a
projecting piece of scroll work, fashioned after a ship's fiddle-headed
beak.
What could be more full of meaning?--for the pulpit is ever this earth's
foremost part; all the rest comes in its rear; the pulpit leads the
world. From thence it is the storm of God's quick wrath is first
descried, and the bow must bear the earliest brunt. From thence it is
the God of breezes fair or foul is first invoked for favourable winds.
Yes, the world's a ship on its passage out, and not a voyage complete;
and the pulpit is its prow.
CHAPTER 9. The Sermon.
Father Mapple rose, and in a mild voice of unassuming authority ordered
the scattered people to condense. "Starboard gangway, there! side away
to larboard--larboard gangway to starboard! Midships! midships!"
There was a low rumbling of heavy sea-boots among the benches, and a
still slighter shuffling of women's shoes, and all was quiet again, and
every eye on the preacher.
He paused a little; then kneeling in the pulpit's bows, folded his large
brown hands across his chest, uplifted his closed eyes, and offered
a prayer so deeply devout that he seemed kneeling and praying at the
bottom of the sea.
This ended, in prolonged solemn tones, like the continual tolling of
a bell in a ship that is foundering at sea in a fog--in such tones he
commenced reading the following hymn; but changing his manner towards
the concluding stanzas, burst forth with a pealing exultation and joy--
"The ribs and terrors in the whale,
Arched over me a dismal gloom,
While all God's sun-lit waves rolled by,
And lift me deepening down to doom.
"I saw the opening maw of hell,
With endless pains and sorrows there;
Which none but they that feel can tell--
Oh, I was plunging to despair.
"In black distress, I called my God,
When I could scarce believe him mine,
He bowed his ear to my complaints--
No more the whale did me confine.
"With speed he flew to my relief,
As on a radiant dolphin borne;
Awful, yet bright, as lightning shone
The face of my Deliverer God.
"My song for ever shall record
That terrible, that joyful hour;
I give the glory to my God,
His all the mercy and the power."
Nearly all joined in singing this hymn, which swelled high above the
howling of the storm. A brief pause ensued; the preacher slowly turned
over the leaves of the Bible, and at last, folding his hand down upon
the proper page, said: "Beloved shipmates, clinch the last verse of the
first chapter of Jonah--'And God had prepared a great fish to swallow up
Jonah.'"
"Shipmates, this book, containing only four chapters--four yarns--is one
of the smallest strands in the mighty cable of the Scriptures. Yet what
depths of the soul does Jonah's deep sealine sound! what a pregnant
lesson to us is this prophet! What a noble thing is that canticle in the
fish's belly! How billow-like and boisterously grand! We feel the floods
surging over us; we sound with him to the kelpy bottom of the waters;
sea-weed and all the slime of the sea is about us! But WHAT is this
lesson that the book of Jonah teaches? Shipmates, it is a two-stranded
lesson; a lesson to us all as sinful men, and a lesson to me as a pilot
of the living God. As sinful men, it is a lesson to us all, because it
is a story of the sin, hard-heartedness, suddenly awakened fears, the
swift punishment, repentance, prayers, and finally the deliverance and
joy of Jonah. As with all sinners among men, the sin of this son of
Amittai was in his wilful disobedience of the command of God--never
mind now what that command was, or how conveyed--which he found a hard
command. But all the things that God would have us do are hard for us to
do--remember that--and hence, he oftener commands us than endeavors to
persuade. And if we obey God, we must disobey ourselves; and it is in
this disobeying ourselves, wherein the hardness of obeying God consists.
"With this sin of disobedience in him, Jonah still further flouts at
God, by seeking to flee from Him. He thinks that a ship made by men will
carry him into countries where God does not reign, but only the Captains
of this earth. He skulks about the wharves of Joppa, and seeks a ship
that's bound for Tarshish. There lurks, perhaps, a hitherto unheeded
meaning here. By all accounts Tarshish could have been no other city
than the modern Cadiz. That's the opinion of learned men. And where is
Cadiz, shipmates? Cadiz is in Spain; as far by water, from Joppa,
as Jonah could possibly have sailed in those ancient days, when the
Atlantic was an almost unknown sea. Because Joppa, the modern Jaffa,
shipmates, is on the most easterly coast of the Mediterranean, the
Syrian; and Tarshish or Cadiz more than two thousand miles to the
westward from that, just outside the Straits of Gibraltar. See ye
not then, shipmates, that Jonah sought to flee world-wide from God?
Miserable man! Oh! most contemptible and worthy of all scorn; with
slouched hat and guilty eye, skulking from his God; prowling among the
shipping like a vile burglar hastening to cross the seas. So disordered,
self-condemning is his look, that had there been policemen in those
days, Jonah, on the mere suspicion of something wrong, had been arrested
ere he touched a deck. How plainly he's a fugitive! no baggage, not a
hat-box, valise, or carpet-bag,--no friends accompany him to the wharf
with their adieux. At last, after much dodging search, he finds the
Tarshish ship receiving the last items of her cargo; and as he steps on
board to see its Captain in the cabin, all the sailors for the moment
desist from hoisting in the goods, to mark the stranger's evil eye.
Jonah sees this; but in vain he tries to look all ease and confidence;
in vain essays his wretched smile. Strong intuitions of the man assure
the mariners he can be no innocent. In their gamesome but still serious
way, one whispers to the other--"Jack, he's robbed a widow;" or, "Joe,
do you mark him; he's a bigamist;" or, "Harry lad, I guess he's the
adulterer that broke jail in old Gomorrah, or belike, one of the missing
murderers from Sodom." Another runs to read the bill that's stuck
against the spile upon the wharf to which the ship is moored, offering
five hundred gold coins for the apprehension of a parricide, and
containing a description of his person. He reads, and looks from Jonah
to the bill; while all his sympathetic shipmates now crowd round Jonah,
prepared to lay their hands upon him. Frighted Jonah trembles, and
summoning all his boldness to his face, only looks so much the more a
coward. He will not confess himself suspected; but that itself is strong
suspicion. So he makes the best of it; and when the sailors find him
not to be the man that is advertised, they let him pass, and he descends
into the cabin.
"'Who's there?' cries the Captain at his busy desk, hurriedly making
out his papers for the Customs--'Who's there?' Oh! how that harmless
question mangles Jonah! For the instant he almost turns to flee again.
But he rallies. 'I seek a passage in this ship to Tarshish; how soon
sail ye, sir?' Thus far the busy Captain had not looked up to Jonah,
though the man now stands before him; but no sooner does he hear that
hollow voice, than he darts a scrutinizing glance. 'We sail with the
next coming tide,' at last he slowly answered, still intently eyeing
him. 'No sooner, sir?'--'Soon enough for any honest man that goes a
passenger.' Ha! Jonah, that's another stab. But he swiftly calls away
the Captain from that scent. 'I'll sail with ye,'--he says,--'the
passage money how much is that?--I'll pay now.' For it is particularly
written, shipmates, as if it were a thing not to be overlooked in this
history, 'that he paid the fare thereof' ere the craft did sail. And
taken with the context, this is full of meaning.
"Now Jonah's Captain, shipmates, was one whose discernment detects crime
in any, but whose cupidity exposes it only in the penniless. In this
world, shipmates, sin that pays its way can travel freely, and without
a passport; whereas Virtue, if a pauper, is stopped at all frontiers.
So Jonah's Captain prepares to test the length of Jonah's purse, ere he
judge him openly. He charges him thrice the usual sum; and it's assented
to. Then the Captain knows that Jonah is a fugitive; but at the same
time resolves to help a flight that paves its rear with gold. Yet when
Jonah fairly takes out his purse, prudent suspicions still molest the
Captain. He rings every coin to find a counterfeit. Not a forger, any
way, he mutters; and Jonah is put down for his passage. 'Point out my
state-room, Sir,' says Jonah now, 'I'm travel-weary; I need sleep.'
'Thou lookest like it,' says the Captain, 'there's thy room.' Jonah
enters, and would lock the door, but the lock contains no key. Hearing
him foolishly fumbling there, the Captain laughs lowly to himself, and
mutters something about the doors of convicts' cells being never allowed
to be locked within. All dressed and dusty as he is, Jonah throws
himself into his berth, and finds the little state-room ceiling almost
resting on his forehead. The air is close, and Jonah gasps. Then, in
that contracted hole, sunk, too, beneath the ship's water-line, Jonah
feels the heralding presentiment of that stifling hour, when the whale
shall hold him in the smallest of his bowels' wards.
"Screwed at its axis against the side, a swinging lamp slightly
oscillates in Jonah's room; and the ship, heeling over towards the wharf
with the weight of the last bales received, the lamp, flame and all,
though in slight motion, still maintains a permanent obliquity with
reference to the room; though, in truth, infallibly straight itself, it
but made obvious the false, lying levels among which it hung. The lamp
alarms and frightens Jonah; as lying in his berth his tormented eyes
roll round the place, and this thus far successful fugitive finds no
refuge for his restless glance. But that contradiction in the lamp more
and more appals him. The floor, the ceiling, and the side, are all awry.
'Oh! so my conscience hangs in me!' he groans, 'straight upwards, so it
burns; but the chambers of my soul are all in crookedness!'
"Like one who after a night of drunken revelry hies to his bed, still
reeling, but with conscience yet pricking him, as the plungings of the
Roman race-horse but so much the more strike his steel tags into him; as
one who in that miserable plight still turns and turns in giddy anguish,
praying God for annihilation until the fit be passed; and at last amid
the whirl of woe he feels, a deep stupor steals over him, as over the
man who bleeds to death, for conscience is the wound, and there's naught
to staunch it; so, after sore wrestlings in his berth, Jonah's prodigy
of ponderous misery drags him drowning down to sleep.
"And now the time of tide has come; the ship casts off her cables; and
from the deserted wharf the uncheered ship for Tarshish, all careening,
glides to sea. That ship, my friends, was the first of recorded
smugglers! the contraband was Jonah. But the sea rebels; he will not
bear the wicked burden. A dreadful storm comes on, the ship is like to
break. But now when the boatswain calls all hands to lighten her;
when boxes, bales, and jars are clattering overboard; when the wind
is shrieking, and the men are yelling, and every plank thunders with
trampling feet right over Jonah's head; in all this raging tumult, Jonah
sleeps his hideous sleep. He sees no black sky and raging sea, feels not
the reeling timbers, and little hears he or heeds he the far rush of the
mighty whale, which even now with open mouth is cleaving the seas after
him. Aye, shipmates, Jonah was gone down into the sides of the ship--a
berth in the cabin as I have taken it, and was fast asleep. But the
frightened master comes to him, and shrieks in his dead ear, 'What
meanest thou, O, sleeper! arise!' Startled from his lethargy by that
direful cry, Jonah staggers to his feet, and stumbling to the deck,
grasps a shroud, to look out upon the sea. But at that moment he is
sprung upon by a panther billow leaping over the bulwarks. Wave after
wave thus leaps into the ship, and finding no speedy vent runs roaring
fore and aft, till the mariners come nigh to drowning while yet afloat.
And ever, as the white moon shows her affrighted face from the steep
gullies in the blackness overhead, aghast Jonah sees the rearing
bowsprit pointing high upward, but soon beat downward again towards the
tormented deep.
"Terrors upon terrors run shouting through his soul. In all his cringing
attitudes, the God-fugitive is now too plainly known. The sailors mark
him; more and more certain grow their suspicions of him, and at last,
fully to test the truth, by referring the whole matter to high Heaven,
they fall to casting lots, to see for whose cause this great tempest was
upon them. The lot is Jonah's; that discovered, then how furiously they
mob him with their questions. 'What is thine occupation? Whence comest
thou? Thy country? What people?' But mark now, my shipmates, the behavior
of poor Jonah. The eager mariners but ask him who he is, and where
from; whereas, they not only receive an answer to those questions,
but likewise another answer to a question not put by them, but the
unsolicited answer is forced from Jonah by the hard hand of God that is
upon him.
"'I am a Hebrew,' he cries--and then--'I fear the Lord the God of Heaven
who hath made the sea and the dry land!' Fear him, O Jonah? Aye, well
mightest thou fear the Lord God THEN! Straightway, he now goes on to
make a full confession; whereupon the mariners became more and more
appalled, but still are pitiful. For when Jonah, not yet supplicating
God for mercy, since he but too well knew the darkness of his
deserts,--when wretched Jonah cries out to them to take him and cast him
forth into the sea, for he knew that for HIS sake this great tempest
was upon them; they mercifully turn from him, and seek by other means to
save the ship. But all in vain; the indignant gale howls louder;
then, with one hand raised invokingly to God, with the other they not
unreluctantly lay hold of Jonah.
"And now behold Jonah taken up as an anchor and dropped into the sea;
when instantly an oily calmness floats out from the east, and the sea
is still, as Jonah carries down the gale with him, leaving smooth
water behind. He goes down in the whirling heart of such a masterless
commotion that he scarce heeds the moment when he drops seething into
the yawning jaws awaiting him; and the whale shoots-to all his ivory
teeth, like so many white bolts, upon his prison. Then Jonah prayed unto
the Lord out of the fish's belly. But observe his prayer, and learn a
weighty lesson. For sinful as he is, Jonah does not weep and wail for
direct deliverance. He feels that his dreadful punishment is just. He
leaves all his deliverance to God, contenting himself with this, that
spite of all his pains and pangs, he will still look towards His holy
temple. And here, shipmates, is true and faithful repentance; not
clamorous for pardon, but grateful for punishment. And how pleasing to
God was this conduct in Jonah, is shown in the eventual deliverance of
him from the sea and the whale. Shipmates, I do not place Jonah before
you to be copied for his sin but I do place him before you as a model
for repentance. Sin not; but if you do, take heed to repent of it like
Jonah."
While he was speaking these words, the howling of the shrieking,
slanting storm without seemed to add new power to the preacher, who,
when describing Jonah's sea-storm, seemed tossed by a storm himself.
His deep chest heaved as with a ground-swell; his tossed arms seemed the
warring elements at work; and the thunders that rolled away from off his
swarthy brow, and the light leaping from his eye, made all his simple
hearers look on him with a quick fear that was strange to them.
There now came a lull in his look, as he silently turned over the leaves
of the Book once more; and, at last, standing motionless, with closed
eyes, for the moment, seemed communing with God and himself.
But again he leaned over towards the people, and bowing his head lowly,
with an aspect of the deepest yet manliest humility, he spake these
words:
"Shipmates, God has laid but one hand upon you; both his hands press
upon me. I have read ye by what murky light may be mine the lesson that
Jonah teaches to all sinners; and therefore to ye, and still more to me,
for I am a greater sinner than ye. And now how gladly would I come down
from this mast-head and sit on the hatches there where you sit, and
listen as you listen, while some one of you reads ME that other and more
awful lesson which Jonah teaches to ME, as a pilot of the living God.
How being an anointed pilot-prophet, or speaker of true things, and
bidden by the Lord to sound those unwelcome truths in the ears of a
wicked Nineveh, Jonah, appalled at the hostility he should raise, fled
from his mission, and sought to escape his duty and his God by taking
ship at Joppa. But God is everywhere; Tarshish he never reached. As we
have seen, God came upon him in the whale, and swallowed him down to
living gulfs of doom, and with swift slantings tore him along 'into the
midst of the seas,' where the eddying depths sucked him ten thousand
fathoms down, and 'the weeds were wrapped about his head,' and all the
watery world of woe bowled over him. Yet even then beyond the reach of
any plummet--'out of the belly of hell'--when the whale grounded upon
the ocean's utmost bones, even then, God heard the engulphed, repenting
prophet when he cried. Then God spake unto the fish; and from the
shuddering cold and blackness of the sea, the whale came breeching
up towards the warm and pleasant sun, and all the delights of air and
earth; and 'vomited out Jonah upon the dry land;' when the word of the
Lord came a second time; and Jonah, bruised and beaten--his ears, like
two sea-shells, still multitudinously murmuring of the ocean--Jonah
did the Almighty's bidding. And what was that, shipmates? To preach the
Truth to the face of Falsehood! That was it!
"This, shipmates, this is that other lesson; and woe to that pilot of
the living God who slights it. Woe to him whom this world charms from
Gospel duty! Woe to him who seeks to pour oil upon the waters when God
has brewed them into a gale! Woe to him who seeks to please rather than
to appal! Woe to him whose good name is more to him than goodness! Woe
to him who, in this world, courts not dishonour! Woe to him who would
not be true, even though to be false were salvation! Yea, woe to him
who, as the great Pilot Paul has it, while preaching to others is
himself a castaway!"
He dropped and fell away from himself for a moment; then lifting his
face to them again, showed a deep joy in his eyes, as he cried out with
a heavenly enthusiasm,--"But oh! shipmates! on the starboard hand of
every woe, there is a sure delight; and higher the top of that delight,
than the bottom of the woe is deep. Is not the main-truck higher than
the kelson is low? Delight is to him--a far, far upward, and inward
delight--who against the proud gods and commodores of this earth, ever
stands forth his own inexorable self. Delight is to him whose strong
arms yet support him, when the ship of this base treacherous world has
gone down beneath him. Delight is to him, who gives no quarter in the
truth, and kills, burns, and destroys all sin though he pluck it out
from under the robes of Senators and Judges. Delight,--top-gallant
delight is to him, who acknowledges no law or lord, but the Lord his
God, and is only a patriot to heaven. Delight is to him, whom all the
waves of the billows of the seas of the boisterous mob can never shake
from this sure Keel of the Ages. And eternal delight and deliciousness
will be his, who coming to lay him down, can say with his final
breath--O Father!--chiefly known to me by Thy rod--mortal or immortal,
here I die. I have striven to be Thine, more than to be this world's, or
mine own. Yet this is nothing: I leave eternity to Thee; for what is man
that he should live out the lifetime of his God?"
He said no more, but slowly waving a benediction, covered his face with
his hands, and so remained kneeling, till all the people had departed,
and he was left alone in the place.
CHAPTER 10. A Bosom Friend.
Returning to the Spouter-Inn from the Chapel, I found Queequeg there
quite alone; he having left the Chapel before the benediction some time.
He was sitting on a bench before the fire, with his feet on the stove
hearth, and in one hand was holding close up to his face that little
negro idol of his; peering hard into its face, and with a jack-knife
gently whittling away at its nose, meanwhile humming to himself in his
heathenish way.
But being now interrupted, he put up the image; and pretty soon, going
to the table, took up a large book there, and placing it on his lap
began counting the pages with deliberate regularity; at every fiftieth
page--as I fancied--stopping a moment, looking vacantly around him, and
giving utterance to a long-drawn gurgling whistle of astonishment. He
would then begin again at the next fifty; seeming to commence at number
one each time, as though he could not count more than fifty, and it was
only by such a large number of fifties being found together, that his
astonishment at the multitude of pages was excited.
With much interest I sat watching him. Savage though he was, and
hideously marred about the face--at least to my taste--his countenance
yet had a something in it which was by no means disagreeable. You cannot
hide the soul. Through all his unearthly tattooings, I thought I saw
the traces of a simple honest heart; and in his large, deep eyes,
fiery black and bold, there seemed tokens of a spirit that would dare a
thousand devils. And besides all this, there was a certain lofty bearing
about the Pagan, which even his uncouthness could not altogether maim.
He looked like a man who had never cringed and never had had a creditor.
Whether it was, too, that his head being shaved, his forehead was drawn
out in freer and brighter relief, and looked more expansive than it
otherwise would, this I will not venture to decide; but certain it was
his head was phrenologically an excellent one. It may seem ridiculous,
but it reminded me of General Washington's head, as seen in the popular
busts of him. It had the same long regularly graded retreating slope
from above the brows, which were likewise very projecting, like two
long promontories thickly wooded on top. Queequeg was George Washington
cannibalistically developed.
Whilst I was thus closely scanning him, half-pretending meanwhile to be
looking out at the storm from the casement, he never heeded my presence,
never troubled himself with so much as a single glance; but appeared
wholly occupied with counting the pages of the marvellous book.
Considering how sociably we had been sleeping together the night
previous, and especially considering the affectionate arm I had found
thrown over me upon waking in the morning, I thought this indifference
of his very strange. But savages are strange beings; at times you do not
know exactly how to take them. At first they are overawing; their calm
self-collectedness of simplicity seems a Socratic wisdom. I had noticed
also that Queequeg never consorted at all, or but very little, with the
other seamen in the inn. He made no advances whatever; appeared to have
no desire to enlarge the circle of his acquaintances. All this struck
me as mighty singular; yet, upon second thoughts, there was something
almost sublime in it. Here was a man some twenty thousand miles from
home, by the way of Cape Horn, that is--which was the only way he could
get there--thrown among people as strange to him as though he were in
the planet Jupiter; and yet he seemed entirely at his ease; preserving
the utmost serenity; content with his own companionship; always equal to
himself. Surely this was a touch of fine philosophy; though no doubt he
had never heard there was such a thing as that. But, perhaps, to be
true philosophers, we mortals should not be conscious of so living or
so striving. So soon as I hear that such or such a man gives himself
out for a philosopher, I conclude that, like the dyspeptic old woman, he
must have "broken his digester."
As I sat there in that now lonely room; the fire burning low, in that
mild stage when, after its first intensity has warmed the air, it then
only glows to be looked at; the evening shades and phantoms gathering
round the casements, and peering in upon us silent, solitary twain;
the storm booming without in solemn swells; I began to be sensible of
strange feelings. I felt a melting in me. No more my splintered heart
and maddened hand were turned against the wolfish world. This soothing
savage had redeemed it. There he sat, his very indifference speaking a
nature in which there lurked no civilized hypocrisies and bland deceits.
Wild he was; a very sight of sights to see; yet I began to feel myself
mysteriously drawn towards him. And those same things that would have
repelled most others, they were the very magnets that thus drew me. I'll
try a pagan friend, thought I, since Christian kindness has proved but
hollow courtesy. I drew my bench near him, and made some friendly signs
and hints, doing my best to talk with him meanwhile. At first he little
noticed these advances; but presently, upon my referring to his last
night's hospitalities, he made out to ask me whether we were again to be
bedfellows. I told him yes; whereat I thought he looked pleased, perhaps
a little complimented.
We then turned over the book together, and I endeavored to explain to
him the purpose of the printing, and the meaning of the few pictures
that were in it. Thus I soon engaged his interest; and from that we went
to jabbering the best we could about the various outer sights to be seen
in this famous town. Soon I proposed a social smoke; and, producing
his pouch and tomahawk, he quietly offered me a puff. And then we sat
exchanging puffs from that wild pipe of his, and keeping it regularly
passing between us.
If there yet lurked any ice of indifference towards me in the Pagan's
breast, this pleasant, genial smoke we had, soon thawed it out, and left
us cronies. He seemed to take to me quite as naturally and unbiddenly as
I to him; and when our smoke was over, he pressed his forehead against
mine, clasped me round the waist, and said that henceforth we were
married; meaning, in his country's phrase, that we were bosom friends;
he would gladly die for me, if need should be. In a countryman, this
sudden flame of friendship would have seemed far too premature, a thing
to be much distrusted; but in this simple savage those old rules would
not apply.
After supper, and another social chat and smoke, we went to our room
together. He made me a present of his embalmed head; took out his
enormous tobacco wallet, and groping under the tobacco, drew out
some thirty dollars in silver; then spreading them on the table, and
mechanically dividing them into two equal portions, pushed one of them
towards me, and said it was mine. I was going to remonstrate; but he
silenced me by pouring them into my trowsers' pockets. I let them stay.
He then went about his evening prayers, took out his idol, and removed
the paper fireboard. By certain signs and symptoms, I thought he seemed
anxious for me to join him; but well knowing what was to follow, I
deliberated a moment whether, in case he invited me, I would comply or
otherwise.
I was a good Christian; born and bred in the bosom of the infallible
Presbyterian Church. How then could I unite with this wild idolator in
worshipping his piece of wood? But what is worship? thought I. Do
you suppose now, Ishmael, that the magnanimous God of heaven and
earth--pagans and all included--can possibly be jealous of an
insignificant bit of black wood? Impossible! But what is worship?--to do
the will of God--THAT is worship. And what is the will of God?--to do to
my fellow man what I would have my fellow man to do to me--THAT is the
will of God. Now, Queequeg is my fellow man. And what do I wish that
this Queequeg would do to me? Why, unite with me in my particular
Presbyterian form of worship. Consequently, I must then unite with him
in his; ergo, I must turn idolator. So I kindled the shavings; helped
prop up the innocent little idol; offered him burnt biscuit with
Queequeg; salamed before him twice or thrice; kissed his nose; and that
done, we undressed and went to bed, at peace with our own consciences
and all the world. But we did not go to sleep without some little chat.
How it is I know not; but there is no place like a bed for confidential
disclosures between friends. Man and wife, they say, there open the very
bottom of their souls to each other; and some old couples often lie
and chat over old times till nearly morning. Thus, then, in our hearts'
honeymoon, lay I and Queequeg--a cosy, loving pair.
"""
|
BonexGu/Blik2D-SDK | refs/heads/master | Blik2D/addon/tensorflow-1.2.1_for_blik/tensorflow/contrib/distributions/python/ops/bijectors/chain_impl.py | 51 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Chain bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
from tensorflow.python.framework import constant_op
from tensorflow.python.ops.distributions import bijector
__all__ = [
"Chain",
]
class Chain(bijector.Bijector):
  """Bijector which applies a sequence of bijectors.
  Example Use:
  ```python
  chain = Chain([Exp(), Softplus()], name="one_plus_exp")
  ```
  Results in:
  * Forward:
  ```python
  exp = Exp()
  softplus = Softplus()
  Chain([exp, softplus]).forward(x)
  = exp.forward(softplus.forward(x))
  = tf.exp(tf.log(1. + tf.exp(x)))
  = 1. + tf.exp(x)
  ```
  * Inverse:
  ```python
  exp = Exp()
  softplus = Softplus()
  Chain([exp, softplus]).inverse(y)
  = softplus.inverse(exp.inverse(y))
  = tf.log(tf.exp(tf.log(y)) - 1.)
  = tf.log(y - 1.)
  ```
  """
  def __init__(self, bijectors=None, validate_args=False, name=None):
    """Instantiates `Chain` bijector.
    Args:
      bijectors: Python `list` of bijector instances. An empty list makes this
        bijector equivalent to the `Identity` bijector.
      validate_args: Python `bool` indicating whether arguments should be
        checked for correctness.
      name: Python `str`, name given to ops managed by this object. Default:
        E.g., `Chain([Exp(), Softplus()]).name == "chain_of_exp_of_softplus"`.
    Raises:
      ValueError: if bijectors have different dtypes.
    """
    if bijectors is None:
      bijectors = ()
    self._bijectors = bijectors
    # Distinct dtypes across the chained bijectors; a bijector with no fixed
    # dtype contributes `None` to the set.
    dtype = list(set([b.dtype for b in bijectors]))
    if len(dtype) > 2:
      # Three or more distinct dtypes can never be reconciled.
      raise ValueError("incompatible dtypes: %s" % dtype)
    elif len(dtype) == 2:
      # Two distinct dtypes: keep the non-None one when the other is None.
      # NOTE(review): two distinct non-None dtypes also reach this branch and
      # the first element of the (unordered) set is kept silently -- confirm
      # that is intended rather than an error.
      dtype = dtype[1] if dtype[0] is None else dtype[0]
      event_ndims = bijectors[0].event_ndims
    elif len(dtype) == 1:
      dtype = dtype[0]
      event_ndims = bijectors[0].event_ndims
    else:
      # Empty chain: behaves like Identity, with unconstrained dtype/ndims.
      dtype = None
      event_ndims = None
    # The chain's Jacobian is constant only if every link's Jacobian is.
    super(Chain, self).__init__(
        graph_parents=list(itertools.chain.from_iterable(
            b.graph_parents for b in bijectors)),
        is_constant_jacobian=all(b.is_constant_jacobian for b in bijectors),
        validate_args=validate_args,
        dtype=dtype,
        event_ndims=event_ndims,
        name=name or ("identity" if not bijectors else
                      "_of_".join(["chain"] + [b.name for b in bijectors])))
  @property
  def bijectors(self):
    # Bijectors in `inverse` application order; `forward` applies them in
    # reverse (see _forward below).
    return self._bijectors
  def _shape_helper(self, func_name, input_shape, reverse):
    """Threads `input_shape` through each bijector's `func_name` method.
    Args:
      func_name: Python `str`, name of the per-bijector shape method to call.
      input_shape: shape handed to the first bijector in the traversal.
      reverse: Python `bool`; if True, traverse `self.bijectors` last-to-first
        (used for the forward-direction shape methods).
    Returns:
      The shape after all bijectors have been applied.
    Raises:
      ValueError: if some bijector does not implement `func_name`.
    """
    new_shape = input_shape
    for b in reversed(self.bijectors) if reverse else self.bijectors:
      func = getattr(b, func_name, None)
      if func is None:
        raise ValueError("unable to call %s on bijector %s (%s)" %
                         (func_name, b.name, func))
      new_shape = func(new_shape)
    return new_shape
  def _forward_event_shape(self, input_shape):
    return self._shape_helper("forward_event_shape", input_shape,
                              reverse=True)
  def _forward_event_shape_tensor(self, input_shape):
    return self._shape_helper(
        "forward_event_shape_tensor", input_shape, reverse=True)
  def _inverse_event_shape(self, output_shape):
    return self._shape_helper("inverse_event_shape", output_shape,
                              reverse=False)
  def _inverse_event_shape_tensor(self, output_shape):
    return self._shape_helper("inverse_event_shape_tensor", output_shape,
                              reverse=False)
  def _inverse(self, y, **kwargs):
    # Undo the chain by applying each inverse in list order; per-bijector
    # keyword args are routed by bijector name.
    for b in self.bijectors:
      y = b.inverse(y, **kwargs.get(b.name, {}))
    return y
  def _inverse_log_det_jacobian(self, y, **kwargs):
    # Log-det-Jacobians of a composition add; `y` is stepped through the
    # chain so each bijector evaluates its term in its own output space.
    ildj = constant_op.constant(0., dtype=y.dtype,
                                name="inverse_log_det_jacobian")
    for b in self.bijectors:
      ildj += b.inverse_log_det_jacobian(y, **kwargs.get(b.name, {}))
      y = b.inverse(y, **kwargs.get(b.name, {}))
    return ildj
  def _forward(self, x, **kwargs):
    # Forward applies the chain back-to-front: the last listed bijector
    # acts on `x` first (function-composition order).
    for b in reversed(self.bijectors):
      x = b.forward(x, **kwargs.get(b.name, {}))
    return x
  def _forward_log_det_jacobian(self, x, **kwargs):
    # Mirror of _inverse_log_det_jacobian in the forward direction.
    fldj = constant_op.constant(0., dtype=x.dtype,
                                name="forward_log_det_jacobian")
    for b in reversed(self.bijectors):
      fldj += b.forward_log_det_jacobian(x, **kwargs.get(b.name, {}))
      x = b.forward(x, **kwargs.get(b.name, {}))
    return fldj
|
vdmann/cse-360-image-hosting-website | refs/heads/master | lib/python2.7/site-packages/django/contrib/sessions/backends/db.py | 101 | import logging
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation
from django.db import IntegrityError, transaction, router
from django.utils import timezone
from django.utils.encoding import force_text
class SessionStore(SessionBase):
    """Database-backed session store.

    Each session is persisted as a row of the ``Session`` model (imported at
    the bottom of this module to avoid a circular import).
    """
    def __init__(self, session_key=None):
        super(SessionStore, self).__init__(session_key)

    def load(self):
        """Return the decoded session dict, or start a fresh session.

        A missing/expired row or tampered session data both fall back to
        creating a brand-new empty session.
        """
        try:
            record = Session.objects.get(
                session_key=self.session_key,
                expire_date__gt=timezone.now()
            )
            return self.decode(record.session_data)
        except (Session.DoesNotExist, SuspiciousOperation) as exc:
            if isinstance(exc, SuspiciousOperation):
                # Tampered or corrupt session data is a security event;
                # route it to the matching django.security.* logger.
                security_logger = logging.getLogger(
                    'django.security.%s' % exc.__class__.__name__)
                security_logger.warning(force_text(exc))
            self.create()
            return {}

    def exists(self, session_key):
        """Return True if a session row with this key is present."""
        return Session.objects.filter(session_key=session_key).exists()

    def create(self):
        """Allocate a new, guaranteed-unique session key and persist it."""
        while True:
            self._session_key = self._get_new_session_key()
            try:
                # Persist immediately so the key is reserved in the database.
                self.save(must_create=True)
            except CreateError:
                # Collision with an existing key -- draw a new one.
                continue
            else:
                break
        self.modified = True
        self._session_cache = {}

    def save(self, must_create=False):
        """Write the current session data to the database.

        With ``must_create=True`` the write is forced to be an INSERT and a
        ``CreateError`` is raised if the row already exists (this is how
        ``create()`` guarantees key uniqueness).
        """
        session_obj = Session(
            session_key=self._get_or_create_session_key(),
            session_data=self.encode(self._get_session(no_load=must_create)),
            expire_date=self.get_expiry_date()
        )
        db_alias = router.db_for_write(Session, instance=session_obj)
        try:
            with transaction.atomic(using=db_alias):
                session_obj.save(force_insert=must_create, using=db_alias)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise

    def delete(self, session_key=None):
        """Delete the session row, defaulting to this store's own key."""
        if session_key is None:
            session_key = self.session_key
            if session_key is None:
                # Nothing has been persisted yet; nothing to delete.
                return
        try:
            Session.objects.get(session_key=session_key).delete()
        except Session.DoesNotExist:
            pass

    @classmethod
    def clear_expired(cls):
        """Remove every session whose expiry date has already passed."""
        Session.objects.filter(expire_date__lt=timezone.now()).delete()
# At bottom to avoid circular import
from django.contrib.sessions.models import Session
|
benjamindeleener/odoo | refs/heads/master | addons/crm/__openerp__.py | 4 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'CRM',
'version': '1.0',
'category': 'Customer Relationship Management',
'sequence': 5,
'summary': 'Leads, Opportunities, Activities',
'description': """
The generic Odoo Customer Relationship Management
====================================================
This application enables a group of people to intelligently and efficiently manage leads, opportunities, meetings and activities.
It manages key tasks such as communication, identification, prioritization, assignment, resolution and notification.
Odoo ensures that all cases are successfully tracked by users, customers and vendors. It can automatically send reminders, trigger specific methods and many other actions based on your own enterprise rules.
The greatest thing about this system is that users don't need to do anything special. The CRM module has an email gateway for the synchronization interface between mails and Odoo. That way, users can just send emails to the request tracker.
Odoo will take care of thanking them for their message, automatically routing it to the appropriate staff and make sure all future correspondence gets to the right place.
Dashboard for CRM will include:
-------------------------------
* Planned Revenue by Stage and User (graph)
* Opportunities by Stage (graph)
""",
'website': 'https://www.odoo.com/page/crm',
'depends': [
'base_action_rule',
'base_setup',
'sales_team',
'mail',
'calendar',
'resource',
'fetchmail',
'utm',
'web_tip',
'web_planner',
],
'data': [
'data/crm_action_data.xml',
'crm_data.xml',
'data/crm_stage_data.xml',
'data/sales_config_settings_data.xml',
'crm_lead_data.xml',
'crm_tip_data.xml',
'security/crm_security.xml',
'security/ir.model.access.csv',
'wizard/crm_activity_log_views.xml',
'wizard/crm_lead_lost_view.xml',
'wizard/crm_lead_to_opportunity_view.xml',
'wizard/crm_merge_opportunities_view.xml',
'crm_view.xml',
'crm_stage_views.xml',
'crm_lead_view.xml',
'crm_lead_menu.xml',
'views/crm_action_views.xml',
'calendar_event_menu.xml',
'report/crm_activity_report_view.xml',
'report/crm_opportunity_report_view.xml',
'res_partner_view.xml',
'res_config_view.xml',
'base_partner_merge_view.xml',
'sales_team_view.xml',
'views/crm.xml',
'web_planner_data.xml',
'sales_team_dashboard.xml',
'crm_mail_template_data.xml'
],
'demo': [
'crm_demo.xml',
'crm_lead_demo.xml',
'data/crm_action_demo.xml',
'crm_action_rule_demo.xml',
],
'test': [
'test/crm_access_group_users.yml',
'test/crm_lead_message.yml',
'test/lead2opportunity2win.yml',
'test/lead2opportunity_assign_salesmen.yml',
'test/crm_lead_merge.yml',
'test/crm_lead_cancel.yml',
'test/crm_lead_onchange.yml',
'test/crm_lead_copy.yml',
'test/crm_lead_unlink.yml',
'test/crm_lead_find_stage.yml',
],
'css': ['static/src/css/crm.css'],
'installable': True,
'application': True,
'auto_install': False,
}
|
frankk00/realtor | refs/heads/master | simplejson/decoder.py | 2 | """Implementation of JSONDecoder
"""
import re
import sys
import struct
from simplejson.scanner import make_scanner
def _import_c_scanstring():
try:
from simplejson._speedups import scanstring
return scanstring
except ImportError:
return None
# C extension hook: None when the accelerated scanner cannot be imported.
c_scanstring = _import_c_scanstring()
# Public API of this module.
__all__ = ['JSONDecoder']
# Flags shared by every regular expression compiled in this module.
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
# The struct module in Python 2.4 would get frexp() out of range here
# when an endian is specified in the format string. Fixed in Python 2.5+
if sys.byteorder != 'big':
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
nan, inf = struct.unpack('dd', _BYTES)
return nan, inf, -inf
# Module-level IEEE-754 specials used when parsing NaN/Infinity tokens.
NaN, PosInf, NegInf = _floatconstants()
class JSONDecodeError(ValueError):
    """Subclass of ValueError with the following additional properties:

    msg: The unformatted error message
    doc: The JSON document being parsed
    pos: The start index of doc where parsing failed
    end: The end index of doc where parsing failed (may be None)
    lineno: The line corresponding to pos
    colno: The column corresponding to pos
    endlineno: The line corresponding to end (may be None)
    endcolno: The column corresponding to end (may be None)
    """
    def __init__(self, msg, doc, pos, end=None):
        # The formatted message (with line/column context) is what str()
        # of the exception shows; the raw pieces are kept as attributes.
        ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.end = end
        self.lineno, self.colno = linecol(doc, pos)
        if end is None:
            self.endlineno = self.endcolno = None
        else:
            self.endlineno, self.endcolno = linecol(doc, end)
def linecol(doc, pos):
    """Return the 1-based ``(line, column)`` of index *pos* within *doc*.

    ``str.rfind`` returns -1 when *pos* is on the first line (no preceding
    newline), so ``pos - (-1)`` yields a 1-based column there -- consistent
    with the ``pos - index_of_last_newline`` column computed for every later
    line.  (The previous implementation special-cased line 1 with
    ``colno = pos``, which was 0-based on the first line only and therefore
    off by one relative to all other lines.)
    """
    lineno = doc.count('\n', 0, pos) + 1
    colno = pos - doc.rfind('\n', 0, pos)
    return lineno, colno
def errmsg(msg, doc, pos, end=None):
    """Build the human-readable message used by :class:`JSONDecodeError`.

    Note that this function is also called from _speedups, so its name and
    signature must remain stable.
    """
    lineno, colno = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    return ('%s: line %d column %d - line %d column %d (char %d - %d)'
            % (msg, lineno, colno, endlineno, endcolno, pos, end))
# Literal tokens that are valid JSON values but fall outside the number
# grammar, mapped to their float equivalents.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}
# Matches a run of ordinary characters followed by the one character that
# needs action: closing quote, literal control character, or backslash.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Single-character escape sequences (\uXXXX escapes are handled separately).
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
# Encoding assumed for byte strings when the caller does not supply one.
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True,
        _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.
    Returns a tuple of the decoded string and the index of the character in s
    after the end quote.

    _b and _m are default-argument bindings of BACKSLASH and
    STRINGCHUNK.match, giving local-variable lookup speed in the hot loop.
    NOTE(review): this body uses the Python 2 builtins `unicode`/`unichr`;
    the module predates Python 3 support.
    """
    if encoding is None:
        encoding = DEFAULT_ENCODING
    # Decoded pieces are accumulated here and joined once at the end.
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                #msg = "Invalid control character {0!r} at".format(terminator)
                raise JSONDecodeError(msg, s, end)
            else:
                # Non-strict mode: keep the raw control character.
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise JSONDecodeError(msg, s, end)
            end += 1
        else:
            # Unicode escape sequence: exactly four hex digits follow \u.
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise JSONDecodeError(msg, s, end)
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems: a high surrogate
            # must be followed by a \uXXXX low surrogate, and the two are
            # combined into one code point above U+FFFF.
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise JSONDecodeError(msg, s, end)
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise JSONDecodeError(msg, s, end)
                uni2 = int(esc2, 16)
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
# Use speedup if available (falls back to the pure-Python scanner above).
scanstring = c_scanstring or py_scanstring
# Whitespace allowed between JSON tokens.
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject(s_and_end, encoding, strict, scan_once, object_hook,
        object_pairs_hook, memo=None,
        _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON object starting just after its opening ``{``.

    ``s_and_end`` is a ``(document, index)`` pair where ``index`` points one
    character past the ``{``.  Returns ``(obj, end)`` where ``obj`` is the
    parsed mapping (a dict, or the result of ``object_hook`` /
    ``object_pairs_hook``) and ``end`` points one past the closing ``}``.

    The parameter used to be the Python-2-only tuple parameter ``(s, end)``;
    unpacking it explicitly keeps the call signature identical for callers
    while also being valid Python 3 syntax.
    """
    s, end = s_and_end
    # Backwards compatibility
    if memo is None:
        memo = {}
    memo_get = memo.setdefault
    pairs = []
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            if object_pairs_hook is not None:
                result = object_pairs_hook(pairs)
                # BUG FIX: consume the closing '}'.  This branch previously
                # returned ``end`` (not ``end + 1``), unlike the plain-dict
                # branch below, leaving the brace unread for the caller.
                return result, end + 1
            pairs = {}
            if object_hook is not None:
                pairs = object_hook(pairs)
            return pairs, end + 1
        elif nextchar != '"':
            raise JSONDecodeError("Expecting property name", s, end)
    end += 1
    while True:
        key, end = scanstring(s, end, encoding, strict)
        # Intern repeated keys via the shared memo so equal key strings
        # share one object across the document.
        key = memo_get(key, key)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise JSONDecodeError("Expecting : delimiter", s, end)
        end += 1
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise JSONDecodeError("Expecting object", s, end)
        pairs.append((key, value))
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar == '}':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end - 1)
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end += 1
                nextchar = s[end]
                if nextchar in _ws:
                    end = _w(s, end + 1).end()
                    nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar != '"':
            raise JSONDecodeError("Expecting property name", s, end - 1)
    if object_pairs_hook is not None:
        result = object_pairs_hook(pairs)
        return result, end
    pairs = dict(pairs)
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
def JSONArray(s_and_end, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON array starting just after its opening ``[``.

    ``s_and_end`` is a ``(document, index)`` pair where ``index`` points one
    character past the ``[``.  Returns ``(values, end)`` with ``end`` one
    past the closing ``]``.

    The parameter used to be the Python-2-only tuple parameter ``(s, end)``;
    unpacking it explicitly keeps callers unchanged while also being valid
    Python 3 syntax (matching the same change in ``JSONObject``).
    """
    s, end = s_and_end
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise JSONDecodeError("Expecting object", s, end)
        _append(value)
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end)
        # Skip whitespace after the comma, tolerating end-of-input here;
        # the next scan_once call reports the real error position.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder
    Performs the following translations in decoding by default:
    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+
    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """
    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True,
            object_pairs_hook=None):
        """
        *encoding* determines the encoding used to interpret any
        :class:`str` objects decoded by this instance (``'utf-8'`` by
        default).  It has no effect when decoding :class:`unicode` objects.
        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as :class:`unicode`.
        *object_hook*, if specified, will be called with the result of every
        JSON object decoded and its return value will be used in place of the
        given :class:`dict`.  This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).
        *object_pairs_hook* is an optional function that will be called with
        the result of any object literal decode with an ordered list of pairs.
        The return value of *object_pairs_hook* will be used instead of the
        :class:`dict`.  This feature can be used to implement custom decoders
        that rely on the order that the key and value pairs are decoded (for
        example, :func:`collections.OrderedDict` will remember the order of
        insertion). If *object_hook* is also defined, the *object_pairs_hook*
        takes priority.
        *parse_float*, if specified, will be called with the string of every
        JSON float to be decoded.  By default, this is equivalent to
        ``float(num_str)``. This can be used to use another datatype or parser
        for JSON floats (e.g. :class:`decimal.Decimal`).
        *parse_int*, if specified, will be called with the string of every
        JSON int to be decoded.  By default, this is equivalent to
        ``int(num_str)``.  This can be used to use another datatype or parser
        for JSON integers (e.g. :class:`float`).
        *parse_constant*, if specified, will be called with one of the
        following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``.  This
        can be used to raise an exception if invalid JSON numbers are
        encountered.
        *strict* controls the parser's behavior when it encounters an
        invalid control character in a string. The default setting of
        ``True`` means that unescaped control characters are parse errors, if
        ``False`` then control characters will be allowed in strings.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.object_pairs_hook = object_pairs_hook
        # ``or`` fallbacks: any falsy hook (None) selects the default parser.
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        # Shared key memo: equal object keys decode to one shared string.
        self.memo = {}
        # make_scanner reads the parse_* attributes above, so it must run last.
        self.scan_once = make_scanner(self)
    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)

        Unlike :meth:`raw_decode`, trailing non-whitespace data is an error.
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        if end != len(s):
            raise JSONDecodeError("Extra data", s, end, len(s))
        return obj
    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode``
        beginning with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.
        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            # The scanner signals "no value found" with StopIteration.
            raise JSONDecodeError("No JSON object could be decoded", s, idx)
        return obj, end
|
Outernet-Project/outernetrx-linux | refs/heads/pillar | tools/perf/python/twatch.py | 7370 | #! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
cpus = perf.cpu_map()
threads = perf.thread_map()
evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
wakeup_events = 1, watermark = 1,
sample_id_all = 1,
sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU | perf.SAMPLE_TID)
evsel.open(cpus = cpus, threads = threads);
evlist = perf.evlist(cpus, threads)
evlist.add(evsel)
evlist.mmap()
while True:
evlist.poll(timeout = -1)
for cpu in cpus:
event = evlist.read_on_cpu(cpu)
if not event:
continue
print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
event.sample_pid,
event.sample_tid),
print event
if __name__ == '__main__':
main()
|
dimroc/tensorflow-mnist-tutorial | refs/heads/master | lib/python3.6/site-packages/numpy/tests/test_warnings.py | 38 | """
Tests which scan for certain occurances in the code, they may not find
all of these occurances but should catch almost all.
"""
from __future__ import division, absolute_import, print_function
import sys
if sys.version_info >= (3, 4):
from pathlib import Path
import ast
import tokenize
import numpy
from numpy.testing import run_module_suite
from numpy.testing.decorators import slow
class ParseCall(ast.NodeVisitor):
    """Collect the dotted name of a call target.

    After visiting e.g. the ``func`` node of ``np.random.seed(1)``,
    ``self.ls`` holds ``['np', 'random', 'seed']``.
    """
    def __init__(self):
        self.ls = []
    def visit_Attribute(self, node):
        # Recurse first so the name parts end up in left-to-right order.
        ast.NodeVisitor.generic_visit(self, node)
        self.ls.append(node.attr)
    def visit_Name(self, node):
        self.ls.append(node.id)
class FindFuncs(ast.NodeVisitor):
    """Scan an AST for problematic uses of the warnings machinery.

    Raises AssertionError for ``simplefilter``/``filterwarnings`` calls
    installing an ``"ignore"`` filter, and for ``warnings.warn`` calls
    that pass no ``stacklevel``.
    """
    def __init__(self, filename):
        super().__init__()
        self.__filename = filename
    def visit_Call(self, node):
        p = ParseCall()
        p.visit(node.func)
        ast.NodeVisitor.generic_visit(self, node)
        if p.ls[-1] in ('simplefilter', 'filterwarnings'):
            # ast.Str.s is deprecated in favour of ast.Constant.value;
            # accept both so the check works on old and new Pythons.
            first = node.args[0]
            if getattr(first, 'value', getattr(first, 's', None)) == "ignore":
                raise AssertionError(
                    "ignore filter should not be used; found in "
                    "{} on line {}".format(self.__filename, node.lineno))
        if p.ls[-1] == 'warn' and (
                len(p.ls) == 1 or p.ls[-2] == 'warnings'):
            # BUG FIX: the original compared a str literal to the (Path)
            # filename with ``is``, which is always False, so this file's
            # own deliberate bare warns were never skipped.
            if str(self.__filename).endswith("testing/tests/test_warnings.py"):
                # This file
                return
            # A third positional argument is the stacklevel:
            if len(node.args) == 3:
                return
            args = {kw.arg for kw in node.keywords}
            if "stacklevel" in args:
                return
            raise AssertionError(
                "warnings should have an appropriate stacklevel; found in "
                "{} on line {}".format(self.__filename, node.lineno))
@slow
def test_warning_calls():
    # combined "ignore" and stacklevel error
    # Walk every .py file shipped with numpy and assert (via FindFuncs)
    # that no module installs an "ignore" warnings filter or warns
    # without a stacklevel.
    base = Path(numpy.__file__).parent
    for path in base.rglob("*.py"):
        # The testing tree and the two __init__ files below legitimately
        # manipulate warnings, so they are excluded from the scan.
        if base / "testing" in path.parents:
            continue
        if path == base / "__init__.py":
            continue
        if path == base / "random" / "__init__.py":
            continue
        # use tokenize to auto-detect encoding on systems where no
        # default encoding is defined (e.g. LANG='C')
        with tokenize.open(str(path)) as file:
            tree = ast.parse(file.read())
            FindFuncs(path).visit(tree)
if __name__ == "__main__":
    run_module_suite()
|
janhui/test_engine | refs/heads/master | dev/tests/gaussian_bump.py | 2 | #!/usr/bin/env python
import numpy
import argparse
import os
import math
from Scientific.IO import NetCDF
def main():
    # Build a square domain with a Gaussian bump in its centre, write it
    # as a NetCDF grid, then post-process with external GMT/GDAL tools.
    # NOTE: Python 2 only (print statement) and depends on ScientificPython.
    parser = argparse.ArgumentParser(
        prog="gaussian_bump",
        description="""Create a Gaussian bump in a netcdf file"""
    )
    parser.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        help="Verbose output: mainly progress reports.",
        default=False
    )
    parser.add_argument(
        '-d',
        '--domain',
        help="Domain size. Defualt is 1000x1000m",
        default=1000.0,
        type=float
    )
    parser.add_argument(
        '-b',
        '--bumpheight',
        help="Distance between seabed and top of bump. Default is 100m",
        default=100,
        type=float
    )
    parser.add_argument(
        '-r',
        '--resolution',
        help="Resolution of output netcdf file. Default is 10m",
        default=10.0,
        type=float
    )
    parser.add_argument(
        '--shift',
        help="Shift the bump in the 'north-south' direction, wrapping along the top/bottom",
        default = 0,
        type=float
    )
    parser.add_argument(
        '--spread',
        help="Spread of Gaussian",
        default = 100.0,
        type=float
    )
    parser.add_argument(
        'output_file',
        metavar='output_file',
        nargs=1,
        help='The output netcdf file'
    )
    args = parser.parse_args()
    verbose = args.verbose
    output_file = args.output_file[0]
    domain_size = args.domain
    bump_height = args.bumpheight
    resolution = args.resolution
    shift = args.shift
    spread = args.spread
    nPoints = int(domain_size / resolution)
    # Convert the metre shift into a whole number of grid rows.
    shift = int(shift/resolution)
    if (verbose):
        print nPoints, shift
    # generate regular grid
    X, Y = numpy.meshgrid(numpy.linspace(0.0, domain_size, nPoints), numpy.linspace(0.0, domain_size, nPoints))
    Z = numpy.zeros((nPoints,nPoints))
    #for each point calculate the Gaussian
    centre = domain_size/2.0
    for i in range(0,len(X)):
        for j in range(0,len(X[0])):
            # Isotropic Gaussian: same spread in both directions.
            r = ((X[i][j]-centre)**2/(2.0*spread**2) + (Y[i][j]-centre)**2/(2.0*spread**2))
            Z[i][j] = bump_height * math.exp(-1.0*r)
    if (not shift == 0.0):
        # Roll along axis 0 wraps the bump across the top/bottom edge.
        Z = numpy.roll(Z, shift, 0)
    f = NetCDF.NetCDFFile(output_file, 'w')
    xDim = f.createDimension("X", nPoints)
    yDim = f.createDimension("Y", nPoints)
    x = f.createVariable("X","d",("X",))
    y = f.createVariable("Y","d",("Y",))
    zVar = f.createVariable("Z","d",("X","Y"))
    x.assignValue(X[0,0:nPoints])
    y.assignValue(Y[0:nPoints,0])
    zVar.assignValue(Z)
    f.close()
    # SECURITY NOTE(review): output_file is user-supplied and interpolated
    # into shell commands unquoted -- shell injection is possible; consider
    # subprocess with an argument list instead of os.system.
    os.system('grdreformat '+output_file+' '+output_file)
    os.system('rm -f 1_contour.* 50_contour.*')
    os.system('gdal_contour -fl 1.0 NETCDF:"'+output_file+'":z 1_contour.shp')
    os.system('gdal_contour -fl 50.0 NETCDF:"'+output_file+'":z 50_contour.shp')
if __name__ == "__main__":
    main()
|
ritchyteam/odoo | refs/heads/master | addons/l10n_hn/__init__.py | 411 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2009-2010 Salvatore J. Trimarchi <salvatore@trimarchi.co.cc>
# (http://salvatoreweb.co.cc)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ArcherSys/ArcherSys | refs/heads/master | Lib/site-packages/cms/tests/forms.py | 8 | # -*- coding: utf-8 -*-
from __future__ import with_statement
from django.contrib.sites.models import Site
from django.core.cache import cache
from cms.admin import forms
from cms.admin.forms import PageUserForm
from cms.api import create_page, create_page_user
from cms.forms.fields import PageSelectFormField, SuperLazyIterator
from cms.forms.utils import (get_site_choices, get_page_choices,
update_site_and_page_choices)
from cms.test_utils.testcases import CMSTestCase
from cms.utils.compat.dj import get_user_model
class Mock_PageSelectFormField(PageSelectFormField):
    """Minimal stand-in for PageSelectFormField.

    Deliberately skips the parent constructor so that only the state
    ``compress`` reads (``required`` and ``error_messages``) exists.
    """
    def __init__(self, required=False):
        # That's to have a proper mock object, without having to resort
        # to dirtier tricks. We want to test *just* compress here.
        self.required = required
        self.error_messages = {'invalid_page': 'Invalid_page'}
class FormsTestCase(CMSTestCase):
    """Tests for the site/page choice helpers and PageSelectFormField.compress.
    Several tests build a superuser plus a page as fixture data; the
    username handling adapts to the configured custom user model.
    """
    def setUp(self):
        # The choice helpers cache their results; start each test clean.
        cache.clear()
    def test_get_site_choices(self):
        result = get_site_choices()
        self.assertEqual(result, [])
    def test_get_page_choices(self):
        result = get_page_choices()
        self.assertEqual(result, [('', '----')])
    def test_get_site_choices_without_moderator(self):
        result = get_site_choices()
        self.assertEqual(result, [])
    def test_get_site_choices_without_moderator_with_superuser(self):
        # boilerplate (creating a page)
        User = get_user_model()
        fields = dict(is_staff=True, is_active=True, is_superuser=True, email="super@super.com")
        if User.USERNAME_FIELD != 'email':
            fields[User.USERNAME_FIELD] = "super"
        user_super = User(**fields)
        user_super.set_password(getattr(user_super, User.USERNAME_FIELD))
        user_super.save()
        with self.login_user_context(user_super):
            create_page("home", "nav_playground.html", "en", created_by=user_super)
            # The proper test
            result = get_site_choices()
            self.assertEqual(result, [(1, 'example.com')])
    def test_compress_function_raises_when_page_is_none(self):
        raised = False
        try:
            fake_field = Mock_PageSelectFormField(required=True)
            data_list = (0, None) # (site_id, page_id); site_id is not used
            fake_field.compress(data_list)
            self.fail('compress function didn\'t raise!')
        except forms.ValidationError:
            raised = True
        self.assertTrue(raised)
    def test_compress_function_returns_none_when_not_required(self):
        fake_field = Mock_PageSelectFormField(required=False)
        data_list = (0, None) # (site_id, page_id); site_id is not used
        result = fake_field.compress(data_list)
        self.assertEqual(result, None)
    def test_compress_function_returns_none_when_no_data_list(self):
        fake_field = Mock_PageSelectFormField(required=False)
        data_list = None
        result = fake_field.compress(data_list)
        self.assertEqual(result, None)
    def test_compress_function_gets_a_page_when_one_exists(self):
        # boilerplate (creating a page)
        User = get_user_model()
        fields = dict(is_staff=True, is_active=True, is_superuser=True, email="super@super.com")
        if User.USERNAME_FIELD != 'email':
            fields[User.USERNAME_FIELD] = "super"
        user_super = User(**fields)
        user_super.set_password(getattr(user_super, User.USERNAME_FIELD))
        user_super.save()
        with self.login_user_context(user_super):
            home_page = create_page("home", "nav_playground.html", "en", created_by=user_super)
            # The actual test
            fake_field = Mock_PageSelectFormField()
            data_list = (0, home_page.pk) # (site_id, page_id); site_id is not used
            result = fake_field.compress(data_list)
            self.assertEqual(home_page, result)
    def test_update_site_and_page_choices(self):
        Site.objects.all().delete()
        site = Site.objects.create(domain='http://www.django-cms.org', name='Django CMS', pk=1)
        page1 = create_page('Page 1', 'nav_playground.html', 'en', site=site)
        page2 = create_page('Page 2', 'nav_playground.html', 'de', site=site)
        page3 = create_page('Page 3', 'nav_playground.html', 'en',
                            site=site, parent=page1)
        # enforce the choices to be casted to a list
        site_choices, page_choices = [list(bit) for bit in update_site_and_page_choices('en')]
        # Child pages are indented under their parent in the choice labels.
        self.assertEqual(page_choices, [
            ('', '----'),
            (site.name, [
                (page1.pk, 'Page 1'),
                (page3.pk, ' Page 3'),
                (page2.pk, 'Page 2'),
            ])
        ])
        self.assertEqual(site_choices, [(site.pk, site.name)])
    def test_superlazy_iterator_behaves_properly_for_sites(self):
        normal_result = get_site_choices()
        lazy_result = SuperLazyIterator(get_site_choices)
        self.assertEqual(normal_result, list(lazy_result))
    def test_superlazy_iterator_behaves_properly_for_pages(self):
        normal_result = get_page_choices()
        lazy_result = SuperLazyIterator(get_page_choices)
        self.assertEqual(normal_result, list(lazy_result))
    def test_page_user_form_initial(self):
        # A page user created with grant_all=True must have every page,
        # pageuser and pagepermission capability pre-checked in the form.
        if get_user_model().USERNAME_FIELD == 'email':
            myuser = get_user_model().objects.create_superuser("myuser", "myuser@django-cms.org", "myuser@django-cms.org")
        else:
            myuser = get_user_model().objects.create_superuser("myuser", "myuser@django-cms.org", "myuser")
        user = create_page_user(myuser, myuser, grant_all=True)
        puf = PageUserForm(instance=user)
        names = ['can_add_page', 'can_change_page', 'can_delete_page',
                 'can_add_pageuser', 'can_change_pageuser',
                 'can_delete_pageuser', 'can_add_pagepermission',
                 'can_change_pagepermission', 'can_delete_pagepermission']
        for name in names:
            self.assertTrue(puf.initial.get(name, False))
|
MFoster/breeze | refs/heads/master | django/contrib/messages/models.py | 634 | # Models module required so tests are discovered.
|
MphasisWyde/eWamSublimeAdaptor | refs/heads/master | POC/v0_4_POC_with_generic_cmd_and_swagger/plugin/environment.py | 1 | import sublime, uuid
from bravado.client import SwaggerClient
# Lazily initialised SwaggerClient shared by the module; see getSwaggerAPI().
swaggerClient = None
def get_environments():
    """Return the saved environments as a dict keyed by environment name.

    Every caller (save_environment, get_environment_by_name,
    get_environment_by_index, ...) indexes the result by name, so the
    empty fallback must be a dict; the original returned a list here,
    which made save_environment crash on first use.
    """
    settings = sublime.load_settings("wam.sublime-settings")
    if settings is None:
        return {}
    env_list = settings.get("environments")
    if env_list is None:
        env_list = {}
    return env_list
def get_environment_by_index(index):
    """Return the environment record at the given panel *index*, or None."""
    environments = get_environments()
    # Dict insertion order matches the order shown in the quick panel.
    for position, key in enumerate(environments):
        if position == index:
            return environments[key]
    return None
def get_environment_by_name(name):
    """Return the environment record called *name*, or None if unknown."""
    environments = get_environments()
    if name not in environments:
        return None
    return environments[name]
def save_environment(name, url):
    """Add or update the environment *name* -> *url* in the plugin settings."""
    environments = get_environments()
    # uuid3 is deterministic: the same url/name pair always yields the same id.
    env_uuid = str(uuid.uuid3(uuid.NAMESPACE_URL, url + "/" + name))
    environments[name] = {'name': name, 'url': url, 'uuid': env_uuid}
    settings = sublime.load_settings("wam.sublime-settings")
    settings.set("environments", environments)
    sublime.save_settings("wam.sublime-settings")
def clear_environments():
    """Remove every saved environment and drop the cached API client."""
    settings = sublime.load_settings("wam.sublime-settings")
    if settings is None:
        return
    setClientAPIFromEnv(None)
    settings.set("environments", {})
    sublime.save_settings("wam.sublime-settings")
def select_environment(window, action):
    # Show a quick panel listing all saved environments; *action* receives
    # the chosen index (-1 on cancel).  The current working environment,
    # if any, is pre-selected.
    envs = get_environments()
    working_env = get_working_environment()
    if working_env == None:
        working_name = ''
    else:
        working_name = working_env['name']
    selected_item = -1
    items = []
    for i, env in enumerate(envs):
        if envs[env]['name'] == working_name:
            selected_item = i
        items.append([envs[env]['name'], envs[env]['url']])
    # [["Wynsure 5.6", "http://localhost:8082"], ["Wynsure 5.5", "http://localhost:8083"]]
    window.show_quick_panel(items, action, 0, selected_item, None)
def set_working_environment(index):
    """Persist the environment at *index* as the project's working environment."""
    # -1 means the quick panel was cancelled; nothing to do.
    if index == -1:
        return
    window = sublime.active_window()
    project_data = window.project_data()
    if project_data is None:
        project_data = {}
    project_data.setdefault('wam', {})
    chosen = get_environment_by_index(index)
    project_data['wam']['wam_working_environment'] = chosen['name']
    setClientAPIFromEnv(chosen)
    window.set_project_data(project_data)
def remove_environment(index):
    # Delete the environment at panel *index* from the settings; if it was
    # the project's working environment, clear that reference first.
    if index == -1:
        return
    selected_env = get_environment_by_index(index)
    wnd = sublime.active_window()
    prj_data = wnd.project_data()
    if 'wam' in prj_data:
        if 'wam_working_environment' in prj_data['wam']:
            if prj_data['wam']['wam_working_environment'] == selected_env['name']:
                prj_data['wam']['wam_working_environment'] = ''
                setClientAPIFromEnv(None)
                wnd.set_project_data(prj_data)
    settings = sublime.load_settings("wam.sublime-settings")
    envs = settings.get("environments")
    del envs[selected_env['name']]
    settings.set("environments", envs)
    sublime.save_settings("wam.sublime-settings")
def reset_working_environment():
    """Clear the project's working environment and drop the API client.

    Fixes two defects in the original: a ``None`` project_data crashed
    the membership test, and the modified project data was never written
    back via ``set_project_data``, so the reset silently had no effect.
    """
    wnd = sublime.active_window()
    prj_data = wnd.project_data()
    if prj_data is None:
        prj_data = {}
    if 'wam' not in prj_data:
        prj_data['wam'] = {}
    prj_data['wam']['wam_working_environment'] = ''
    setClientAPIFromEnv(None)
    # Persist the change (mirrors set_working_environment).
    wnd.set_project_data(prj_data)
def get_working_environment():
    # Return the project's working environment record, or None when no
    # project data / selection exists.  Note: falls through (implicitly
    # returning None) when the 'wam' keys are absent.
    wnd = sublime.active_window()
    prj_data = wnd.project_data()
    if prj_data == None:
        return None
    if 'wam' in prj_data:
        if 'wam_working_environment' in prj_data['wam']:
            selected_name = prj_data['wam']['wam_working_environment']
            if selected_name == None:
                return None
            working_env = get_environment_by_name(selected_name)
            # Stale selection (environment was deleted): clear it.
            if working_env == None:
                reset_working_environment()
            return working_env
def setClientAPIFromEnv(env):
    # (Re)build the module-level SwaggerClient from the environment record
    # *env* ({'name', 'url', 'uuid'}), or drop it when env is None.
    global swaggerClient
    if env == None:
        swaggerClient = None
    else:
        print("Initializing SwaggerClient with " + env['url'] + "/api/rest/documentation")
        # All validation is disabled for speed; the eWAM swagger spec is
        # assumed to be trusted.
        config = {
            # === bravado config ===
            # Determines what is returned by the service call.
            'also_return_response': False,
            # === bravado-core config ====
            # validate incoming responses
            'validate_responses': False,
            # validate outgoing requests
            'validate_requests': False,
            # validate the swagger spec
            'validate_swagger_spec': False,
            # Use models (Python classes) instead of dicts for #/definitions/{models}
            'use_models': True,
            # # List of user-defined formats
            # 'formats': [my_super_duper_format],
        }
        swaggerClient = SwaggerClient.from_url(env['url'] + "/api/rest/documentation", config=config)
def getSwaggerAPI():
    """Return the shared SwaggerClient, creating it on first use."""
    global swaggerClient
    if swaggerClient is None:
        # Lazily initialise from the currently selected environment.
        setClientAPIFromEnv(get_working_environment())
    return swaggerClient
|
gon1213/SDC | refs/heads/master | behavioral_cloning/project_3/video.py | 29 | from moviepy.editor import ImageSequenceClip
import argparse
def main():
    """Assemble the frames found in a directory into an MP4 video.

    The output file is named after the image folder with a ``.mp4``
    suffix appended.
    """
    parser = argparse.ArgumentParser(description='Create driving video.')
    parser.add_argument('image_folder',
                        type=str,
                        default='',
                        help='Path to image folder. The video will be created from these images.')
    parser.add_argument('--fps',
                        type=int,
                        default=60,
                        help='FPS (Frames per second) setting for the video.')
    opts = parser.parse_args()
    video_file = opts.image_folder + '.mp4'
    print("Creating video {}, FPS={}".format(video_file, opts.fps))
    ImageSequenceClip(opts.image_folder, fps=opts.fps).write_videofile(video_file)
if __name__ == '__main__':
    main()
|
drjova/invenio-deposit | refs/heads/master | invenio_deposit/config.py | 2 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Default configuration of deposit module."""
from invenio_records_rest.facets import terms_filter
from invenio_records_rest.utils import check_elasticsearch
from .utils import check_oauth2_scope_write, \
check_oauth2_scope_write_elasticsearch
# Top-level API endpoints used by the deposit UI/REST layer.
DEPOSIT_SEARCH_API = '/api/deposits'
"""URL of search endpoint for deposits."""
DEPOSIT_RECORDS_API = '/api/deposits/{pid_value}'
"""URL of record endpoint for deposits."""
DEPOSIT_FILES_API = '/api/files'
"""URL of files endpoints for uploading."""
DEPOSIT_PID_MINTER = 'recid'
"""PID minter used for record submissions."""
DEPOSIT_JSONSCHEMAS_PREFIX = 'deposits/'
"""Prefix for all deposit JSON schemas."""
DEPOSIT_DEFAULT_JSONSCHEMA = 'deposits/deposit-v1.0.0.json'
"""Default JSON schema used for new deposits."""
DEPOSIT_DEFAULT_SCHEMAFORM = 'json/invenio_deposit/form.json'
"""Default Angular Schema Form."""
# URL-converter spec shared by the route templates defined below.
_PID = 'pid(depid,record_class="invenio_deposit.api:Deposit")'
DEPOSIT_REST_ENDPOINTS = {
'depid': {
'pid_type': 'depid',
'pid_minter': 'deposit',
'pid_fetcher': 'deposit',
'record_class': 'invenio_deposit.api:Deposit',
'files_serializers': {
'application/json': ('invenio_deposit.serializers'
':json_v1_files_response'),
},
'record_serializers': {
'application/json': ('invenio_records_rest.serializers'
':json_v1_response'),
},
'search_class': 'invenio_deposit.search:DepositSearch',
'search_serializers': {
'application/json': ('invenio_records_rest.serializers'
':json_v1_search'),
},
'list_route': '/deposits/',
'item_route': '/deposits/<{0}:pid_value>'.format(_PID),
'file_list_route': '/deposits/<{0}:pid_value>/files'.format(_PID),
'file_item_route':
'/deposits/<{0}:pid_value>/files/<path:key>'.format(_PID),
'default_media_type': 'application/json',
'links_factory_imp': 'invenio_deposit.links:deposit_links_factory',
'create_permission_factory_imp': check_oauth2_scope_write,
'read_permission_factory_imp': check_elasticsearch,
'update_permission_factory_imp':
check_oauth2_scope_write_elasticsearch,
'delete_permission_factory_imp':
check_oauth2_scope_write_elasticsearch,
'max_result_window': 10000,
},
}
"""Basic REST deposit configuration.
Most of the configurations have the same meaning of the record configuration
:data:`invenio_records_rest.config.RECORDS_REST_ENDPOINTS`.
Deposit introduce also configuration for files.
"""
DEPOSIT_REST_SORT_OPTIONS = {
'deposits': {
'bestmatch': {
'fields': ['-_score'],
'title': 'Best match',
'default_order': 'asc',
'order': 2,
},
'mostrecent': {
'fields': ['-_updated'],
'title': 'Most recent',
'default_order': 'asc',
'order': 1,
},
},
}
"""Basic deposit sort configuration.
See :data:`invenio_records_rest.config.RECORDS_REST_SORT_OPTIONS` for more
information.
"""
DEPOSIT_REST_DEFAULT_SORT = {
'deposits': {
'query': 'bestmatch',
'noquery': 'mostrecent',
}
}
"""Default deposit sort configuration.
See :data:`invenio_records_rest.config.RECORDS_REST_DEFAULT_SORT` for more
information.
"""
DEPOSIT_REST_FACETS = {
'deposits': {
'aggs': {
'status': {
'terms': {'field': '_deposit.status'},
},
},
'post_filters': {
'status': terms_filter('_deposit.status'),
},
},
}
"""Basic deposit facts configuration.
See :data:`invenio_records_rest.config.RECORDS_REST_FACETS` for more
information.
"""
DEPOSIT_RECORDS_UI_ENDPOINTS = {
'depid': {
'pid_type': 'depid',
'route': '/deposit/<pid_value>',
'template': 'invenio_deposit/edit.html',
'record_class': 'invenio_deposit.api:Deposit',
'view_imp': 'invenio_deposit.views.ui.default_view_method',
},
}
"""Basic deposit UI endpoints configuration.
The structure of the dictionary is as follows:
.. code-block:: python
DEPOSIT_RECORDS_UI_ENDPOINTS = {
'<pid-type>': {
'pid_type': '<pid-type>',
'route': '/unique/path/to/deposit/<pid_value>',
'template': 'invenio_deposit/edit.html',
'record_class': 'mypackage.api:MyDeposit',
'view_imp': 'mypackage.views.view_method',
'jsonschema' 'path/to/jsonschema/deposit.json',
'schemaform': 'path/to/schema/form.json',
}
}
"""
DEPOSIT_UI_ENDPOINT = '{scheme}://{host}/deposit/{pid_value}'
"""The UI endpoint for depositions with pid."""
DEPOSIT_UI_INDEX_TEMPLATE = 'invenio_deposit/index.html'
"""Template for the index page."""
DEPOSIT_UI_NEW_TEMPLATE = 'invenio_deposit/edit.html'
"""Template for a new deposit page."""
DEPOSIT_UI_TOMBSTONE_TEMPLATE = 'invenio_deposit/tombstone.html'
"""Template for a tombstone deposit page."""
DEPOSIT_UI_JSTEMPLATE_ACTIONS = \
'node_modules/invenio-records-js/dist/templates/actions.html'
"""Template for <invenio-records-actions> defined by `invenio-records-js`."""
DEPOSIT_UI_JSTEMPLATE_ERROR = \
'node_modules/invenio-records-js/dist/templates/error.html'
"""Template for <invenio-records-error> defined by `invenio-records-js`."""
DEPOSIT_UI_JSTEMPLATE_FORM = \
'node_modules/invenio-records-js/dist/templates/form.html'
"""Template for <invenio-records-form> defined by `invenio-records-js`."""
DEPOSIT_UI_SEARCH_INDEX = 'deposits'
"""Search index name for the deposit."""
DEPOSIT_DEFAULT_STORAGE_CLASS = 'S'
"""Default storage class."""
DEPOSIT_REGISTER_SIGNALS = True
"""Enable the signals registration."""
DEPOSIT_FORM_TEMPLATES_BASE = 'node_modules/invenio-records-js/dist/templates'
"""Angular Schema Form temmplates location."""
DEPOSIT_FORM_TEMPLATES = {
'default': 'default.html',
'fieldset': 'fieldset.html',
'array': 'array.html',
'radios_inline': 'radios_inline.html',
'radios': 'radios.html',
'select': 'select.html',
'button': 'button.html',
'textarea': 'textarea.html'
}
"""Templates for Angular Schema Form."""
DEPOSIT_RESPONSE_MESSAGES = {}
"""Alerts shown when actions are completed on deposit."""
|
stoewer/nixpy | refs/heads/master | nixio/entity_with_sources.py | 1 | # Copyright (c) 2014, German Neuroinformatics Node (G-Node)
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted under the terms of the BSD License. See
# LICENSE file in the root of the Project.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from nixio.util.proxy_list import RefProxyList
class RefSourceProxyList(RefProxyList):
    """Proxy list exposing the proxied object's source collection.

    Delegates all list operations to the object's private
    ``*_source_*`` accessor methods.
    """

    # Accessor hook names, in the positional order expected by
    # RefProxyList.__init__.
    _HOOKS = (
        "_source_count", "_get_source_by_id", "_get_source_by_pos",
        "_remove_source_by_id", "_add_source_by_id",
    )

    def __init__(self, obj):
        super(RefSourceProxyList, self).__init__(obj, *self._HOOKS)
_sources_doc = """
    Getter for sources.
    """


def _get_sources(self):
    # Lazily build the proxy list on first access and cache it on the
    # instance, so every later access returns the same object.
    try:
        return self._sources
    except AttributeError:
        self._sources = RefSourceProxyList(self)
        return self._sources
class EntityWithSourcesMixin(object):
    # Read-only property: fset/fdel are left unset, matching a plain getter.
    sources = property(fget=_get_sources, doc=_sources_doc)
|
LiveZenLK/CeygateERP | refs/heads/master | addons/l10n_be/__openerp__.py | 5 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
    # Odoo module manifest for the Belgian accounting localization.
    'name': 'Belgium - Accounting',
    'version': '1.1',
    'category': 'Localization/Account Charts',
    'description': """
This is the base module to manage the accounting chart for Belgium in Odoo.
==============================================================================
After installing this module, the Configuration wizard for accounting is launched.
* We have the account templates which can be helpful to generate Charts of Accounts.
* On that particular wizard, you will be asked to pass the name of the company,
the chart template to follow, the no. of digits to generate, the code for your
account and bank account, currency to create journals.
Thus, the pure copy of Chart Template is generated.
Wizards provided by this module:
--------------------------------
* Partner VAT Intra: Enlist the partners with their related VAT and invoiced
amounts. Prepares an XML file format.
**Path to access :** Invoicing/Reporting/Legal Reports/Belgium Statements/Partner VAT Intra
* Periodical VAT Declaration: Prepares an XML file for Vat Declaration of
the Main company of the User currently Logged in.
**Path to access :** Invoicing/Reporting/Legal Reports/Belgium Statements/Periodical VAT Declaration
* Annual Listing Of VAT-Subjected Customers: Prepares an XML file for Vat
Declaration of the Main company of the User currently Logged in Based on
Fiscal year.
**Path to access :** Invoicing/Reporting/Legal Reports/Belgium Statements/Annual Listing Of VAT-Subjected Customers
""",
    'author': 'Noviat & Odoo SA',
    # Modules that must be installed before this one.
    'depends': [
        'account',
        'base_vat',
        'base_iban',
        'l10n_multilang',
    ],
    # Data files loaded on install/update; load order matters.
    'data': [
        'account_chart_template.xml',
        'account_pcmn_belgium.xml',
        'account_tax_template.xml',
        'l10n_be_sequence.xml',
        'fiscal_templates.xml',
        'account_fiscal_position_tax_template.xml',
        'account_chart_template.yml',
        'security/ir.model.access.csv',
        'menuitem.xml'
    ],
    # Demo data, including fixtures reused from the base account module.
    'demo': [
        'demo/l10n_be_demo.yml',
        '../account/demo/account_bank_statement.yml',
        '../account/demo/account_invoice_demo.yml',
    ],
    'test': [
    ],
    'installable': True,
    'website': 'https://www.odoo.com/page/accounting',
    # Hook run after module installation; presumably loads the translated
    # chart labels — confirm against l10n_multilang.
    'post_init_hook': 'load_translations',
}
|
Applemann/hypatia | refs/heads/master | hypatia/__init__.py | 1 | """This module contains all of the important meta-information for
Hypatia such as the author's name, the copyright and license, status,
and so on.
"""
# Package metadata constants.
__author__ = "Lillian Lemmer"
__copyright__ = "Copyright 2015 Lillian Lemmer"
__credits__ = ["Lillian Lemmer"]
__license__ = "MIT"
# The author also maintains the project.
__maintainer__ = __author__
__site__ = "http://lillian-lemmer.github.io/hypatia/"
__email__ = "lillian.lynn.lemmer@gmail.com"
__status__ = "Development"
__contributors__ = [
    "Lillian Lemmer",
    "Brian Houston Morrow",
    "Eric James Michael Ritz"
]
__version__ = '0.2.26'
|
jortel/suds | refs/heads/master | tests/builtin.py | 65 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
import sys
sys.path.append('../')
import unittest
from suds.sax.date import Timezone as Tz
from suds.xsd.sxbuiltin import *
from unittest import TestCase
from tests import *
setup_logging()
class Date(XDate):
    # Test facade: skip XDate.__init__ so no schema plumbing is required.
    def __init__(self):
        pass
class Time(XTime):
    # Test facade: skip XTime.__init__ so no schema plumbing is required.
    def __init__(self):
        pass
class DateTime(XDateTime):
    # Test facade: skip XDateTime.__init__ so no schema plumbing is required.
    def __init__(self):
        pass
class DateTest(TestCase):
    """Exercise XDate.translate() with plain, offset and 'Z' date strings."""

    def testSimple(self):
        ref = dt.date(1941, 12, 7)
        s = '%.4d-%.2d-%.2d' % (ref.year, ref.month, ref.day)
        xdate = Date()
        d = xdate.translate(s)
        self.assertEqual(d, ref)

    def testNegativeTimezone(self):
        self.equalsTimezone(-6)

    def testPositiveTimezone(self):
        self.equalsTimezone(6)

    def testUtcTimezone(self):
        # 'Z' suffix with LOCAL=0 must parse back to the same date.
        Timezone.LOCAL = 0
        ref = dt.date(1941, 12, 7)
        s = '%.4d-%.2d-%.2dZ' % (ref.year, ref.month, ref.day)
        xdate = Date()
        d = xdate.translate(s)
        self.assertEqual(d, ref)

    def equalsTimezone(self, tz):
        # A date carrying the same offset as Timezone.LOCAL is unchanged.
        Timezone.LOCAL = tz
        ref = dt.date(1941, 12, 7)
        s = '%.4d-%.2d-%.2d%+.2d:00' % (ref.year, ref.month, ref.day, tz)
        xdate = Date()
        d = xdate.translate(s)
        self.assertEqual(d, ref)
class TimeTest(TestCase):
    """Exercise XTime.translate(): plain times, fractional seconds, and
    conversion from the string's UTC offset to Timezone.LOCAL.
    """

    def _translate(self, s):
        # Every test funnels through a throw-away Time facade.
        return Time().translate(s)

    def _shifted(self, local_tz, src_tz, expected_hour):
        # Parse 10:30:22 carrying offset *src_tz* (None means a 'Z' suffix)
        # while Timezone.LOCAL is *local_tz*; the hour must land on
        # *expected_hour* and minute/second must be untouched.
        Timezone.LOCAL = local_tz
        if src_tz is None:
            s = '10:30:22Z'
        else:
            s = self.strTime(10, 30, 22, src_tz)
        t = self._translate(s)
        self.assertEqual(expected_hour, t.hour)
        self.assertEqual(30, t.minute)
        self.assertEqual(22, t.second)

    def testSimple(self):
        self.assertEqual(self._translate('10:30:22'), dt.time(10, 30, 22))

    def testSimpleWithShortMicrosecond(self):
        ref = dt.time(10, 30, 22, 34)
        s = '%.2d:%.2d:%.2d.%4.d' % (10, 30, 22, 34)
        self.assertEqual(self._translate(s), ref)

    def testSimpleWithMicrosecond(self):
        ref = dt.time(10, 30, 22, 999999)
        s = '%.2d:%.2d:%.2d.%4.d' % (10, 30, 22, 999999)
        self.assertEqual(self._translate(s), ref)

    def testSimpleWithLongMicrosecond(self):
        # A fraction longer than six digits must be truncated to 999999.
        ref = dt.time(10, 30, 22, 999999)
        s = '%.2d:%.2d:%.2d.%4.d' % (10, 30, 22, 999999999)
        self.assertEqual(self._translate(s), ref)

    def testPositiveTimezone(self):
        self.equalsTimezone(6)

    def testNegativeTimezone(self):
        self.equalsTimezone(-6)

    def testUtcTimezone(self):
        Timezone.LOCAL = 0
        self.assertEqual(self._translate('10:30:22Z'), dt.time(10, 30, 22))

    def equalsTimezone(self, tz):
        # Same offset as LOCAL: value must parse back unshifted.
        Timezone.LOCAL = tz
        s = self.strTime(10, 30, 22, tz)
        self.assertEqual(self._translate(s), dt.time(10, 30, 22))

    def testConvertNegativeToGreaterNegative(self):
        self._shifted(-6, -5, 9)

    def testConvertNegativeToLesserNegative(self):
        self._shifted(-5, -6, 11)

    def testConvertPositiveToGreaterPositive(self):
        self._shifted(3, 2, 11)

    def testConvertPositiveToLesserPositive(self):
        self._shifted(2, 3, 9)

    def testConvertPositiveToNegative(self):
        self._shifted(-6, 3, 1)

    def testConvertNegativeToPositive(self):
        self._shifted(3, -6, 19)

    def testConvertNegativeToUtc(self):
        self._shifted(0, -6, 16)

    def testConvertPositiveToUtc(self):
        self._shifted(0, 3, 7)

    def testConvertUtcToPositive(self):
        self._shifted(3, None, 13)

    def testConvertUtcToNegative(self):
        self._shifted(-6, None, 4)

    def strTime(self, h, m, s, offset):
        # hh:mm:ss+OO:00 with a signed two-digit hour offset.
        return '%.2d:%.2d:%.2d%+.2d:00' % (h, m, s, offset)
class DateTimeTest(TestCase):
    """Exercise XDateTime.translate(): naive values, microseconds, the
    minimum-datetime edge case, and offset -> Timezone.LOCAL conversion
    including day rollover.
    """

    # Reference instant used by nearly every test.
    REF = dt.datetime(1941, 12, 7, 10, 30, 22)

    def _translate(self, s):
        return DateTime().translate(s)

    def _naive(self, ref, suffix=''):
        # Format *ref* as yyyy-mm-ddThh:mm:ss followed by *suffix*.
        return '%.4d-%.2d-%.2dT%.2d:%.2d:%.2d%s' % (
            ref.year, ref.month, ref.day,
            ref.hour, ref.minute, ref.second, suffix)

    def _shifted(self, local_tz, src_tz, day, hour):
        # Parse REF carrying offset *src_tz* (None means a 'Z' suffix) with
        # Timezone.LOCAL set to *local_tz*; day and hour must match the
        # expectations, all other fields must be unchanged.
        Timezone.LOCAL = local_tz
        if src_tz is None:
            s = self._naive(self.REF, 'Z')
        else:
            s = self.strDateTime(
                self.REF.year, self.REF.month, self.REF.day,
                self.REF.hour, self.REF.minute, self.REF.second, src_tz)
        t = self._translate(s)
        self.assertEqual(self.REF.year, t.year)
        self.assertEqual(self.REF.month, t.month)
        self.assertEqual(day, t.day)
        self.assertEqual(hour, t.hour)
        self.assertEqual(self.REF.minute, t.minute)
        self.assertEqual(self.REF.second, t.second)

    def testSimple(self):
        self.assertEqual(self._translate(self._naive(self.REF)), self.REF)

    def testOverflow(self):
        # datetime.min with a 'Z' suffix must not underflow when shifted.
        ref = dt.datetime(1, 1, 1, 0, 0, 0)
        self.assertEqual(self._translate(self._naive(ref, 'Z')), ref)

    def testSimpleWithMicrosecond(self):
        ref = dt.datetime(1941, 12, 7, 10, 30, 22, 454)
        s = '%.4d-%.2d-%.2dT%.2d:%.2d:%.2d.%.4d' % (
            ref.year, ref.month, ref.day,
            ref.hour, ref.minute, ref.second, ref.microsecond)
        self.assertEqual(self._translate(s), ref)

    def testPositiveTimezone(self):
        self.equalsTimezone(6)

    def testNegativeTimezone(self):
        self.equalsTimezone(-6)

    def testUtcTimezone(self):
        Timezone.LOCAL = 0
        self.assertEqual(self._translate(self._naive(self.REF)), self.REF)

    def equalsTimezone(self, tz):
        # Same offset as LOCAL: value must parse back unshifted.
        Timezone.LOCAL = tz
        s = self.strDateTime(
            self.REF.year, self.REF.month, self.REF.day,
            self.REF.hour, self.REF.minute, self.REF.second, tz)
        self.assertEqual(self._translate(s), self.REF)

    def testConvertNegativeToGreaterNegative(self):
        self._shifted(-6, -5, 7, 9)

    def testConvertNegativeToLesserNegative(self):
        self._shifted(-5, -6, 7, 11)

    def testConvertPositiveToGreaterPositive(self):
        self._shifted(3, 2, 7, 11)

    def testConvertPositiveToLesserPositive(self):
        self._shifted(2, 3, 7, 9)

    def testConvertPositiveToNegative(self):
        self._shifted(-6, 3, 7, 1)

    def testConvertNegativeToPositive(self):
        self._shifted(3, -6, 7, 19)

    def testConvertNegativeToUtc(self):
        self._shifted(0, -6, 7, 16)

    def testConvertPositiveToUtc(self):
        self._shifted(0, 3, 7, 7)

    def testConvertUtcToPositive(self):
        self._shifted(3, None, 7, 13)

    def testConvertUtcToNegative(self):
        self._shifted(-6, None, 7, 4)

    def testConvertNegativeToGreaterNegativeAndPreviousDay(self):
        # 00:30 shifted one hour back crosses midnight into Dec 6th.
        Timezone.LOCAL = -6
        t = self._translate(self.strDateTime(1941, 12, 7, 0, 30, 22, -5))
        self.assertEqual(1941, t.year)
        self.assertEqual(12, t.month)
        self.assertEqual(6, t.day)
        self.assertEqual(23, t.hour)
        self.assertEqual(30, t.minute)
        self.assertEqual(22, t.second)

    def testConvertNegativeToLesserNegativeAndNextDay(self):
        # 23:30 shifted one hour forward rolls over into Dec 8th.
        Timezone.LOCAL = -5
        t = self._translate(self.strDateTime(1941, 12, 7, 23, 30, 22, -6))
        self.assertEqual(1941, t.year)
        self.assertEqual(12, t.month)
        self.assertEqual(8, t.day)
        self.assertEqual(0, t.hour)
        self.assertEqual(30, t.minute)
        self.assertEqual(22, t.second)

    def strDateTime(self, Y, M, D, h, m, s, offset):
        # yyyy-mm-ddThh:mm:ss+OO:00 with a signed two-digit hour offset.
        s = '%.4d-%.2d-%.2dT%.2d:%.2d:%.2d%+.2d:00' \
            % (Y, M, D, h, m, s, offset)
        return s
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
LokiNetworks/empower-runtime | refs/heads/master | empower/core/vap.py | 1 | #!/usr/bin/env python3
#
# Copyright (c) 2016 Roberto Riggio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""EmPOWER Virtual Access Point (VAP) class."""
import empower.logger
LOG = empower.logger.get_logger()
class VAP(object):
    """The EmPOWER Virtual Access Point.

    Attributes:
        net_bssid: the VAP's network BSSID (EtherAddress).
        ssid: the SSID served, taken from the owning tenant's name.
        block: the resource block to which this VAP is assigned.
        wtp: the WTP hosting this VAP.
        tenant_id: identifier of the owning tenant.
    """

    def __init__(self, net_bssid, block, wtp, tenant):
        # All attributes are fixed at construction time.
        self.net_bssid = net_bssid
        self.ssid = tenant.tenant_name
        self.block = block
        self.wtp = wtp
        self.tenant_id = tenant.tenant_id

    def to_dict(self):
        """ Return a JSON-serializable dictionary representing the LVAP """
        return {'net_bssid': self.net_bssid,
                'ssid': self.ssid,
                'block': self.block,
                'wtp': self.wtp,
                'tenant_id': self.tenant_id}

    def __str__(self):
        return ('net_bssid %s ssid %s block %s wtp %s tenant_id %s'
                % (self.net_bssid, self.ssid, self.block,
                   self.wtp.addr, self.tenant_id))

    def __hash__(self):
        # Identity is fully determined by the network BSSID.
        return hash(self.net_bssid)

    def __eq__(self, other):
        return isinstance(other, VAP) and self.net_bssid == other.net_bssid

    def __ne__(self, other):
        return not self.__eq__(other)
|
Gillu13/scipy | refs/heads/master | scipy/sparse/tests/test_spfuncs.py | 122 | from __future__ import division, print_function, absolute_import
from numpy import array, kron, matrix, diag
from numpy.testing import TestCase, run_module_suite, assert_, assert_equal
from scipy.sparse import spfuncs
from scipy.sparse import csr_matrix, csc_matrix, bsr_matrix
from scipy.sparse._sparsetools import csr_scale_rows, csr_scale_columns, \
bsr_scale_rows, bsr_scale_columns
class TestSparseFunctions(TestCase):
    """Tests for scipy.sparse.spfuncs helpers and the C scaling kernels."""

    def test_scale_rows_and_cols(self):
        D = matrix([[1,0,0,2,3],
                    [0,4,0,5,0],
                    [0,0,6,7,0]])

        #TODO expose through function
        # CSR row scaling: in-place S <- diag(v) * S.
        S = csr_matrix(D)
        v = array([1,2,3])
        csr_scale_rows(3,5,S.indptr,S.indices,S.data,v)
        assert_equal(S.todense(), diag(v)*D)

        # CSR column scaling: in-place S <- S * diag(v).
        S = csr_matrix(D)
        v = array([1,2,3,4,5])
        csr_scale_columns(3,5,S.indptr,S.indices,S.data,v)
        assert_equal(S.todense(), D*diag(v))

        # blocks
        # BSR variants with square 2x2 blocks.
        E = kron(D,[[1,2],[3,4]])
        S = bsr_matrix(E,blocksize=(2,2))
        v = array([1,2,3,4,5,6])
        bsr_scale_rows(3,5,2,2,S.indptr,S.indices,S.data,v)
        assert_equal(S.todense(), diag(v)*E)

        S = bsr_matrix(E,blocksize=(2,2))
        v = array([1,2,3,4,5,6,7,8,9,10])
        bsr_scale_columns(3,5,2,2,S.indptr,S.indices,S.data,v)
        assert_equal(S.todense(), E*diag(v))

        # BSR variants with non-square 2x3 blocks.
        E = kron(D,[[1,2,3],[4,5,6]])
        S = bsr_matrix(E,blocksize=(2,3))
        v = array([1,2,3,4,5,6])
        bsr_scale_rows(3,5,2,3,S.indptr,S.indices,S.data,v)
        assert_equal(S.todense(), diag(v)*E)

        S = bsr_matrix(E,blocksize=(2,3))
        v = array([1,2,3,4,5,6,7,8,9,10,11,12,13,14,15])
        bsr_scale_columns(3,5,2,3,S.indptr,S.indices,S.data,v)
        assert_equal(S.todense(), E*diag(v))

    def test_estimate_blocksize(self):
        # The estimated block size must cover the true block dimensions.
        mats = []
        mats.append([[0,1],[1,0]])
        mats.append([[1,1,0],[0,0,1],[1,0,1]])
        mats.append([[0],[0],[1]])
        mats = [array(x) for x in mats]

        blks = []
        blks.append([[1]])
        blks.append([[1,1],[1,1]])
        blks.append([[1,1],[0,1]])
        blks.append([[1,1,0],[1,0,1],[1,1,1]])
        blks = [array(x) for x in blks]

        for A in mats:
            for B in blks:
                X = kron(A,B)
                r,c = spfuncs.estimate_blocksize(X)
                assert_(r >= B.shape[0])
                assert_(c >= B.shape[1])

    def test_count_blocks(self):
        def gold(A,bs):
            # Reference implementation: number of distinct RxC tiles that
            # contain at least one nonzero entry.
            R,C = bs
            I,J = A.nonzero()
            return len(set(zip(I//R,J//C)))

        mats = []
        mats.append([[0]])
        mats.append([[1]])
        mats.append([[1,0]])
        mats.append([[1,1]])
        mats.append([[0,1],[1,0]])
        mats.append([[1,1,0],[0,0,1],[1,0,1]])
        mats.append([[0],[0],[1]])

        for A in mats:
            for B in mats:
                X = kron(A,B)
                Y = csr_matrix(X)
                for R in range(1,6):
                    for C in range(1,6):
                        assert_equal(spfuncs.count_blocks(Y, (R, C)), gold(X, (R, C)))

        # count_blocks must also accept CSC and dense inputs.
        X = kron([[1,1,0],[0,0,1],[1,0,1]],[[1,1]])
        Y = csc_matrix(X)
        assert_equal(spfuncs.count_blocks(X, (1, 2)), gold(X, (1, 2)))
        assert_equal(spfuncs.count_blocks(Y, (1, 2)), gold(X, (1, 2)))
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    run_module_suite()
|
botland/bitcoin | refs/heads/master | contrib/linearize/linearize-hashes.py | 214 | #!/usr/bin/python
#
# linearize-hashes.py: List blocks in a linear, no-fork version of the chain.
#
# Copyright (c) 2013-2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from __future__ import print_function
import json
import struct
import re
import base64
import httplib
import sys
settings = {}
class BitcoinRPC:
    """Minimal JSON-RPC client for bitcoind (Python 2, uses httplib)."""

    def __init__(self, host, port, username, password):
        # Precompute the HTTP basic-auth header once.
        authpair = "%s:%s" % (username, password)
        self.authhdr = "Basic %s" % (base64.b64encode(authpair))
        # Plain HTTP connection with a 30 second timeout.
        self.conn = httplib.HTTPConnection(host, port, False, 30)

    def execute(self, obj):
        """POST *obj* (a request or batch of requests) and return the
        decoded JSON reply, or None when no response arrives."""
        self.conn.request('POST', '/', json.dumps(obj),
                          { 'Authorization' : self.authhdr,
                            'Content-type' : 'application/json' })

        resp = self.conn.getresponse()
        if resp is None:
            print("JSON-RPC: no response", file=sys.stderr)
            return None

        body = resp.read()
        resp_obj = json.loads(body)
        return resp_obj

    @staticmethod
    def build_request(idx, method, params):
        # Build a JSON-RPC 1.1 request object; *idx* becomes the request id
        # so batched replies can be matched to their requests.
        obj = { 'version' : '1.1',
                'method' : method,
                'id' : idx }
        if params is None:
            obj['params'] = []
        else:
            obj['params'] = params
        return obj

    @staticmethod
    def response_is_error(resp_obj):
        # A reply is an error when it carries a non-null 'error' member.
        return 'error' in resp_obj and resp_obj['error'] is not None
def get_block_hashes(settings, max_blocks_per_call=10000):
    """Print the hash of every block from min_height to max_height,
    batching getblockhash RPC calls in groups of *max_blocks_per_call*."""
    rpc = BitcoinRPC(settings['host'], settings['port'],
                     settings['rpcuser'], settings['rpcpassword'])

    height = settings['min_height']
    while height < settings['max_height']+1:
        # Never request past max_height, even on the final batch.
        num_blocks = min(settings['max_height']+1-height, max_blocks_per_call)
        batch = []
        for x in range(num_blocks):
            batch.append(rpc.build_request(x, 'getblockhash', [height + x]))

        reply = rpc.execute(batch)

        for x,resp_obj in enumerate(reply):
            if rpc.response_is_error(resp_obj):
                print('JSON-RPC: error at height', height+x, ': ', resp_obj['error'], file=sys.stderr)
                exit(1)
            assert(resp_obj['id'] == x) # assume replies are in-sequence
            print(resp_obj['result'])

        height += num_blocks
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print("Usage: linearize-hashes.py CONFIG-FILE")
        sys.exit(1)

    # Parse the simple key=value config file; use a context manager so the
    # file is closed even if parsing raises.
    with open(sys.argv[1]) as f:
        for line in f:
            # skip comment lines
            m = re.search(r'^\s*#', line)
            if m:
                continue

            # parse key=value lines
            m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
            if m is None:
                continue
            settings[m.group(1)] = m.group(2)

    # Defaults for anything the config file omitted.
    if 'host' not in settings:
        settings['host'] = '127.0.0.1'
    if 'port' not in settings:
        settings['port'] = 8332
    if 'min_height' not in settings:
        settings['min_height'] = 0
    if 'max_height' not in settings:
        settings['max_height'] = 313000
    if 'rpcuser' not in settings or 'rpcpassword' not in settings:
        # Bug fix: 'stderr' was an undefined name (NameError); use sys.stderr.
        print("Missing username and/or password in cfg file", file=sys.stderr)
        sys.exit(1)

    # Numeric settings arrive as strings from the config file.
    settings['port'] = int(settings['port'])
    settings['min_height'] = int(settings['min_height'])
    settings['max_height'] = int(settings['max_height'])

    get_block_hashes(settings)
|
ContinuumIO/watchdog | refs/heads/master | src/watchdog/observers/inotify_buffer.py | 1 | # -*- coding: utf-8 -*-
#
# Copyright 2014 Thomas Amland <thomas.amland@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from watchdog.utils import BaseThread
from watchdog.utils.delayed_queue import DelayedQueue
from watchdog.observers.inotify_c import Inotify
logger = logging.getLogger(__name__)
class InotifyBuffer(BaseThread):
    """A wrapper for `Inotify` that holds events for `delay` seconds. During
    this time, IN_MOVED_FROM and IN_MOVED_TO events are paired.
    """

    # Seconds an event is held in the queue while waiting for its pair.
    delay = 0.5

    def __init__(self, path, recursive=False, filter_fn=None):
        BaseThread.__init__(self)
        self._queue = DelayedQueue(self.delay)
        self._inotify = Inotify(path, recursive, filter_fn=filter_fn)
        # The reader thread starts as soon as the buffer is constructed.
        self.start()

    def read_event(self):
        """Returns a single event or a tuple of from/to events in case of a
        paired move event. If this buffer has been closed, immediately return
        None.
        """
        return self._queue.get()

    def on_thread_stop(self):
        # Unblock both the kernel read and any consumer waiting on the queue.
        self._inotify.close()
        self._queue.close()

    def close(self):
        # Stop the reader thread and wait for it to finish.
        self.stop()
        self.join()

    def run(self):
        """Read event from `inotify` and add them to `queue`. When reading a
        IN_MOVE_TO event, remove the previous added matching IN_MOVE_FROM event
        and add them back to the queue as a tuple.
        """
        while self.should_keep_running():
            inotify_events = self._inotify.read_events()
            for inotify_event in inotify_events:
                logger.debug("in-event %s", inotify_event)
                if inotify_event.is_moved_to:

                    def matching_from_event(event):
                        # Pair by kernel cookie; skip already-paired tuples.
                        return (not isinstance(event, tuple) and event.is_moved_from
                                and event.cookie == inotify_event.cookie)

                    from_event = self._queue.remove(matching_from_event)
                    if from_event is not None:
                        self._queue.put((from_event, inotify_event))
                    else:
                        # An unmatched move_to is forwarded on its own.
                        logger.debug("could not find matching move_from event")
                        self._queue.put(inotify_event)
                else:
                    self._queue.put(inotify_event)
|
jgcaaprom/android_external_chromium_org | refs/heads/cm-12.1 | tools/roll_swarming_client.py | 63 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Rolls swarming_client.
While it is currently hard coded for swarming_client/, it is potentially
modifiable to allow different dependencies. Works only with git checkout and git
dependencies.
"""
import optparse
import os
import re
import subprocess
import sys
SRC_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def is_pristine(root, merge_base='origin/master'):
    """Returns True is a git checkout is pristine."""
    cmd = ['git', 'diff', '--ignore-submodules', merge_base]
    # Unstaged changes first; only look at the index when the tree is clean
    # (preserves the original short-circuit behaviour).
    if subprocess.check_output(cmd, cwd=root).strip():
        return False
    return not subprocess.check_output(cmd + ['--cached'], cwd=root).strip()
def roll(deps_dir, key, reviewer, bug):
    """Roll the git dependency at *deps_dir* to origin/master.

    Rewrites the *key* revision pin in DEPS and commits the change locally.
    Returns 0 on success, 1 on any failure.
    """
    if not is_pristine(SRC_ROOT):
        print >> sys.stderr, 'Ensure %s is clean first.' % SRC_ROOT
        return 1

    full_dir = os.path.join(SRC_ROOT, deps_dir)
    # Revision currently checked out inside the dependency.
    head = subprocess.check_output(
        ['git', 'rev-parse', 'HEAD'], cwd=full_dir).strip()

    deps = os.path.join(SRC_ROOT, 'DEPS')
    with open(deps, 'rb') as f:
        deps_content = f.read()

    if not head in deps_content:
        print('Warning: %s is not checked out at the expected revision in DEPS' %
              deps_dir)
        # It happens if the user checked out a branch in the dependency by himself.
        # Fall back to reading the DEPS to figure out the original commit.
        for i in deps_content.splitlines():
            m = re.match(r'\s+"' + key + '": "([a-z0-9]{40})",', i)
            if m:
                head = m.group(1)
                break
        else:
            print >> sys.stderr, 'Expected to find commit %s for %s in DEPS' % (
                head, key)
            return 1

    print('Found old revision %s' % head)
    subprocess.check_call(['git', 'fetch', 'origin'], cwd=full_dir)
    master = subprocess.check_output(
        ['git', 'rev-parse', 'origin/master'], cwd=full_dir).strip()
    print('Found new revision %s' % master)

    if master == head:
        print('No revision to roll!')
        return 1

    # Human-readable log of the commits pulled in by the roll, embedded in
    # the commit message together with the command that produced it.
    commit_range = '%s..%s' % (head[:9], master[:9])
    logs = subprocess.check_output(
        ['git', 'log', commit_range, '--date=short', '--format=%ad %ae %s'],
        cwd=full_dir).strip()
    logs = logs.replace('@chromium.org', '')
    cmd = (
        'git log %s --date=short --format=\'%%ad %%ae %%s\' | '
        'sed \'s/@chromium\.org//\'') % commit_range

    msg = (
        'Roll %s/ to %s.\n'
        '\n'
        '$ %s\n'
        '%s\n\n'
        'R=%s\n'
        'BUG=%s') % (
            deps_dir,
            master,
            cmd,
            logs,
            reviewer,
            bug)

    print('Commit message:')
    print('\n'.join(' ' + i for i in msg.splitlines()))

    # Rewrite the pinned revision in DEPS and commit locally; the upload to
    # code review is left to the user.
    deps_content = deps_content.replace(head, master)
    with open(deps, 'wb') as f:
        f.write(deps_content)
    subprocess.check_call(['git', 'add', 'DEPS'], cwd=SRC_ROOT)
    subprocess.check_call(['git', 'commit', '-m', msg], cwd=SRC_ROOT)
    print('Run:')
    print(' git cl upl --send-mail')
    return 0
def main():
    """Parse command-line options and roll tools/swarming_client in DEPS.

    Returns the process exit code (0 on success).
    """
    parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
    parser.add_option(
        '-r', '--reviewer', default='',
        help='To specify multiple reviewers, use comma separated list, e.g. '
             '-r joe,jack,john. Defaults to @chromium.org')
    parser.add_option('-b', '--bug', default='')
    options, args = parser.parse_args()
    if args:
        parser.error('Unknown argument %s' % args)
    if not options.reviewer:
        parser.error('Pass a reviewer right away with -r/--reviewer')

    # Qualify bare user names with the default @chromium.org domain.
    reviewers = options.reviewer.split(',')
    for i, r in enumerate(reviewers):
        # Idiom fix: '@' not in r (was: not '@' in r).
        if '@' not in r:
            reviewers[i] = r + '@chromium.org'

    return roll(
        'tools/swarming_client',
        'swarming_revision',
        ','.join(reviewers),
        options.bug)


if __name__ == '__main__':
    sys.exit(main())
|
joshk/FrameworkBenchmarks | refs/heads/master | frameworks/Python/AsyncIO/yocto_http/hello/services/redis.py | 108 | from operator import itemgetter
import asyncio
from random import randint
@asyncio.coroutine
def get_random_record(container):
    """Fetch one random World row (id in [1, 10000]) from redis."""
    idx = randint(1, 10000)
    random_number = yield from container.engines['redis'].get('world:%i' % idx)
    return {'Id': idx, 'RandomNumber': random_number}
@asyncio.coroutine
def get_random_records(container, limit):
    """Fetch *limit* random World rows from redis, one GET per row."""
    results = []
    for i in range(limit):
        idx = randint(1, 10000)
        random_number = yield from container.engines['redis'].get('world:%i' % idx)
        results.append({'Id': idx, 'RandomNumber': random_number})
    return results
@asyncio.coroutine
def update_random_records(container, limit):
    """Overwrite *limit* random World rows with new random values.

    Note: each returned dict carries the value read *before* the update.
    """
    results = []
    for i in range(limit):
        idx = randint(1, 10000)
        random_number = yield from container.engines['redis'].get('world:%i' % idx)
        yield from container.engines['redis'].set('world:%i' % idx, str(randint(1, 10000)))
        results.append({'Id': idx, 'RandomNumber': random_number})
    return results
@asyncio.coroutine
def get_fortunes(container):
    """Return all fortunes from the redis list, plus one added at request
    time, sorted by message text."""
    results = []
    list_reply = yield from container.engines['redis'].lrange('fortunes')
    fortunes = yield from list_reply.aslist()
    # Ids are assigned by list position, starting at 1.
    i = 1
    for fortune in fortunes:
        results.append({'id': i, 'message': fortune})
        i += 1
    results.append({'id': 0, 'message': 'Additional fortune added at request time.'})
    results.sort(key=itemgetter('message'))
    return results
alihalabyah/ansible | refs/heads/devel | lib/ansible/playbook/play.py | 23 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from six import string_types
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.block import Block
from ansible.playbook.helpers import load_list_of_blocks, load_list_of_roles
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
from ansible.playbook.task import Task
__all__ = ['Play']
class Play(Base, Taggable, Become):

    """
    A play is a language feature that represents a list of roles and/or
    task/handler blocks to execute on a given set of hosts.

    Usage:

       Play.load(datastructure) -> Play
       Play.something(...)
    """

    # =================================================================================
    # Connection-Related Attributes

    # TODO: generalize connection
    _accelerate = FieldAttribute(isa='bool', default=False)
    _accelerate_ipv6 = FieldAttribute(isa='bool', default=False)
    _accelerate_port = FieldAttribute(isa='int', default=5099)  # should be alias of port

    # Connection
    _gather_facts = FieldAttribute(isa='bool', default=None)
    # NOTE(review): mutable list defaults below are assumed to be copied by
    # FieldAttribute rather than shared -- confirm in ansible.playbook.attribute.
    _hosts = FieldAttribute(isa='list', default=[], required=True, listof=string_types)
    _name = FieldAttribute(isa='string', default='')

    # Variable Attributes
    _vars_files = FieldAttribute(isa='list', default=[])
    _vars_prompt = FieldAttribute(isa='list', default=[])
    _vault_password = FieldAttribute(isa='string')

    # Role Attributes
    _roles = FieldAttribute(isa='list', default=[], priority=100)

    # Block (Task) Lists Attributes
    _handlers = FieldAttribute(isa='list', default=[])
    _pre_tasks = FieldAttribute(isa='list', default=[])
    _post_tasks = FieldAttribute(isa='list', default=[])
    _tasks = FieldAttribute(isa='list', default=[])

    # Flag/Setting Attributes
    _any_errors_fatal = FieldAttribute(isa='bool', default=False)
    _force_handlers = FieldAttribute(isa='bool')
    _max_fail_percentage = FieldAttribute(isa='string', default='0')
    _serial = FieldAttribute(isa='int', default=0)
    _strategy = FieldAttribute(isa='string', default='linear')

    # =================================================================================

    def __init__(self):
        super(Play, self).__init__()
        # Per-play role cache; copied along with the play in copy().
        self.ROLE_CACHE = {}

    def __repr__(self):
        # Represent the play by its name for readable logs/tracebacks.
        return self.get_name()

    def get_name(self):
        ''' return the name of the Play '''
        return self._attributes.get('name')

    @staticmethod
    def load(data, variable_manager=None, loader=None):
        ''' Create a Play and populate it from the given datastructure. '''
        p = Play()
        return p.load_data(data, variable_manager=variable_manager, loader=loader)

    def preprocess_data(self, ds):
        '''
        Adjusts play datastructure to cleanup old/legacy items
        '''

        # NOTE(review): assert is stripped under python -O; presumably callers
        # always pass a dict here -- confirm before relying on this check.
        assert isinstance(ds, dict)

        # The use of 'user' in the Play datastructure was deprecated to
        # line up with the same change for Tasks, due to the fact that
        # 'user' conflicted with the user module.
        if 'user' in ds:
            # this should never happen, but error out with a helpful message
            # to the user if it does...
            if 'remote_user' in ds:
                raise AnsibleParserError("both 'user' and 'remote_user' are set for %s. The use of 'user' is deprecated, and should be removed" % self.get_name(), obj=ds)
            ds['remote_user'] = ds['user']
            del ds['user']

        # A single prompt dict is normalized to a one-element list.
        if 'vars_prompt' in ds and not isinstance(ds['vars_prompt'], list):
            ds['vars_prompt'] = [ ds['vars_prompt'] ]

        return super(Play, self).preprocess_data(ds)

    def _load_hosts(self, attr, ds):
        '''
        Loads the hosts from the given datastructure, which might be a list
        or a simple string. We also switch integers in this list back to strings,
        as the YAML parser will turn things that look like numbers into numbers.
        '''

        if isinstance(ds, (string_types, int)):
            ds = [ ds ]

        if not isinstance(ds, list):
            raise AnsibleParserError("'hosts' must be specified as a list or a single pattern", obj=ds)

        # YAML parsing of things that look like numbers may have
        # resulted in integers showing up in the list, so convert
        # them back to strings to prevent problems
        for idx,item in enumerate(ds):
            if isinstance(item, int):
                ds[idx] = "%s" % item

        return ds

    def _load_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)

    def _load_pre_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)

    def _load_post_tasks(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed tasks/blocks.
        Bare tasks outside of a block are given an implicit block.
        '''
        return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)

    def _load_handlers(self, attr, ds):
        '''
        Loads a list of blocks from a list which may be mixed handlers/blocks.
        Bare handlers outside of a block are given an implicit block.
        '''
        return load_list_of_blocks(ds=ds, play=self, use_handlers=True, variable_manager=self._variable_manager, loader=self._loader)

    def _load_roles(self, attr, ds):
        '''
        Loads and returns a list of RoleInclude objects from the datastructure
        list of role definitions and creates the Role from those objects
        '''

        if ds is None:
            ds = []

        role_includes = load_list_of_roles(ds, play=self, variable_manager=self._variable_manager, loader=self._loader)

        roles = []
        for ri in role_includes:
            roles.append(Role.load(ri, play=self))
        return roles

    def _post_validate_vars(self, attr, value, templar):
        '''
        Override post validation of vars on the play, as we don't want to
        template these too early.
        '''
        return value

    def _post_validate_vars_files(self, attr, value, templar):
        '''
        Override post validation of vars_files on the play, as we don't want to
        template these too early.
        '''
        return value

    # disable validation on various fields which will be validated later in other objects
    def _post_validate_become(self, attr, value, templar):
        return value

    def _post_validate_become_user(self, attr, value, templar):
        return value

    def _post_validate_become_method(self, attr, value, templar):
        return value

    # FIXME: post_validation needs to ensure that become/su/sudo have only 1 set

    def _compile_roles(self):
        '''
        Handles the role compilation step, returning a flat list of tasks
        with the lowest level dependencies first. For example, if a role R
        has a dependency D1, which also has a dependency D2, the tasks from
        D2 are merged first, followed by D1, and lastly by the tasks from
        the parent role R last. This is done for all roles in the Play.
        '''

        block_list = []

        if len(self.roles) > 0:
            for r in self.roles:
                block_list.extend(r.compile(play=self))

        return block_list

    def compile_roles_handlers(self):
        '''
        Handles the role handler compilation step, returning a flat list of Handlers
        This is done for all roles in the Play.
        '''

        block_list = []

        if len(self.roles) > 0:
            for r in self.roles:
                block_list.extend(r.get_handler_blocks())

        return block_list

    def compile(self):
        '''
        Compiles and returns the task list for this play, compiled from the
        roles (which are themselves compiled recursively) and/or the list of
        tasks specified in the play.
        '''

        # create a block containing a single flush handlers meta
        # task, so we can be sure to run handlers at certain points
        # of the playbook execution
        flush_block = Block.load(
            data={'meta': 'flush_handlers'},
            play=self,
            variable_manager=self._variable_manager,
            loader=self._loader
        )

        # Handlers are flushed after each of the three task phases.
        block_list = []

        block_list.extend(self.pre_tasks)
        block_list.append(flush_block)
        block_list.extend(self._compile_roles())
        block_list.extend(self.tasks)
        block_list.append(flush_block)
        block_list.extend(self.post_tasks)
        block_list.append(flush_block)

        return block_list

    def get_vars(self):
        # Return a shallow copy so callers cannot mutate the play's vars.
        return self.vars.copy()

    def get_vars_files(self):
        return self.vars_files

    def get_handlers(self):
        # Defensive copy of the handlers list.
        return self.handlers[:]

    def get_roles(self):
        # Defensive copy of the roles list.
        return self.roles[:]

    def get_tasks(self):
        # Flatten pre/main/post tasks; blocks contribute their
        # block/rescue/always sub-lists as a single entry.
        tasklist = []
        for task in self.pre_tasks + self.tasks + self.post_tasks:
            if isinstance(task, Block):
                tasklist.append(task.block + task.rescue + task.always)
            else:
                tasklist.append(task)
        return tasklist

    def serialize(self):
        ''' Serialize the play, including its roles, to plain data. '''
        data = super(Play, self).serialize()

        roles = []
        for role in self.get_roles():
            roles.append(role.serialize())
        data['roles'] = roles

        return data

    def deserialize(self, data):
        ''' Restore a play (and its roles) from serialized data. '''
        super(Play, self).deserialize(data)

        if 'roles' in data:
            role_data = data.get('roles', [])
            roles = []
            for role in role_data:
                r = Role()
                r.deserialize(role)
                roles.append(r)

            setattr(self, 'roles', roles)
            del data['roles']

    def copy(self):
        # Copy the play and share-by-copy its role cache.
        new_me = super(Play, self).copy()
        new_me.ROLE_CACHE = self.ROLE_CACHE.copy()
        return new_me
|
2014c2g14/c2g14 | refs/heads/master | w2/static/Brython2.0.0-20140209-164925/Lib/textwrap.py | 745 | """Text wrapping and filling.
"""
# Copyright (C) 1999-2001 Gregory P. Ward.
# Copyright (C) 2002, 2003 Python Software Foundation.
# Written by Greg Ward <gward@python.net>
import re
__all__ = ['TextWrapper', 'wrap', 'fill', 'dedent', 'indent']
# Hardcode the recognized whitespace characters to the US-ASCII
# whitespace characters. The main reason for doing this is that in
# ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales
# that character winds up in string.whitespace. Respecting
# string.whitespace in those cases would 1) make textwrap treat 0xa0 the
# same as any other whitespace char, which is clearly wrong (it's a
# *non-breaking* space), 2) possibly cause problems with Unicode,
# since 0xa0 is not in range(128).
_whitespace = '\t\n\x0b\x0c\r '
class TextWrapper:
    """
    Object for wrapping/filling text.  The public interface consists of
    the wrap() and fill() methods; the other methods are just there for
    subclasses to override in order to tweak the default behaviour.
    If you want to completely replace the main wrapping algorithm,
    you'll probably have to override _wrap_chunks().

    Several instance attributes control various aspects of wrapping:
      width (default: 70)
        the maximum width of wrapped lines (unless break_long_words
        is false)
      initial_indent (default: "")
        string that will be prepended to the first line of wrapped
        output.  Counts towards the line's width.
      subsequent_indent (default: "")
        string that will be prepended to all lines save the first
        of wrapped output; also counts towards each line's width.
      expand_tabs (default: true)
        Expand tabs in input text to spaces before further processing.
        Each tab will become 0 .. 'tabsize' spaces, depending on its position
        in its line.  If false, each tab is treated as a single character.
      tabsize (default: 8)
        Expand tabs in input text to 0 .. 'tabsize' spaces, unless
        'expand_tabs' is false.
      replace_whitespace (default: true)
        Replace all whitespace characters in the input text by spaces
        after tab expansion.  Note that if expand_tabs is false and
        replace_whitespace is true, every tab will be converted to a
        single space!
      fix_sentence_endings (default: false)
        Ensure that sentence-ending punctuation is always followed
        by two spaces.  Off by default because the algorithm is
        (unavoidably) imperfect.
      break_long_words (default: true)
        Break words longer than 'width'.  If false, those words will not
        be broken, and some lines might be longer than 'width'.
      break_on_hyphens (default: true)
        Allow breaking hyphenated words. If true, wrapping will occur
        preferably on whitespaces and right after hyphens part of
        compound words.
      drop_whitespace (default: true)
        Drop leading and trailing whitespace from lines.
    """

    # Map each recognized (US-ASCII) whitespace character to a plain
    # space.  dict.fromkeys builds the table without leaking loop
    # variables into the class namespace (the old class-level for-loop
    # left 'x' and 'uspace' behind as class attributes).
    unicode_whitespace_trans = dict.fromkeys(map(ord, '\t\n\x0b\x0c\r '),
                                             ord(' '))

    # This funky little regex is just the trick for splitting
    # text up into word-wrappable chunks.  E.g.
    #   "Hello there -- you goof-ball, use the -b option!"
    # splits into
    #   Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option!
    # (after stripping out empty strings).
    wordsep_re = re.compile(
        r'(\s+|'                                  # any whitespace
        r'[^\s\w]*\w+[^0-9\W]-(?=\w+[^0-9\W])|'   # hyphenated words
        r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))')   # em-dash

    # This less funky little regex just split on recognized spaces. E.g.
    #   "Hello there -- you goof-ball, use the -b option!"
    # splits into
    #   Hello/ /there/ /--/ /you/ /goof-ball,/ /use/ /the/ /-b/ /option!/
    wordsep_simple_re = re.compile(r'(\s+)')

    # XXX this is not locale- or charset-aware -- string.lowercase
    # is US-ASCII only (and therefore English-only)
    sentence_end_re = re.compile(r'[a-z]'             # lowercase letter
                                 r'[\.\!\?]'          # sentence-ending punct.
                                 r'[\"\']?'           # optional end-of-quote
                                 r'\Z')               # end of chunk

    def __init__(self,
                 width=70,
                 initial_indent="",
                 subsequent_indent="",
                 expand_tabs=True,
                 replace_whitespace=True,
                 fix_sentence_endings=False,
                 break_long_words=True,
                 drop_whitespace=True,
                 break_on_hyphens=True,
                 tabsize=8):
        self.width = width
        self.initial_indent = initial_indent
        self.subsequent_indent = subsequent_indent
        self.expand_tabs = expand_tabs
        self.replace_whitespace = replace_whitespace
        self.fix_sentence_endings = fix_sentence_endings
        self.break_long_words = break_long_words
        self.drop_whitespace = drop_whitespace
        self.break_on_hyphens = break_on_hyphens
        self.tabsize = tabsize

    # -- Private methods -----------------------------------------------
    # (possibly useful for subclasses to override)

    def _munge_whitespace(self, text):
        """_munge_whitespace(text : string) -> string

        Munge whitespace in text: expand tabs and convert all other
        whitespace characters to spaces.  Eg. " foo\tbar\n\nbaz"
        becomes " foo bar  baz".
        """
        if self.expand_tabs:
            text = text.expandtabs(self.tabsize)
        if self.replace_whitespace:
            text = text.translate(self.unicode_whitespace_trans)
        return text

    def _split(self, text):
        """_split(text : string) -> [string]

        Split the text to wrap into indivisible chunks.  Chunks are
        not quite the same as words; see _wrap_chunks() for full
        details.  As an example, the text
          Look, goof-ball -- use the -b option!
        breaks into the following chunks:
          'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ',
          'use', ' ', 'the', ' ', '-b', ' ', 'option!'
        if break_on_hyphens is True, or in:
          'Look,', ' ', 'goof-ball', ' ', '--', ' ',
          'use', ' ', 'the', ' ', '-b', ' ', option!'
        otherwise.
        """
        if self.break_on_hyphens is True:
            chunks = self.wordsep_re.split(text)
        else:
            chunks = self.wordsep_simple_re.split(text)
        # The capturing split emits empty strings between adjacent
        # separators; discard them.
        chunks = [c for c in chunks if c]
        return chunks

    def _fix_sentence_endings(self, chunks):
        """_fix_sentence_endings(chunks : [string])

        Correct for sentence endings buried in 'chunks'.  Eg. when the
        original text contains "... foo.\nBar ...", munge_whitespace()
        and split() will convert that to [..., "foo.", " ", "Bar", ...]
        which has one too few spaces; this method simply changes the one
        space to two.
        """
        i = 0
        patsearch = self.sentence_end_re.search
        while i < len(chunks)-1:
            if chunks[i+1] == " " and patsearch(chunks[i]):
                # BUG FIX: the replacement must be *two* spaces; the
                # previous single-space assignment made this method a
                # no-op and fix_sentence_endings=True had no effect.
                chunks[i+1] = "  "
                i += 2
            else:
                i += 1

    def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
        """_handle_long_word(chunks : [string],
                             cur_line : [string],
                             cur_len : int, width : int)

        Handle a chunk of text (most likely a word, not whitespace) that
        is too long to fit in any line.
        """
        # Figure out when indent is larger than the specified width, and make
        # sure at least one character is stripped off on every pass
        if width < 1:
            space_left = 1
        else:
            space_left = width - cur_len

        # If we're allowed to break long words, then do so: put as much
        # of the next chunk onto the current line as will fit.
        if self.break_long_words:
            cur_line.append(reversed_chunks[-1][:space_left])
            reversed_chunks[-1] = reversed_chunks[-1][space_left:]

        # Otherwise, we have to preserve the long word intact.  Only add
        # it to the current line if there's nothing already there --
        # that minimizes how much we violate the width constraint.
        elif not cur_line:
            cur_line.append(reversed_chunks.pop())

        # If we're not allowed to break long words, and there's already
        # text on the current line, do nothing.  Next time through the
        # main loop of _wrap_chunks(), we'll wind up here again, but
        # cur_len will be zero, so the next line will be entirely
        # devoted to the long word that we can't handle right now.

    def _wrap_chunks(self, chunks):
        """_wrap_chunks(chunks : [string]) -> [string]

        Wrap a sequence of text chunks and return a list of lines of
        length 'self.width' or less.  (If 'break_long_words' is false,
        some lines may be longer than this.)  Chunks correspond roughly
        to words and the whitespace between them: each chunk is
        indivisible (modulo 'break_long_words'), but a line break can
        come between any two chunks.  Chunks should not have internal
        whitespace; ie. a chunk is either all whitespace or a "word".
        Whitespace chunks will be removed from the beginning and end of
        lines, but apart from that whitespace is preserved.
        """
        lines = []
        if self.width <= 0:
            raise ValueError("invalid width %r (must be > 0)" % self.width)

        # Arrange in reverse order so items can be efficiently popped
        # from a stack of chucks.
        chunks.reverse()

        while chunks:

            # Start the list of chunks that will make up the current line.
            # cur_len is just the length of all the chunks in cur_line.
            cur_line = []
            cur_len = 0

            # Figure out which static string will prefix this line.
            if lines:
                indent = self.subsequent_indent
            else:
                indent = self.initial_indent

            # Maximum width for this line.
            width = self.width - len(indent)

            # First chunk on line is whitespace -- drop it, unless this
            # is the very beginning of the text (ie. no lines started yet).
            if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                del chunks[-1]

            while chunks:
                l = len(chunks[-1])

                # Can at least squeeze this chunk onto the current line.
                if cur_len + l <= width:
                    cur_line.append(chunks.pop())
                    cur_len += l

                # Nope, this line is full.
                else:
                    break

            # The current line is full, and the next chunk is too big to
            # fit on *any* line (not just this one).
            if chunks and len(chunks[-1]) > width:
                self._handle_long_word(chunks, cur_line, cur_len, width)

            # If the last chunk on this line is all whitespace, drop it.
            if self.drop_whitespace and cur_line and cur_line[-1].strip() == '':
                del cur_line[-1]

            # Convert current line back to a string and store it in list
            # of all lines (return value).
            if cur_line:
                lines.append(indent + ''.join(cur_line))

        return lines

    # -- Public interface ----------------------------------------------

    def wrap(self, text):
        """wrap(text : string) -> [string]

        Reformat the single paragraph in 'text' so it fits in lines of
        no more than 'self.width' columns, and return a list of wrapped
        lines.  Tabs in 'text' are expanded with string.expandtabs(),
        and all other whitespace characters (including newline) are
        converted to space.
        """
        text = self._munge_whitespace(text)
        chunks = self._split(text)
        if self.fix_sentence_endings:
            self._fix_sentence_endings(chunks)
        return self._wrap_chunks(chunks)

    def fill(self, text):
        """fill(text : string) -> string

        Reformat the single paragraph in 'text' to fit in lines of no
        more than 'self.width' columns, and return a new string
        containing the entire wrapped paragraph.
        """
        return "\n".join(self.wrap(text))
# -- Convenience interface ---------------------------------------------
def wrap(text, width=70, **kwargs):
    """Wrap one paragraph, returning the output as a list of lines.

    'text' is reflowed so that no line exceeds 'width' columns (tabs are
    expanded and other whitespace collapsed to spaces by default).  Any
    extra keyword arguments are forwarded to TextWrapper, which
    implements the actual algorithm.
    """
    return TextWrapper(width=width, **kwargs).wrap(text)
def fill(text, width=70, **kwargs):
    """Fill one paragraph, returning it as a single wrapped string.

    Equivalent to joining wrap(text, width, **kwargs) with newlines; any
    extra keyword arguments are forwarded to TextWrapper.
    """
    return TextWrapper(width=width, **kwargs).fill(text)
# -- Loosely related functionality -------------------------------------
_whitespace_only_re = re.compile('^[ \t]+$', re.MULTILINE)
_leading_whitespace_re = re.compile('(^[ \t]*)(?:[^ \t\n])', re.MULTILINE)
def dedent(text):
    """Remove any common leading whitespace from every line in `text`.

    This can be used to make triple-quoted strings line up with the left
    edge of the display, while still presenting them in the source code
    in indented form.

    Note that tabs and spaces are both treated as whitespace, but they
    are not equal: the lines "  hello" and "\thello" are
    considered to have no common leading whitespace.
    """
    # Blank out whitespace-only lines first so they do not take part in
    # the margin computation.  The patterns are inlined here so the
    # function is self-contained; re's internal cache makes repeated
    # compilation essentially free.
    text = re.sub(r'(?m)^[ \t]+$', '', text)
    indents = re.findall(r'(?m)^([ \t]*)(?:[^ \t\n])', text)

    # Find the longest leading run of spaces/tabs common to all lines.
    margin = None
    for indent in indents:
        if margin is None:
            margin = indent
        elif indent.startswith(margin):
            # Current line more deeply indented than previous winner:
            # no change (previous winner is still on top).
            pass
        elif margin.startswith(indent):
            # Current line consistent with and no deeper than previous
            # winner: it's the new winner.
            margin = indent
        else:
            # Current line and previous winner have no common whitespace:
            # there is no margin.
            margin = ""
            break

    # (The old "if 0 and margin:" debugging block was unreachable dead
    # code and has been removed.)
    if margin:
        # margin contains only spaces/tabs, so it is regex-safe as-is.
        text = re.sub(r'(?m)^' + margin, '', text)
    return text
def indent(text, prefix, predicate=None):
    """Add 'prefix' to the beginning of selected lines in 'text'.

    Lines for which 'predicate(line)' is true receive the prefix.  When
    no predicate is given, every line containing at least one
    non-whitespace character is prefixed.
    """
    if predicate is None:
        def predicate(line):
            return line.strip()

    pieces = []
    for line in text.splitlines(True):
        pieces.append(prefix + line if predicate(line) else line)
    return ''.join(pieces)
if __name__ == "__main__":
#print dedent("\tfoo\n\tbar")
#print dedent(" \thello there\n \t how are you?")
print(dedent("Hello there.\n This is indented."))
|
russellb/nova | refs/heads/master | nova/tests/test_virt.py | 14 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Isaku Yamahata
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import flags
from nova import test
from nova.virt import driver
FLAGS = flags.FLAGS
class TestVirtDriver(test.TestCase):
    """Tests for the block_device_info helpers in nova.virt.driver."""

    def test_block_device(self):
        """The get_root/get_swap/get_ephemerals helpers handle populated,
        empty and None block_device_info mappings."""
        swap = {'device_name': '/dev/sdb',
                'swap_size': 1}
        ephemerals = [{'num': 0,
                       'virtual_name': 'ephemeral0',
                       'device_name': '/dev/sdc1',
                       'size': 1}]
        block_device_mapping = [{'mount_device': '/dev/sde',
                                 'device_path': 'fake_device'}]
        block_device_info = {
            'root_device_name': '/dev/sda',
            'swap': swap,
            'ephemerals': ephemerals,
            'block_device_mapping': block_device_mapping}
        empty_block_device_info = {}

        self.assertEqual(
            driver.block_device_info_get_root(block_device_info), '/dev/sda')
        # assertIsNone gives clearer failure output than
        # assertEqual(..., None).
        self.assertIsNone(
            driver.block_device_info_get_root(empty_block_device_info))
        self.assertIsNone(driver.block_device_info_get_root(None))

        self.assertEqual(
            driver.block_device_info_get_swap(block_device_info), swap)
        self.assertIsNone(driver.block_device_info_get_swap(
            empty_block_device_info)['device_name'])
        self.assertEqual(driver.block_device_info_get_swap(
            empty_block_device_info)['swap_size'], 0)
        self.assertIsNone(
            driver.block_device_info_get_swap({'swap': None})['device_name'])
        self.assertEqual(
            driver.block_device_info_get_swap({'swap': None})['swap_size'],
            0)
        self.assertIsNone(
            driver.block_device_info_get_swap(None)['device_name'])
        self.assertEqual(
            driver.block_device_info_get_swap(None)['swap_size'], 0)

        self.assertEqual(
            driver.block_device_info_get_ephemerals(block_device_info),
            ephemerals)
        self.assertEqual(
            driver.block_device_info_get_ephemerals(empty_block_device_info),
            [])
        self.assertEqual(
            driver.block_device_info_get_ephemerals(None),
            [])

    def test_swap_is_usable(self):
        """swap_is_usable is true only for a real device with size > 0."""
        self.assertFalse(driver.swap_is_usable(None))
        self.assertFalse(driver.swap_is_usable({'device_name': None}))
        self.assertFalse(driver.swap_is_usable({'device_name': '/dev/sdb',
                                                'swap_size': 0}))
        self.assertTrue(driver.swap_is_usable({'device_name': '/dev/sdb',
                                               'swap_size': 1}))
|
g3-3k/libreant | refs/heads/master | libreantdb/__init__.py | 2 | from api import DB
|
sjsucohort6/openstack | refs/heads/master | python/venv/lib/python2.7/site-packages/pymongo/response.py | 17 | # Copyright 2014-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Represent a response from the server."""
class Response(object):
    """Immutable holder for one server reply: raw bytes plus origin."""

    __slots__ = ('_data', '_address')

    def __init__(self, data, address):
        """Record a server reply.

        :Parameters:
          - `data`: Raw BSON bytes.
          - `address`: (host, port) of the source server.
        """
        self._data = data
        self._address = address

    @property
    def address(self):
        """(host, port) of the server that produced this response."""
        return self._address

    @property
    def data(self):
        """The response's raw BSON bytes."""
        return self._data
class ExhaustResponse(Response):
    """Response to an exhaust cursor's initial query.

    Carries, in addition to the raw reply, the socket the server will
    keep streaming batches on and the pool that socket belongs to.
    """

    __slots__ = ('_socket_info', '_pool')

    def __init__(self, data, address, socket_info, pool):
        """Record the initial exhaust-query reply.

        :Parameters:
          - `data`: Raw BSON bytes.
          - `address`: (host, port) of the source server.
          - `socket_info`: The SocketInfo used for the initial query.
          - `pool`: The Pool from which the SocketInfo came.
        """
        super(ExhaustResponse, self).__init__(data, address)
        self._socket_info = socket_info
        self._pool = pool

    @property
    def pool(self):
        """The Pool from which the SocketInfo came."""
        return self._pool

    @property
    def socket_info(self):
        """The SocketInfo used for the initial query.

        The server sends batches on this socket, without waiting for
        getMores from the client, until the result set is exhausted or
        an error occurs.
        """
        return self._socket_info
|
sunlianqiang/kbengine | refs/heads/master | kbe/res/scripts/common/Lib/test/test_dbm_ndbm.py | 91 | from test import support
support.import_module("dbm.ndbm") #skip if not supported
import unittest
import os
import random
import dbm.ndbm
from dbm.ndbm import error
class DbmTestCase(unittest.TestCase):
    """Behavioural tests for the dbm.ndbm module."""

    def setUp(self):
        # Create (and immediately close) the database so every test
        # starts from an existing, empty file.
        self.filename = support.TESTFN
        self.d = dbm.ndbm.open(self.filename, 'c')
        self.d.close()

    def tearDown(self):
        # Different ndbm implementations create different file suffixes.
        for suffix in ['', '.pag', '.dir', '.db']:
            support.unlink(self.filename + suffix)

    def test_keys(self):
        self.d = dbm.ndbm.open(self.filename, 'c')
        # assertEqual reports both operands on failure, unlike
        # assertTrue(x == []).
        self.assertEqual(self.d.keys(), [])
        self.d['a'] = 'b'
        self.d[b'bytes'] = b'data'
        self.d['12345678910'] = '019237410982340912840198242'
        self.d.keys()
        self.assertIn('a', self.d)
        self.assertIn(b'a', self.d)
        self.assertEqual(self.d[b'bytes'], b'data')
        self.d.close()

    def test_modes(self):
        # Every standard open mode must succeed on an existing database.
        for mode in ['r', 'rw', 'w', 'n']:
            try:
                self.d = dbm.ndbm.open(self.filename, mode)
                self.d.close()
            except error:
                # Include the failing mode instead of a bare fail().
                self.fail("dbm.ndbm.open failed for mode %r" % mode)

    def test_context_manager(self):
        with dbm.ndbm.open(self.filename, 'c') as db:
            db["ndbm context manager"] = "context manager"

        with dbm.ndbm.open(self.filename, 'r') as db:
            self.assertEqual(list(db.keys()), [b"ndbm context manager"])

        # Access after the with-block must raise: the handle is closed.
        with self.assertRaises(dbm.ndbm.error) as cm:
            db.keys()
        self.assertEqual(str(cm.exception),
                         "DBM object has already been closed")
# Run the suite when the file is executed directly.
if __name__ == '__main__':
    unittest.main()
|
jumpstarter-io/neutron | refs/heads/master | neutron/db/migration/alembic_migrations/versions/ed93525fd003_bigswitch_quota.py | 3 | # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""bigswitch_quota
Revision ID: ed93525fd003
Revises: 50e86cb2637a
Create Date: 2014-01-05 10:59:19.860397
"""
# revision identifiers, used by Alembic.
revision = 'ed93525fd003'
down_revision = '50e86cb2637a'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.bigswitch.plugin.NeutronRestProxyV2'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
    # Only applies to deployments running the Big Switch plugin (see
    # migration_for_plugins above); everything else is a no-op.
    if not migration.should_run(active_plugins, migration_for_plugins):
        return

    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'quotas',
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('tenant_id', sa.String(length=255), nullable=True),
        sa.Column('resource', sa.String(length=255), nullable=True),
        sa.Column('limit', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade(active_plugins=None, options=None):
    # No-op: the 'quotas' table created in upgrade() is not dropped when
    # rolling back.
    pass
lepistone/sale-workflow | refs/heads/8.0 | __unported__/account_invoice_reorder_lines/invoice.py | 24 | # -*- coding: utf-8 -*-
#
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2013-2014 Camptocamp (<http://www.camptocamp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp.osv import orm
class account_invoice_line(orm.Model):
    # Extend the base invoice-line model only to control record ordering:
    # lines are grouped by invoice (newest invoice first), then sorted by
    # their 'sequence' field, with 'id' as a stable tie-breaker.
    _inherit = 'account.invoice.line'
    _order = 'invoice_id desc, sequence, id'
|
procandi/openwebrtc | refs/heads/master | bindings/java/standard_types.py | 32 | # Copyright (c) 2014-2015, Ericsson AB. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
import config
from type_registry import GirMetaType
from type_registry import TypeTransform
from c_generator import C
# ---------------------------------------------------------------------------
# Shared C helper functions, registered once with the C generator so the
# emitted JNI glue can call them.  A JObjectWrapper pairs a native object
# with a (weak or strong) global JNI reference to its Java peer.
# ---------------------------------------------------------------------------

# Allocate a JObjectWrapper holding a new global (or weak global) ref to jobj.
C.Helper.add_helper('jobject_wrapper_create',
    C.Function('jobject_wrapper_create',
        return_type='JObjectWrapper*',
        params=['jobject jobj', 'gboolean weak'],
        body=[
            C.Decl('JNIEnv*', 'env'),
            C.Decl('JObjectWrapper*', 'wrapper'),
            '',
            C.Assign('env', C.Call('get_jni_env')),
            C.Assign('wrapper', C.Call('g_slice_new0', 'JObjectWrapper')),
            C.Assert('wrapper'),
            C.IfElse(ifs=['weak'],
                bodies=[[
                    C.Assign('wrapper->weak', C.Env('NewWeakGlobalRef', 'jobj')),
                    C.Log.verbose('created weak global ref: %p', 'wrapper->weak'),
                ],[
                    C.Assign('wrapper->obj', C.Env('NewGlobalRef', 'jobj')),
                    C.Log.verbose('created global ref: %p', 'wrapper->obj'),
                ]]
            ),
            C.ExceptionCheck('NULL'),
            '',
            C.Return('wrapper'),
        ]
    )
)

# Release the global (or weak global) ref and free the wrapper slice.
C.Helper.add_helper('jobject_wrapper_destroy',
    C.Function('jobject_wrapper_destroy',
        return_type='void',
        params=['gpointer data_pointer', 'gboolean weak'],
        body=[
            C.Decl('JNIEnv*', 'env'),
            C.Decl('JObjectWrapper*', 'wrapper'),
            '',
            C.Assign('env', C.Call('get_jni_env')),
            C.Assign('wrapper', 'data_pointer', cast='JObjectWrapper*'),
            C.Assert('wrapper'),
            '',
            C.IfElse(ifs=['weak'],
                bodies=[[
                    C.Log.verbose('finalizing weak global ref: %p', 'wrapper->weak'),
                    C.Env('DeleteWeakGlobalRef', 'wrapper->weak'),
                ],[
                    C.Log.verbose('finalizing global ref: %p', 'wrapper->obj'),
                    C.Env('DeleteGlobalRef', 'wrapper->obj'),
                ]]
            ),
            '',
            C.Call('g_slice_free', 'JObjectWrapper', 'wrapper'),
            C.ExceptionCheck(None),
        ]
    )
)

# Wrap a callback's jobject; should_destroy marks call/async scopes whose
# wrapper must be torn down after the callback fires.
C.Helper.add_helper('jobject_callback_wrapper_create',
    C.Function('jobject_callback_wrapper_create',
        return_type='JObjectCallbackWrapper*',
        params=['jobject jobj', 'gboolean should_destroy'],
        body=[
            C.Decl('JObjectCallbackWrapper*', 'callback_wrapper'),
            '',
            C.Assign('callback_wrapper', C.Call('g_slice_new0', 'JObjectCallbackWrapper')),
            C.Assert('callback_wrapper'),
            C.Assign('callback_wrapper->wrapper', C.Helper('jobject_wrapper_create', 'jobj', 'FALSE')),
            C.Assign('callback_wrapper->should_destroy', 'should_destroy'),
            '',
            C.Return('callback_wrapper'),
        ]
    )
)

# Destroy a callback wrapper: drop the inner (strong) wrapper, free the slice.
C.Helper.add_helper('jobject_callback_wrapper_destroy',
    C.Function('jobject_callback_wrapper_destroy',
        return_type='void',
        params=['gpointer user_data'],
        body=[
            C.Decl('JObjectCallbackWrapper*', 'callback_wrapper'),
            '',
            C.Assign('callback_wrapper', 'user_data', cast='JObjectCallbackWrapper*'),
            C.Helper('jobject_wrapper_destroy', 'callback_wrapper->wrapper', 'FALSE'),
            C.Call('g_slice_free', 'JObjectCallbackWrapper', 'callback_wrapper'),
        ]
    )
)

# GClosureNotify-compatible shim so a wrapper can be used as closure data.
C.Helper.add_helper('jobject_wrapper_closure_notify',
    C.Function('jobject_wrapper_closure_notify',
        return_type='void',
        params=['gpointer data_pointer', 'GClosure* ignored'],
        body=[
            C.Decl('(void)', 'ignored'),
            C.Helper('jobject_wrapper_destroy', 'data_pointer', 'FALSE'),
        ]
    )
)

# Map a GObject to its Java peer: reuse the cached "java_instance" wrapper
# if present, otherwise construct a new Java object around a GWeakRef.
C.Helper.add_helper('gobject_to_jobject',
    C.Function('gobject_to_jobject',
        return_type='jobject',
        params=['JNIEnv* env', 'gpointer data_pointer', 'gboolean take_ref'],
        body=[
            C.Decl('GObject*', 'gobj'),
            C.Decl('JObjectWrapper*', 'wrapper'),
            '',
            C.If('!data_pointer',
                C.Log.verbose('got jobject[NULL] from GObject[null]'),
                C.Return('NULL')),
            C.Assign('gobj', C.Call('G_OBJECT', 'data_pointer')),
            '',
            C.Assign('wrapper', C.Call('g_object_get_data', 'gobj', '"java_instance"'), cast='JObjectWrapper*'),
            C.IfElse(ifs=['wrapper'],
                bodies=[[
                    C.Log.verbose('got jobject[%p] from gobject[%p]', 'wrapper->obj', 'gobj'),
                    C.Return('wrapper->obj'),
                ], [
                    C.Decl('jobject', 'jobj'),
                    C.Decl('jclass', 'clazz'),
                    C.Decl('jobject', 'native_pointer'),
                    C.Decl('GWeakRef*', 'ref'),
                    '',
                    C.Assign('clazz', C.Call('g_hash_table_lookup', 'gobject_to_java_class_map', C.Call('G_OBJECT_TYPE', 'gobj'))),
                    C.If('!clazz', [
                        C.Log.error('Java class not found for GObject type: %s', C.Call('G_OBJECT_TYPE_NAME', 'gobj')),
                        C.Return('NULL'),
                    ]),
                    '',
                    C.If('take_ref', C.Call('g_object_ref', 'gobj')),
                    '',
                    C.Assign('ref', C.Call('g_new', 'GWeakRef', '1')),
                    C.Call('g_weak_ref_init', 'ref', 'gobj'),
                    '',
                    C.Assign('native_pointer', C.Env.new('NativePointer', '(jlong) ref')),
                    C.ExceptionCheck('NULL'),
                    '',
                    C.Assign('jobj', C.Env('NewObject', 'clazz', C.Cache.method('NativeInstance', '_constructor'), 'native_pointer')),
                    C.ExceptionCheck('NULL'),
                    '',
                    C.Assign('wrapper', C.Helper('jobject_wrapper_create', 'jobj', 'TRUE')),
                    C.Assert('wrapper'),
                    C.Call('g_object_set_data', 'gobj', '"java_instance"', 'wrapper'),
                    '',
                    C.Log.verbose('got jobject[%p] from GObject[%p]', 'jobj', 'gobj'),
                    C.Return('jobj'),
                ]]),
        ]
    )
)

# Reverse mapping: recover the GObject from the Java object's GWeakRef field.
C.Helper.add_helper('jobject_to_gobject',
    C.Function('jobject_to_gobject',
        return_type='gpointer',
        params=['JNIEnv* env', 'jobject jobj'],
        body=[
            C.Decl('GWeakRef*', 'ref'),
            C.Decl('gpointer', 'gobj'),
            '',
            C.If('!jobj',
                C.Log.verbose('got GObject[NULL] from jobject[null]'),
                C.Return('NULL')),
            '',
            C.Assign('ref', C.Env.field('jobj', ('NativeInstance', 'nativeInstance')), cast='GWeakRef*'),
            C.Assign('gobj', C.Call('g_weak_ref_get', 'ref')),
            C.If('!gobj',
                C.Env.throw('IllegalStateException', '"GObject ref was NULL at translation"')),
            C.Log.verbose('got gobject[%p] from jobject[%p]', 'gobj', 'jobj'),
            C.Return('gobj'),
        ]
    )
)

# Box a GValue into the matching java.lang wrapper (Boolean, Integer, ...,
# or String); unknown value types yield NULL.
C.Helper.add_helper('gvalue_to_jobject',
    C.Function('gvalue_to_jobject',
        return_type='jobject',
        params=['JNIEnv* env', 'GValue* value'],
        body=[
            C.Decl('jobject', 'obj'),
            '',
            C.Switch(C.Call('G_VALUE_TYPE', 'value'), cases=[
                (args[0], [
                    C.Decl(args[1], 'val'),
                    C.Assign('val', C.Call(args[2], 'value'), cast=args[1]),
                    C.Assign('obj', C.Env.static_method((args[3], 'valueOf'), 'val')),
                ]) for args in [
                    ['G_TYPE_BOOLEAN', 'jboolean', 'g_value_get_boolean', 'Boolean'],
                    ['G_TYPE_CHAR', 'jchar', 'g_value_get_schar', 'Character'],
                    ['G_TYPE_UCHAR', 'jchar', 'g_value_get_uchar', 'Character'],
                    ['G_TYPE_INT', 'jint', 'g_value_get_int', 'Integer'],
                    ['G_TYPE_UINT', 'jint', 'g_value_get_uint', 'Integer'],
                    ['G_TYPE_LONG', 'jlong', 'g_value_get_long', 'Long'],
                    ['G_TYPE_ULONG', 'jlong', 'g_value_get_ulong', 'Long'],
                    ['G_TYPE_INT64', 'jlong', 'g_value_get_int64', 'Long'],
                    ['G_TYPE_UINT64', 'jlong', 'g_value_get_uint64', 'Long'],
                    ['G_TYPE_FLOAT', 'jfloat', 'g_value_get_float', 'Float'],
                    ['G_TYPE_DOUBLE', 'jdouble', 'g_value_get_double', 'Double'],
                ]
            ] + [('G_TYPE_STRING',[
                C.Decl('const gchar*', 'str'),
                C.Assign('str', C.Call('g_value_get_string', 'value')),
                C.IfElse(ifs=['str'], bodies=[[
                    C.Assign('obj', C.Env('NewStringUTF', 'str')),
                ],[
                    C.Assign('obj', 'NULL'),
                ]]),
            ])],
            default=[
                C.Assign('obj', 'NULL'),
            ]),
            '',
            C.Return('obj'),
        ]
    )
)
class PrimitiveMetaType(GirMetaType):
    """Class factory for primitive (pass-by-value) type descriptors.

    Calling PrimitiveMetaType(...) runs __new__ with the type-mapping
    arguments and returns a descriptor type; the concrete *Type classes
    below subclass those results.  Per-parameter instances are then
    created via __init__.
    """
    default_value = '0'
    def __init__(self, name, transfer_ownership=False, allow_none='Ignored'):
        # Primitives are copied by value, so ownership transfer is
        # meaningless and allow_none is forced to True regardless of the
        # caller's value (hence the 'Ignored' placeholder default).
        assert transfer_ownership == False
        super(PrimitiveMetaType, self).__init__(name, transfer_ownership, allow_none=True)
    def __new__(cls, java_type, jni_type, c_type, java_signature, object_type):
        # Record the Java/JNI/C triple plus the boxed java.lang wrapper type.
        new = super(PrimitiveMetaType, cls).__new__(cls)
        new.gir_type = c_type
        new.java_type = java_type
        new.jni_type = jni_type
        new.c_type = c_type
        new.java_signature = java_signature
        new.object_type = object_type
        new.object_full_type = 'java.lang.' + object_type
        return new
    def transform_to_c(self):
        # Length params are synthesized by the array transform, not converted.
        if self.is_length_param:
            return TypeTransform()
        else:
            # A plain C cast suffices for primitives.
            return TypeTransform([
                C.Decl(self.c_type, self.c_name),
            ],[
                C.Assign(self.c_name, self.jni_name, cast=self.c_type),
            ])
    def transform_to_jni(self):
        if self.is_length_param:
            return TypeTransform()
        else:
            return TypeTransform([
                C.Decl(self.jni_type, self.jni_name),
            ],[
                C.Assign(self.jni_name, self.c_name, cast=self.jni_type)
            ])
class PrimitiveArrayMetaType(GirMetaType):
    """Class factory for primitive array descriptors (e.g. jbyteArray).

    Arrays carry an implicit length parameter (self.length) that the
    transforms declare and fill alongside the data pointer.
    """
    is_array = True
    has_local_ref = True
    def __init__(self, name, transfer_ownership, allow_none, c_array_type='gpointer'):
        super(PrimitiveArrayMetaType, self).__init__(name, transfer_ownership, allow_none)
        # The C-side array is handled as an untyped pointer by default.
        self.c_type = c_array_type
    def __new__(cls, java_type, jni_type, c_type, java_signature, object_type):
        new = super(PrimitiveArrayMetaType, cls).__new__(cls)
        new.gir_type = c_type
        new.java_type = java_type + '[]'
        # e.g. 'byte' -> 'Byte', used to build Get/Release<Type>ArrayElements.
        new.primitive_type_name = java_type.title()
        new.jni_type = jni_type
        new.c_element_type = c_type
        new.java_signature = '[' + java_signature
        new.object_type = object_type + '[]'
        new.object_full_type = 'java.lang.' + object_type
        return new
    @staticmethod
    def from_primitive_type(typ):
        # Derive the array descriptor from its element descriptor.
        return PrimitiveArrayMetaType(
            typ.java_type,
            typ.jni_type + 'Array',
            typ.c_type,
            typ.java_signature,
            typ.object_type,
        )
    def transform_to_c(self):
        assert not self.transfer_ownership  # transfer not implemented
        # Pin the Java array, expose its elements and length to C, and
        # release with JNI_ABORT (discarding changes) on cleanup.
        # self.jni_type[:-5] strips the 'Array' suffix to get the element type.
        return TypeTransform([
            C.Decl(self.c_type, self.c_name),
            C.Decl('jsize', self.length.jni_name),
            C.Decl(self.length.c_type, self.length.c_name),
        ], [
            C.Assert('sizeof(%s) == sizeof(%s)' % (self.c_element_type, self.jni_type[:-5])),
            C.Assign(self.c_name, C.Env('Get%sArrayElements' % self.primitive_type_name, self.jni_name, 'NULL'), cast=self.c_type),
            C.ExceptionCheck.default(self),
            C.Assign(self.length.c_name, C.Env('GetArrayLength', '(jarray) ' + self.jni_name), cast=self.length.c_type),
            C.ExceptionCheck.default(self),
        ], [
            # discard any changes
            C.Env('Release%sArrayElements' % self.primitive_type_name, self.jni_name, self.c_name, 'JNI_ABORT'),
            C.ExceptionCheck.default(self),
        ])
    def transform_to_jni(self):
        # Allocate a new Java array and copy the C buffer into it; free the
        # C buffer afterwards only if ownership was transferred to us.
        return TypeTransform([
            C.Decl(self.jni_type, self.jni_name),
            C.Decl('jsize', self.length.jni_name),
        ], [
            C.Assert('sizeof(%s) == sizeof(%s)' % (self.c_element_type, self.jni_type[:-5])),
            C.Assign(self.length.jni_name, self.length.c_name, cast='jsize'),
            C.Assign(self.jni_name, C.Env('New%sArray' % self.primitive_type_name, self.length.jni_name)),
            C.ExceptionCheck.default(self),
            C.Env('Set%sArrayRegion' % self.primitive_type_name, self.jni_name, '0', self.length.jni_name, '(const %s*)' % self.jni_type[:-5] + self.c_name),
        ], self.transfer_ownership and [
            C.Call('g_free', self.c_name),
        ])
# Concrete primitive descriptors: one class per GLib scalar type, mapping it
# to (java type, jni type, c type, JVM signature, boxed java.lang type).
class CharType    (PrimitiveMetaType('byte',    'jbyte',    'gchar',    'B', 'Byte')): pass
class UcharType   (PrimitiveMetaType('byte',    'jbyte',    'guchar',   'B', 'Byte')): pass
class Int8Type    (PrimitiveMetaType('byte',    'jbyte',    'gint8',    'B', 'Byte')): pass
class Uint8Type   (PrimitiveMetaType('byte',    'jbyte',    'guint8',   'B', 'Byte')): pass
class ShortType   (PrimitiveMetaType('short',   'jshort',   'gshort',   'S', 'Short')): pass
class UshortType  (PrimitiveMetaType('short',   'jshort',   'gushort',  'S', 'Short')): pass
class Int16Type   (PrimitiveMetaType('short',   'jshort',   'gint16',   'S', 'Short')): pass
class Uint16Type  (PrimitiveMetaType('short',   'jshort',   'guint16',  'S', 'Short')): pass
class IntType     (PrimitiveMetaType('int',     'jint',     'gint',     'I', 'Integer')): pass
class UintType    (PrimitiveMetaType('int',     'jint',     'guint',    'I', 'Integer')): pass
# BUG FIX: 'gint32'/'guint32' were swapped between these two classes so the
# Uint32 descriptor registered for the *signed* GLib type and vice versa.
# Both map to the same Java 'int', so generated output is unaffected.
class Uint32Type  (PrimitiveMetaType('int',     'jint',     'guint32',  'I', 'Integer')): pass
class Int32Type   (PrimitiveMetaType('int',     'jint',     'gint32',   'I', 'Integer')): pass
class LongType    (PrimitiveMetaType('long',    'jlong',    'glong',    'J', 'Long')): pass
class UlongType   (PrimitiveMetaType('long',    'jlong',    'gulong',   'J', 'Long')): pass
class LongPtrType (PrimitiveMetaType('long',    'jlong',    'gpointer', 'J', 'Long')): pass
class SizeType    (PrimitiveMetaType('long',    'jlong',    'gsize',    'J', 'Long')): pass
class SsizeType   (PrimitiveMetaType('long',    'jlong',    'gssize',   'J', 'Long')): pass
class OffsetType  (PrimitiveMetaType('long',    'jlong',    'goffset',  'J', 'Long')): pass
class Int64Type   (PrimitiveMetaType('long',    'jlong',    'gint64',   'J', 'Long')): pass
class Uint64Type  (PrimitiveMetaType('long',    'jlong',    'guint64',  'J', 'Long')): pass
class BooleanType (PrimitiveMetaType('boolean', 'jboolean', 'gboolean', 'Z', 'Boolean')): pass
class FloatType   (PrimitiveMetaType('float',   'jfloat',   'gfloat',   'F', 'Float')): pass
class DoubleType  (PrimitiveMetaType('double',  'jdouble',  'gdouble',  'D', 'Double')): pass
class GWeakRefType(PrimitiveMetaType('long', 'jlong', 'gpointer', 'J', 'Long')):
    """Pointer passed to Java as a jlong holding a freshly allocated GWeakRef."""
    def transform_to_jni(self):
        ref = self.c_name + '_ref'
        # Allocate a GWeakRef tracking the C object and hand its address to
        # Java as a long.  NOTE(review): the g_new'd ref is never freed here;
        # ownership presumably passes to the Java side — confirm.
        return TypeTransform([
            C.Decl('GWeakRef*', ref),
            C.Decl(self.jni_type, self.jni_name),
        ],[
            C.Assign(ref, C.Call('g_new', 'GWeakRef', '1')),
            C.Call('g_weak_ref_init', ref, self.c_name),
            C.Assign(self.jni_name, ref, cast=self.jni_type),
        ])
class VoidType(GirMetaType()):
    """Descriptor for 'void'; it carries no value, so no transform is valid."""
    gir_type = 'none'
    java_type = 'void'
    jni_type = 'void'
    c_type = 'void'
    java_signature = 'V'
    default_value = None
    def __init__(self, name=None, transfer_ownership=False, allow_none=False):
        # Accept (and discard) the standard constructor arguments so VoidType
        # can be instantiated uniformly with the other descriptors.
        super(VoidType, self).__init__(None)
    def transform_to_c(self):
        raise AssertionError('VoidType.transform_to_c should not be reached')
    def transform_to_jni(self):
        raise AssertionError('VoidType.transform_to_jni should not be reached')
class GParamSpecType(GirMetaType()):
    """GParamSpec* parameter: present on the C side only, never surfaced to Java."""
    gir_type = None
    java_type = None
    jni_type = None
    c_type = 'GParamSpec*'
    java_signature = None
    def transform_to_c(self):
        # Nothing to convert; the value is dropped.
        return TypeTransform()
    def transform_to_jni(self):
        return TypeTransform()
class ObjectMetaType(GirMetaType):
    """Class factory for reference types (passed as jobject).

    __new__ derives the JVM signature ('Lpkg/Class;') from the package and
    Java class name when one exists.
    """
    jni_type = 'jobject'
    default_value = 'NULL'
    has_local_ref = True
    def __new__(cls, gir_type, java_type, c_type, package):
        new = super(ObjectMetaType, cls).__new__(cls)
        if java_type:
            new.java_full_class = package + '.' + java_type
            new.java_class_path = new.java_full_class.replace('.', '/')
            new.java_signature = 'L' + new.java_class_path + ';'
        new.gir_type = gir_type
        new.java_type = java_type
        new.c_type = c_type
        return new
class JObjectWrapperType(ObjectMetaType(
    gir_type='gpointer',
    java_type=None,
    c_type='gpointer',
    package=None,
)):
    """user_data parameter: a Java object smuggled through C as a wrapper.

    When attached to a callback (scope set), the wrapper is the callback-
    aware JObjectCallbackWrapper; otherwise a plain JObjectWrapper.
    """
    has_local_ref = False
    def __init__(self, name, closure, transfer_ownership):
        super(JObjectWrapperType, self).__init__(name, transfer_ownership, allow_none=False)
        # 'closure' is the callback parameter this user_data belongs to;
        # a standalone user_data closes over itself.
        if closure is None:
            closure = self
        self.closure = closure
        self.scope = getattr(closure, 'scope', None)
    def transform_to_c(self):
        create = None
        if self.scope is None:
            create = C.Helper('jobject_wrapper_create', self.closure.jni_name, 'FALSE')
        else:
            # async-scoped callbacks destroy their wrapper after one call.
            create = C.Helper('jobject_callback_wrapper_create', self.closure.jni_name,
                    'TRUE' if self.scope == 'async' else 'FALSE')
        return TypeTransform([
            C.Decl(self.c_type, self.c_name),
        ],[
            C.Assign(self.c_name, create),
        ], self.scope == 'call' and [
            # call-scoped wrappers only live for the duration of this call.
            C.Helper('jobject_callback_wrapper_destroy', self.c_name),
        ])
    def transform_to_jni(self):
        get = None
        # NOTE(review): the trailing ';' inside these strings likely produces
        # a doubled ';;' once C.Assign appends its own terminator — harmless
        # in C but worth confirming against c_generator.
        if self.transfer_ownership:
            get = '((JObjectCallbackWrapper*) %s)->wrapper->obj;' % self.c_name
        else:
            get = '((JObjectWrapper*) %s)->obj;' % self.c_name
        return TypeTransform([
            C.Decl(self.jni_type, self.jni_name),
        ],[
            C.Assign(self.jni_name, get),
        ], self.transfer_ownership and [
            C.If('((JObjectCallbackWrapper *) %s)->should_destroy' % self.c_name,
                C.Helper('jobject_callback_wrapper_destroy', self.c_name),
            ),
        ])
class EnumMetaType(ObjectMetaType):
    """Class factory for GLib enums exposed as Java enum classes."""
    has_local_ref = False
    def __new__(cls, gir_type, c_type, prefix):
        # The Java enum shares the gir type name and lives under the package
        # root plus the library prefix.
        return super(EnumMetaType, cls).__new__(cls,
            gir_type=gir_type,
            java_type=gir_type,
            c_type=c_type,
            package=config.PACKAGE_ROOT + '.' + prefix,
        )
    def transform_to_c(self):
        # Java -> C: read the enum's integer via ValueEnum.getValue().
        return TypeTransform([
            C.Decl(self.c_type, self.c_name),
        ],[
            C.Assign(self.c_name, C.Env.method(self.jni_name, ('ValueEnum', 'getValue')), cast=self.c_type),
            C.ExceptionCheck.default(self),
        ])
    def transform_to_jni(self):
        # C -> Java: delegate to the generated <gir_type>_to_java_enum helper.
        return TypeTransform([
            C.Decl(self.jni_type, self.jni_name),
        ],[
            C.Assign(self.jni_name, C.Helper(self.gir_type + '_to_java_enum', 'env', self.c_name)),
        ])
class JDestroyType(ObjectMetaType(
    gir_type=None,
    java_type=None,
    c_type='GDestroyNotify',
    package=None,
)):
    """GDestroyNotify parameter: always filled with the wrapper-destroy helper."""
    jni_type=None
    def transform_to_c(self):
        # NOTE(review): this bare C.Helper(...) call's result is discarded —
        # presumably it registers the helper as used so its definition gets
        # emitted; confirm against c_generator.
        C.Helper('jobject_callback_wrapper_destroy')
        return TypeTransform([
            C.Decl(self.c_type, self.c_name),
        ],[
            C.Assign(self.c_name, 'jobject_callback_wrapper_destroy'),
        ])
class CallbackMetaType(ObjectMetaType):
    """Class factory for callback (function-pointer) parameters."""
    def __init__(self, name, transfer_ownership=False, allow_none=False, scope=None):
        super(CallbackMetaType, self).__init__(name, transfer_ownership, allow_none)
        # GIR callback scopes: None/'call'/'async'/'notified' control the
        # lifetime of the user_data wrapper (see JObjectWrapperType).
        assert scope in [None, 'call', 'async', 'notified']
        self.scope = scope
    def __new__(cls, gir_type, c_type, prefix):
        return super(CallbackMetaType, cls).__new__(cls,
            gir_type=gir_type,
            java_type=gir_type,
            c_type=c_type,
            package=config.PACKAGE_ROOT + '.' + prefix,
        )
    def transform_to_c(self):
        # The C side always receives the generated trampoline
        # callback_<gir_type>; the Java callback rides along as user_data.
        return TypeTransform([
            C.Decl(self.c_type, self.c_name),
        ],[
            C.Assign(self.c_name, 'G_CALLBACK(callback_' + self.gir_type + ')'),
        ])
class ClassCallbackMetaType(CallbackMetaType):
    """Callback declared as a nested interface of an outer Java class."""
    def __new__(cls, java_type, outer):
        new = super(ClassCallbackMetaType, cls).__new__(cls,
            gir_type=java_type,
            c_type='GCallback',
            prefix='ignored',
        )
        # Rebuild naming relative to the outer class: dotted for Java source,
        # '$'-separated for the JVM class path / signature.
        new.outer_java_type = outer.java_type
        new.gir_type = outer.gir_type + '_' + java_type
        new.java_full_class = outer.java_full_class + '.' + java_type
        new.java_class_path = outer.java_class_path + '$' + java_type
        new.java_signature = 'L' + new.java_class_path + ';'
        return new
class GObjectMetaType(ObjectMetaType):
    """Class factory for GObject subclasses bridged via the wrapper helpers."""
    has_local_ref = False
    def __new__(cls, gir_type, c_type, prefix):
        return super(GObjectMetaType, cls).__new__(cls,
            gir_type=gir_type,
            java_type=gir_type,
            c_type=c_type + '*',
            package=config.PACKAGE_ROOT + '.' + prefix if prefix is not None else config.PACKAGE_ROOT,
        )
    def transform_to_c(self):
        # Java -> C via the jobject_to_gobject helper; take an extra ref when
        # the callee assumes ownership.
        return TypeTransform([
            C.Decl(self.c_type, self.c_name),
        ],[
            C.Assign(self.c_name, C.Helper('jobject_to_gobject', 'env', self.jni_name)),
            C.Call('g_object_ref', self.c_name) if self.transfer_ownership else [],
        ])
    def transform_to_jni(self):
        # C -> Java via gobject_to_jobject; take_ref=TRUE when the C side
        # keeps ownership (i.e. nothing was transferred to us).
        return TypeTransform([
            C.Decl(self.jni_type, self.jni_name),
        ],[
            C.Assign(self.jni_name, C.Helper('gobject_to_jobject',
                'env', self.c_name, 'TRUE' if not self.transfer_ownership else 'FALSE'))
        ])
class StringMetaType(ObjectMetaType):
    """Class factory for UTF-8 string parameters (gchar* / const gchar*).

    NULL C strings and null jstrings are passed through as NULL/null.
    """
    def __new__(cls, c_type):
        return super(StringMetaType, cls).__new__(cls,
            gir_type='utf8',
            java_type='String',
            c_type=c_type,
            package='java.lang',
        )
    def transform_to_c(self):
        """Java -> C: pin the jstring's UTF-8 bytes (copying when the callee
        takes ownership) and release the pinned buffer on cleanup."""
        if self.transfer_ownership:
            # Callee keeps the string: g_strdup a copy so the JNI buffer can
            # be released immediately after the call.
            tmp = self.c_name + '_tmp'
            return TypeTransform([
                C.Decl(self.c_type, self.c_name),
                C.Decl(self.c_type, tmp),
            ],[
                C.IfElse(ifs=[self.jni_name], bodies=[[
                    C.Assign(tmp, C.Env('GetStringUTFChars', self.jni_name, 'NULL'), cast=self.c_type),
                    C.ExceptionCheck.default(self),
                    C.Assign(self.c_name, C.Call('g_strdup', tmp)),
                ],[
                    C.Assign(self.c_name, 'NULL'),
                ]])
            ],[
                C.If(self.jni_name, C.Env('ReleaseStringUTFChars', self.jni_name, tmp)),
            ])
        else:
            # Borrowed string: hand the pinned JNI buffer straight to C.
            return TypeTransform([
                C.Decl(self.c_type, self.c_name),
            ],[
                C.IfElse(ifs=[self.jni_name], bodies=[[
                    C.Assign(self.c_name, C.Env('GetStringUTFChars', self.jni_name, 'NULL'), cast=self.c_type),
                    C.ExceptionCheck.default(self),
                ],[
                    C.Assign(self.c_name, 'NULL'),
                ]]),
            ],[
                C.If(self.jni_name, C.Env('ReleaseStringUTFChars', self.jni_name, self.c_name)),
            ])
    def transform_to_jni(self):
        """C -> Java: copy into a new jstring; free the C string if owned."""
        return TypeTransform([
            C.Decl(self.jni_type, self.jni_name),
        ],[
            # FIX: bodies must be a list of statement lists ([[...],[...]]),
            # matching every other C.IfElse call site in this file; the
            # original passed the two statements unnested.
            C.IfElse(ifs=[self.c_name], bodies=[[
                C.Assign(self.jni_name, C.Env('NewStringUTF', self.c_name)),
            ],[
                C.Assign(self.jni_name, 'NULL'),
            ]]),
        ], self.transfer_ownership and [
            C.Call('g_free', self.c_name),
        ])
class GValueType(ObjectMetaType(
    gir_type='GObject.Value',
    java_type='Object',
    c_type='GValue*',
    package='java.lang',
)):
    """GValue* surfaced to Java as a boxed java.lang.Object (C-to-Java only)."""
    def transform_to_jni(self):
        # Box via the gvalue_to_jobject helper; reset the GValue afterwards
        # when ownership was transferred to us.
        return TypeTransform([
            C.Decl(self.jni_type, self.jni_name),
        ], [
            C.Assign(self.jni_name, C.Helper('gvalue_to_jobject', 'env', self.c_name)),
        ], self.transfer_ownership and [
            C.Call('g_value_reset', self.c_name),
        ])
class ContainerMetaType(ObjectMetaType):
    """Class factory for generic containers (List, HashMap, EnumSet).

    Instances hold the element descriptors in self.inner_values, and the
    Java generic type is parameterized with their boxed types.
    """
    is_container = True
    def __init__(self, name, transfer_ownership, allow_none, *inner_values):
        super(ContainerMetaType, self).__init__(name, transfer_ownership, allow_none)
        self.inner_values = inner_values
        # e.g. 'List' -> 'List<String>' / 'java.util.List<java.lang.String>'.
        self.java_type = '%s<%s>' % (self.java_type, ', '.join(typ.object_type for typ in self.inner_values))
        self.java_full_class = '%s<%s>' % (self.java_full_class, ', '.join(typ.object_full_type for typ in self.inner_values))
    def __new__(cls, gir_type, java_type, c_type):
        return super(ContainerMetaType, cls).__new__(cls,
            gir_type=gir_type,
            java_type=java_type,
            c_type=c_type,
            package='java.util',
        )
    def transform_to_jni(self):
        # Concatenate the element transforms; subclasses embed the pieces
        # into their own iteration scaffolding.  Cleanups run in reverse.
        inner_transforms = [value.transform_to_jni() for value in self.inner_values]
        return TypeTransform(
            sum([transform.declarations for transform in inner_transforms], []),
            sum([transform.conversion for transform in inner_transforms], []),
            sum([transform.cleanup for transform in reversed(inner_transforms)], []),
        )
class BitfieldMetaType(ContainerMetaType):
    """GLib flags (bitfield) exposed to Java as an EnumSet of the enum type."""
    is_container = False
    def __init__(self, name, transfer_ownership, allow_none):
        # The single inner value is an instance of the paired enum descriptor.
        super(BitfieldMetaType, self).__init__(name, transfer_ownership, allow_none,
            self.inner_type(name + '_enum'))
        (self.inner_value,) = self.inner_values
    def __new__(cls, gir_type, c_type, prefix=None):
        new = super(BitfieldMetaType, cls).__new__(cls,
            gir_type=gir_type,
            java_type='EnumSet',
            c_type=c_type,
        )
        new.inner_type = EnumMetaType(gir_type, c_type, prefix)
        return new
    def transform_to_c(self):
        # Java -> C: OR together getValue() of every element in the EnumSet.
        it = self.jni_name + '_iterator'
        enum = self.inner_value.jni_name
        return TypeTransform([
            C.Decl(self.c_type, self.c_name),
            C.Decl('jobject', it),
            C.Decl('jobject', enum),
        ],[
            C.Assign(self.c_name, '0'),
            C.Assign(it, C.Env.method(self.jni_name, ('Iterable', 'iterator'))),
            C.While(C.Env.method(it, ('Iterator', 'hasNext')),
                C.Assign(enum, C.Env.method(it, ('Iterator', 'next'))),
                C.Assign(self.c_name, C.Env.method(enum, ('ValueEnum', 'getValue')), cast=self.c_type, op='|='),
                C.ExceptionCheck.default(self),
            )
        ])
    def transform_to_jni(self):
        # C -> Java: peel off the lowest set bit (x & -x) one at a time,
        # convert it to the Java enum constant, and add it to the EnumSet;
        # x & (x - 1) clears that bit for the next iteration.
        enum = self.inner_value
        return TypeTransform([
            C.Decl(self.jni_type, self.jni_name),
            C.Decl(enum.jni_type, enum.jni_name),
            C.Decl(enum.c_type, enum.c_name),
        ],[
            C.Assign(self.jni_name, C.Env.static_method(('EnumSet', 'noneOf'), C.Cache(enum.java_type))),
            C.While(self.c_name,
                C.Assign(enum.c_name, "{0} & -{0}".format(self.c_name)),
                C.Assign(enum.jni_name, C.Helper(self.gir_type + '_to_java_enum', 'env', enum.c_name)),
                C.Env.method(self.jni_name, ('EnumSet', 'add'), enum.jni_name),
                C.Assign(self.c_name, "{0} & ({0} - 1)".format(self.c_name)),
            )
        ])
class GListType(ContainerMetaType(
    gir_type='GLib.List',
    java_type='List',
    c_type='GList*',
)):
    """GList* <-> java.util.List conversion (C-to-Java only)."""
    def __init__(self, *args, **kwargs):
        super(GListType, self).__init__(*args, **kwargs)
        # Exactly one inner type: the list's element descriptor.
        (self.inner_value,) = self.inner_values
    def transform_to_jni(self):
        """Build the C code that walks the GList into a new ArrayList."""
        it = self.c_name + '_it'
        inner_transforms = super(GListType, self).transform_to_jni()
        return TypeTransform([
            C.Decl(self.jni_type, self.jni_name),
            C.Decl(self.c_type, it),
            C.Decl(self.inner_value.c_type, self.inner_value.c_name),
            inner_transforms.declarations,
        ],[
            C.Assign(self.jni_name, C.Env.new('ArrayList')),
            C.Assign(it, self.c_name),
            C.While(it,
                C.Assign(self.inner_value.c_name, it + '->data'),
                inner_transforms.conversion,
                C.Env.method(self.jni_name, ('ArrayList', 'add'), self.inner_value.jni_name),
                C.ExceptionCheck.default(self),
                # Drop the per-element local ref so long lists don't exhaust
                # the JNI local reference table.
                C.Env('DeleteLocalRef', self.inner_value.jni_name) if self.inner_value.has_local_ref else [],
                inner_transforms.cleanup,
                C.Assign(it, it + '->next'),
            ),
        ])
class GHashTableType(ContainerMetaType(
    gir_type='GLib.HashTable',
    java_type='HashMap',
    c_type='GHashTable*',
)):
    """GHashTable* <-> java.util.HashMap conversion (C-to-Java only)."""
    def __init__(self, *args, **kwargs):
        super(GHashTableType, self).__init__(*args, **kwargs)
        # Exactly two inner types: the table's key and value descriptors.
        (self.inner_key, self.inner_value) = self.inner_values
    def transform_to_jni(self):
        """Build the C code that copies a GHashTable into a new HashMap.

        A NULL table maps to a null HashMap; the table is unref'd afterwards
        when ownership was transferred to us.
        """
        it = self.c_name + '_it'
        inner_transforms = super(GHashTableType, self).transform_to_jni()
        return TypeTransform([
            C.Decl(self.jni_type, self.jni_name),
            C.Decl('GHashTableIter', it),
            C.Decl(self.inner_key.c_type, self.inner_key.c_name),
            C.Decl(self.inner_value.c_type, self.inner_value.c_name),
            inner_transforms.declarations,
        ], [
            C.Assign(self.jni_name, 'NULL'),
            C.If(self.c_name, [
                C.Assign(self.jni_name, C.Env.new('HashMap')),
                C.ExceptionCheck.default(self),
                C.Call('g_hash_table_iter_init', '&' + it, self.c_name),
                C.While(C.Call('g_hash_table_iter_next', '&' + it, '(void **) &' + self.inner_key.c_name, '(void **) &' + self.inner_value.c_name),
                    inner_transforms.conversion,
                    C.Env.method(self.jni_name, ('HashMap', 'put'), self.inner_key.jni_name, self.inner_value.jni_name),
                    C.ExceptionCheck.default(self),
                    # BUG FIX: the key's DeleteLocalRef was gated on the
                    # *value* type's has_local_ref (copy-paste slip), leaking
                    # one JNI local ref per entry whenever the key type has a
                    # local ref but the value type does not.
                    C.Env('DeleteLocalRef', self.inner_key.jni_name) if self.inner_key.has_local_ref else [],
                    C.Env('DeleteLocalRef', self.inner_value.jni_name) if self.inner_value.has_local_ref else [],
                    inner_transforms.cleanup,
                ),
            ]),
        ], self.transfer_ownership and [
            C.If(self.c_name, [
                C.Call('g_hash_table_unref', self.c_name),
            ]),
        ])
# All primitive scalar descriptors, in declaration order.
primitive_types = [
    CharType,
    UcharType,
    Int8Type,
    Uint8Type,
    ShortType,
    UshortType,
    Int16Type,
    Uint16Type,
    IntType,
    UintType,
    Uint32Type,
    Int32Type,
    LongType,
    UlongType,
    LongPtrType,
    SizeType,
    SsizeType,
    OffsetType,
    Int64Type,
    Uint64Type,
    BooleanType,
    FloatType,
    DoubleType,
]
# One array descriptor per primitive (jbyteArray, jintArray, ...).
primitive_array_types = [PrimitiveArrayMetaType.from_primitive_type(t) for t in primitive_types]
# The full set registered with the type registry for standard GIR types.
standard_types = primitive_types + primitive_array_types + [
    VoidType,
    GValueType,
    StringMetaType('gchar*'),
    StringMetaType('const gchar*'),
    GListType,
    GHashTableType,
]
|
allmende/synnefo | refs/heads/develop | snf-cyclades-app/synnefo/volume/management/commands/snapshot-show.py | 6 | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from snf_django.management.commands import SynnefoCommand, CommandError
from synnefo.management import common
from synnefo.plankton.backend import PlanktonBackend
from snf_django.management import utils
class Command(SynnefoCommand):
    """Management command that prints every attribute of one snapshot."""
    args = "<snapshot_id>"
    help = "Display available information about a snapshot"

    @common.convert_api_faults
    def handle(self, *args, **options):
        """Look up the snapshot (ignoring permissions) and pretty-print it."""
        if len(args) != 1:
            raise CommandError("Please provide a snapshot ID")
        snapshot_id = args[0]
        try:
            with PlanktonBackend(None) as backend:
                snapshot = backend.get_snapshot(snapshot_id,
                                                check_permissions=False)
        # FIX: a bare 'except:' also swallowed SystemExit/KeyboardInterrupt;
        # catch only genuine errors from the backend lookup.
        except Exception:
            raise CommandError("An error occurred, verify that snapshot and "
                               "user ID are valid")
        utils.pprint_table(out=self.stdout, table=[snapshot.values()],
                           headers=snapshot.keys(), vertical=True)
|
iismd17/scikit-learn | refs/heads/master | sklearn/cluster/hierarchical.py | 68 | """Hierarchical Agglomerative Clustering
These routines perform some hierarchical agglomerative clustering of some
input data.
Authors : Vincent Michel, Bertrand Thirion, Alexandre Gramfort,
Gael Varoquaux
License: BSD 3 clause
"""
from heapq import heapify, heappop, heappush, heappushpop
import warnings
import sys
import numpy as np
from scipy import sparse
from ..base import BaseEstimator, ClusterMixin
from ..externals.joblib import Memory
from ..externals import six
from ..metrics.pairwise import paired_distances, pairwise_distances
from ..utils import check_array
from ..utils.sparsetools import connected_components
from . import _hierarchical
from ._feature_agglomeration import AgglomerationTransform
from ..utils.fast_dict import IntFloatDict
# Python 3 removed 'xrange'; alias it to 'range' so the Py2-style loops
# below work under both major versions.
if sys.version_info[0] > 2:
    xrange = range
###############################################################################
# For non fully-connected graphs
def _fix_connectivity(X, connectivity, n_components=None,
                      affinity="euclidean"):
    """
    Fixes the connectivity matrix

        - copies it
        - makes it symmetric
        - converts it to LIL if necessary
        - completes it if necessary

    Returns the (possibly completed) LIL connectivity matrix and the number
    of connected components found before completion.

    NOTE: the ``n_components`` parameter is ignored — it is unconditionally
    recomputed from the connectivity matrix below.
    """
    n_samples = X.shape[0]
    if (connectivity.shape[0] != n_samples or
            connectivity.shape[1] != n_samples):
        raise ValueError('Wrong shape for connectivity matrix: %s '
                         'when X is %s' % (connectivity.shape, X.shape))

    # Make the connectivity matrix symmetric:
    connectivity = connectivity + connectivity.T

    # Convert connectivity matrix to LIL
    if not sparse.isspmatrix_lil(connectivity):
        if not sparse.isspmatrix(connectivity):
            connectivity = sparse.lil_matrix(connectivity)
        else:
            connectivity = connectivity.tolil()

    # Compute the number of nodes
    n_components, labels = connected_components(connectivity)

    if n_components > 1:
        warnings.warn("the number of connected components of the "
                      "connectivity matrix is %d > 1. Completing it to avoid "
                      "stopping the tree early." % n_components,
                      stacklevel=2)
        # XXX: Can we do without completing the matrix?
        # Stitch each pair of components together at their closest pair of
        # samples (per the chosen affinity metric) so the tree can be built
        # over a single connected graph.
        for i in xrange(n_components):
            idx_i = np.where(labels == i)[0]
            Xi = X[idx_i]
            for j in xrange(i):
                idx_j = np.where(labels == j)[0]
                Xj = X[idx_j]
                D = pairwise_distances(Xi, Xj, metric=affinity)
                ii, jj = np.where(D == np.min(D))
                ii = ii[0]
                jj = jj[0]
                connectivity[idx_i[ii], idx_j[jj]] = True
                connectivity[idx_j[jj], idx_i[ii]] = True

    return connectivity, n_components
###############################################################################
# Hierarchical tree building functions
def ward_tree(X, connectivity=None, n_components=None, n_clusters=None,
              return_distance=False):
    """Ward clustering based on a Feature matrix.

    Recursively merges the pair of clusters that minimally increases
    within-cluster variance.

    The inertia matrix uses a Heapq-based representation.

    This is the structured version, that takes into account some topological
    structure between samples.

    Read more in the :ref:`User Guide <hierarchical_clustering>`.

    Parameters
    ----------
    X : array, shape (n_samples, n_features)
        feature matrix representing n_samples samples to be clustered

    connectivity : sparse matrix (optional).
        connectivity matrix. Defines for each sample the neighboring samples
        following a given structure of the data. The matrix is assumed to
        be symmetric and only the upper triangular half is used.
        Default is None, i.e, the Ward algorithm is unstructured.

    n_components : int (optional)
        Number of connected components. If None the number of connected
        components is estimated from the connectivity matrix.
        NOTE: This parameter is now directly determined from the
        connectivity matrix and will be removed in 0.18

    n_clusters : int (optional)
        Stop early the construction of the tree at n_clusters. This is
        useful to decrease computation time if the number of clusters is
        not small compared to the number of samples. In this case, the
        complete tree is not computed, thus the 'children' output is of
        limited use, and the 'parents' output should rather be used.
        This option is valid only when specifying a connectivity matrix.

    return_distance : bool (optional)
        If True, return the distance between the clusters.

    Returns
    -------
    children : 2D array, shape (n_nodes-1, 2)
        The children of each non-leaf node. Values less than `n_samples`
        correspond to leaves of the tree which are the original samples.
        A node `i` greater than or equal to `n_samples` is a non-leaf
        node and has children `children_[i - n_samples]`. Alternatively
        at the i-th iteration, children[i][0] and children[i][1]
        are merged to form node `n_samples + i`

    n_components : int
        The number of connected components in the graph.

    n_leaves : int
        The number of leaves in the tree

    parents : 1D array, shape (n_nodes, ) or None
        The parent of each node. Only returned when a connectivity matrix
        is specified, elsewhere 'None' is returned.

    distances : 1D array, shape (n_nodes-1, )
        Only returned if return_distance is set to True (for compatibility).
        The distances between the centers of the nodes. `distances[i]`
        corresponds to a weighted euclidean distance between
        the nodes `children[i, 0]` and `children[i, 1]`. If the nodes refer
        to leaves of the tree, then `distances[i]` is their unweighted
        euclidean distance. Distances are updated in the following way
        (from scipy.hierarchy.linkage):

        The new entry :math:`d(u,v)` is computed as follows,

        .. math::

           d(u,v) = \\sqrt{\\frac{|v|+|s|}
                       {T}d(v,s)^2
                + \\frac{|v|+|t|}
                       {T}d(v,t)^2
                - \\frac{|v|}
                       {T}d(s,t)^2}

        where :math:`u` is the newly joined cluster consisting of
        clusters :math:`s` and :math:`t`, :math:`v` is an unused
        cluster in the forest, :math:`T=|v|+|s|+|t|`, and
        :math:`|*|` is the cardinality of its argument. This is also
        known as the incremental algorithm.
    """
    X = np.asarray(X)
    if X.ndim == 1:
        X = np.reshape(X, (-1, 1))
    n_samples, n_features = X.shape

    if connectivity is None:
        from scipy.cluster import hierarchy  # imports PIL
        if n_clusters is not None:
            warnings.warn('Partial build of the tree is implemented '
                          'only for structured clustering (i.e. with '
                          'explicit connectivity). The algorithm '
                          'will build the full tree and only '
                          'retain the lower branches required '
                          'for the specified number of clusters',
                          stacklevel=2)
        # Unstructured case: delegate entirely to scipy.
        out = hierarchy.ward(X)
        children_ = out[:, :2].astype(np.intp)
        if return_distance:
            distances = out[:, 2]
            return children_, 1, n_samples, None, distances
        else:
            return children_, 1, n_samples, None

    if n_components is not None:
        warnings.warn(
            "n_components is now directly calculated from the connectivity "
            "matrix and will be removed in 0.18",
            DeprecationWarning)
    connectivity, n_components = _fix_connectivity(X, connectivity)
    if n_clusters is None:
        n_nodes = 2 * n_samples - 1
    else:
        if n_clusters > n_samples:
            raise ValueError('Cannot provide more clusters than samples. '
                             '%i n_clusters was asked, and there are %i '
                             'samples.' % (n_clusters, n_samples))
        # Early stopping: only build the merges needed for n_clusters.
        n_nodes = 2 * n_samples - n_clusters

    # create inertia matrix
    coord_row = []
    coord_col = []
    A = []
    for ind, row in enumerate(connectivity.rows):
        A.append(row)
        # We keep only the upper triangular for the moments
        row = [i for i in row if i < ind]
        coord_row.extend(len(row) * [ind, ])
        coord_col.extend(row)

    coord_row = np.array(coord_row, dtype=np.intp, order='C')
    coord_col = np.array(coord_col, dtype=np.intp, order='C')

    # build moments as a list
    moments_1 = np.zeros(n_nodes, order='C')
    moments_1[:n_samples] = 1
    moments_2 = np.zeros((n_nodes, n_features), order='C')
    moments_2[:n_samples] = X
    # np.float was a deprecated alias of the builtin float (removed in
    # NumPy 1.24); np.float64 is the dtype that was meant here.
    inertia = np.empty(len(coord_row), dtype=np.float64, order='C')
    _hierarchical.compute_ward_dist(moments_1, moments_2, coord_row, coord_col,
                                    inertia)
    inertia = list(six.moves.zip(inertia, coord_row, coord_col))
    heapify(inertia)

    # prepare the main fields
    parent = np.arange(n_nodes, dtype=np.intp)
    used_node = np.ones(n_nodes, dtype=bool)
    children = []
    if return_distance:
        distances = np.empty(n_nodes - n_samples)

    not_visited = np.empty(n_nodes, dtype=np.int8, order='C')

    # recursive merge loop
    for k in range(n_samples, n_nodes):
        # identify the merge: pop stale edges until both ends are alive
        while True:
            inert, i, j = heappop(inertia)
            if used_node[i] and used_node[j]:
                break
        parent[i], parent[j] = k, k
        children.append((i, j))
        used_node[i] = used_node[j] = False
        if return_distance:  # store inertia value
            distances[k - n_samples] = inert

        # update the moments
        moments_1[k] = moments_1[i] + moments_1[j]
        moments_2[k] = moments_2[i] + moments_2[j]

        # update the structure matrix A and the inertia matrix
        coord_col = []
        not_visited.fill(1)
        not_visited[k] = 0
        _hierarchical._get_parents(A[i], coord_col, parent, not_visited)
        _hierarchical._get_parents(A[j], coord_col, parent, not_visited)
        # Use a plain loop: a list comprehension evaluated only for its
        # side effect is an anti-pattern and no faster.
        for neighbor in coord_col:
            A[neighbor].append(k)
        A.append(coord_col)
        coord_col = np.array(coord_col, dtype=np.intp, order='C')
        coord_row = np.empty(coord_col.shape, dtype=np.intp, order='C')
        coord_row.fill(k)
        n_additions = len(coord_row)
        ini = np.empty(n_additions, dtype=np.float64, order='C')

        _hierarchical.compute_ward_dist(moments_1, moments_2,
                                        coord_row, coord_col, ini)
        for idx in range(n_additions):
            heappush(inertia, (ini[idx], k, coord_col[idx]))

    # Separate leaves in children (empty lists up to now)
    n_leaves = n_samples
    # sort children to get consistent output with unstructured version
    children = [c[::-1] for c in children]
    children = np.array(children)  # return numpy array for efficient caching

    if return_distance:
        # 2 is scaling factor to compare w/ unstructured version
        distances = np.sqrt(2. * distances)
        return children, n_components, n_leaves, parent, distances
    else:
        return children, n_components, n_leaves, parent
# average and complete linkage
def linkage_tree(X, connectivity=None, n_components=None,
                 n_clusters=None, linkage='complete', affinity="euclidean",
                 return_distance=False):
    """Linkage agglomerative clustering based on a Feature matrix.

    The inertia matrix uses a Heapq-based representation.

    This is the structured version, that takes into account some topological
    structure between samples.

    Read more in the :ref:`User Guide <hierarchical_clustering>`.

    Parameters
    ----------
    X : array, shape (n_samples, n_features)
        feature matrix representing n_samples samples to be clustered

    connectivity : sparse matrix (optional).
        connectivity matrix. Defines for each sample the neighboring samples
        following a given structure of the data. The matrix is assumed to
        be symmetric and only the upper triangular half is used.
        Default is None, i.e, the Ward algorithm is unstructured.

    n_components : int (optional)
        Number of connected components. If None the number of connected
        components is estimated from the connectivity matrix.
        NOTE: This parameter is now directly determined from the
        connectivity matrix and will be removed in 0.18

    n_clusters : int (optional)
        Stop early the construction of the tree at n_clusters. This is
        useful to decrease computation time if the number of clusters is
        not small compared to the number of samples. In this case, the
        complete tree is not computed, thus the 'children' output is of
        limited use, and the 'parents' output should rather be used.
        This option is valid only when specifying a connectivity matrix.

    linkage : {"average", "complete"}, optional, default: "complete"
        Which linkage criteria to use. The linkage criterion determines which
        distance to use between sets of observation.
            - average uses the average of the distances of each observation of
              the two sets
            - complete or maximum linkage uses the maximum distances between
              all observations of the two sets.

    affinity : string or callable, optional, default: "euclidean".
        which metric to use. Can be "euclidean", "manhattan", or any
        distance know to paired distance (see metric.pairwise)

    return_distance : bool, default False
        whether or not to return the distances between the clusters.

    Returns
    -------
    children : 2D array, shape (n_nodes-1, 2)
        The children of each non-leaf node. Values less than `n_samples`
        correspond to leaves of the tree which are the original samples.
        A node `i` greater than or equal to `n_samples` is a non-leaf
        node and has children `children_[i - n_samples]`. Alternatively
        at the i-th iteration, children[i][0] and children[i][1]
        are merged to form node `n_samples + i`

    n_components : int
        The number of connected components in the graph.

    n_leaves : int
        The number of leaves in the tree.

    parents : 1D array, shape (n_nodes, ) or None
        The parent of each node. Only returned when a connectivity matrix
        is specified, elsewhere 'None' is returned.

    distances : ndarray, shape (n_nodes-1,)
        Returned when return_distance is set to True.
        distances[i] refers to the distance between children[i][0] and
        children[i][1] when they are merged.

    See also
    --------
    ward_tree : hierarchical clustering with ward linkage
    """
    X = np.asarray(X)
    if X.ndim == 1:
        X = np.reshape(X, (-1, 1))
    n_samples, n_features = X.shape

    linkage_choices = {'complete': _hierarchical.max_merge,
                       'average': _hierarchical.average_merge}
    try:
        join_func = linkage_choices[linkage]
    except KeyError:
        raise ValueError(
            'Unknown linkage option, linkage should be one '
            'of %s, but %s was given' % (linkage_choices.keys(), linkage))

    if connectivity is None:
        from scipy.cluster import hierarchy  # imports PIL
        if n_clusters is not None:
            warnings.warn('Partial build of the tree is implemented '
                          'only for structured clustering (i.e. with '
                          'explicit connectivity). The algorithm '
                          'will build the full tree and only '
                          'retain the lower branches required '
                          'for the specified number of clusters',
                          stacklevel=2)

        if affinity == 'precomputed':
            # for the linkage function of hierarchy to work on precomputed
            # data, provide as first argument an ndarray of the shape returned
            # by pdist: it is a flat array containing the upper triangular of
            # the distance matrix.
            i, j = np.triu_indices(X.shape[0], k=1)
            X = X[i, j]
        elif affinity == 'l2':
            # Translate to something understood by scipy
            affinity = 'euclidean'
        elif affinity in ('l1', 'manhattan'):
            affinity = 'cityblock'
        elif callable(affinity):
            X = affinity(X)
            i, j = np.triu_indices(X.shape[0], k=1)
            X = X[i, j]
        out = hierarchy.linkage(X, method=linkage, metric=affinity)
        # np.int was a deprecated alias of the builtin int (removed in NumPy
        # 1.24); use np.intp for consistency with the ward_tree path.
        children_ = out[:, :2].astype(np.intp)

        if return_distance:
            distances = out[:, 2]
            return children_, 1, n_samples, None, distances
        return children_, 1, n_samples, None

    if n_components is not None:
        warnings.warn(
            "n_components is now directly calculated from the connectivity "
            "matrix and will be removed in 0.18",
            DeprecationWarning)
    connectivity, n_components = _fix_connectivity(X, connectivity)

    connectivity = connectivity.tocoo()
    # Remove the diagonal entries: a sample's connection to itself carries
    # no merge information.
    diag_mask = (connectivity.row != connectivity.col)
    connectivity.row = connectivity.row[diag_mask]
    connectivity.col = connectivity.col[diag_mask]
    connectivity.data = connectivity.data[diag_mask]
    del diag_mask

    if affinity == 'precomputed':
        distances = X[connectivity.row, connectivity.col]
    else:
        # FIXME We compute all the distances, while we could have only computed
        # the "interesting" distances
        distances = paired_distances(X[connectivity.row],
                                     X[connectivity.col],
                                     metric=affinity)
    connectivity.data = distances

    if n_clusters is None:
        n_nodes = 2 * n_samples - 1
    else:
        # Raise instead of ``assert`` so the check survives ``python -O``
        # and matches the error reporting of ward_tree.
        if n_clusters > n_samples:
            raise ValueError('Cannot provide more clusters than samples. '
                             '%i n_clusters was asked, and there are %i '
                             'samples.' % (n_clusters, n_samples))
        n_nodes = 2 * n_samples - n_clusters

    if return_distance:
        distances = np.empty(n_nodes - n_samples)
    # create inertia heap and connection matrix
    A = np.empty(n_nodes, dtype=object)
    inertia = list()

    # LIL seems to the best format to access the rows quickly,
    # without the numpy overhead of slicing CSR indices and data.
    connectivity = connectivity.tolil()
    # We are storing the graph in a list of IntFloatDict
    for ind, (data, row) in enumerate(zip(connectivity.data,
                                          connectivity.rows)):
        A[ind] = IntFloatDict(np.asarray(row, dtype=np.intp),
                              np.asarray(data, dtype=np.float64))
        # We keep only the upper triangular for the heap
        # Generator expressions are faster than arrays on the following
        inertia.extend(_hierarchical.WeightedEdge(d, ind, r)
                       for r, d in zip(row, data) if r < ind)
    del connectivity

    heapify(inertia)

    # prepare the main fields
    parent = np.arange(n_nodes, dtype=np.intp)
    # used_node doubles as the per-cluster element count (0 == merged away).
    used_node = np.ones(n_nodes, dtype=np.intp)
    children = []

    # recursive merge loop
    for k in range(n_samples, n_nodes):
        # identify the merge: pop stale edges until both ends are alive
        while True:
            edge = heappop(inertia)
            if used_node[edge.a] and used_node[edge.b]:
                break
        i = edge.a
        j = edge.b

        if return_distance:
            # store distances
            distances[k - n_samples] = edge.weight

        parent[i] = parent[j] = k
        children.append((i, j))
        # Keep track of the number of elements per cluster
        n_i = used_node[i]
        n_j = used_node[j]
        used_node[k] = n_i + n_j
        used_node[i] = used_node[j] = False

        # update the structure matrix A and the inertia matrix
        # a clever 'min', or 'max' operation between A[i] and A[j]
        coord_col = join_func(A[i], A[j], used_node, n_i, n_j)
        for l, d in coord_col:
            A[l].append(k, d)
            # Here we use the information from coord_col (containing the
            # distances) to update the heap
            heappush(inertia, _hierarchical.WeightedEdge(d, k, l))
        A[k] = coord_col
        # Clear A[i] and A[j] to save memory
        A[i] = A[j] = 0

    # Separate leaves in children (empty lists up to now)
    n_leaves = n_samples

    # # return numpy array for efficient caching
    children = np.array(children)[:, ::-1]

    if return_distance:
        return children, n_components, n_leaves, parent, distances
    return children, n_components, n_leaves, parent
# Matching names to tree-building strategies
def _complete_linkage(*args, **kwargs):
    """Build a linkage tree with the linkage criterion forced to 'complete'."""
    merged_kwargs = dict(kwargs, linkage='complete')
    return linkage_tree(*args, **merged_kwargs)
def _average_linkage(*args, **kwargs):
    """Build a linkage tree with the linkage criterion forced to 'average'."""
    merged_kwargs = dict(kwargs, linkage='average')
    return linkage_tree(*args, **merged_kwargs)
# Dispatch table mapping the public ``linkage`` option value to the
# corresponding tree-building function.
_TREE_BUILDERS = dict(
    ward=ward_tree,
    complete=_complete_linkage,
    average=_average_linkage)
###############################################################################
# Functions for cutting hierarchical clustering tree
def _hc_cut(n_clusters, children, n_leaves):
    """Function cutting the ward tree for a given number of clusters.

    Parameters
    ----------
    n_clusters : int or ndarray
        The number of clusters to form.

    children : list of pairs. Length of n_nodes
        The children of each non-leaf node. Values less than `n_samples` refer
        to leaves of the tree. A greater value `i` indicates a node with
        children `children[i - n_samples]`.

    n_leaves : int
        Number of leaves of the tree.

    Returns
    -------
    labels : array [n_samples]
        cluster labels for each point
    """
    if n_clusters > n_leaves:
        raise ValueError('Cannot extract more clusters than samples: '
                         '%s clusters were given for a tree with %s leaves.'
                         % (n_clusters, n_leaves))
    # In this function, we store nodes as a heap to avoid recomputing
    # the max of the nodes: the first element is always the smallest
    # We use negated indices as heaps work on smallest elements, and we
    # are interested in largest elements
    # children[-1] is the root of the tree
    nodes = [-(max(children[-1]) + 1)]
    for i in range(n_clusters - 1):
        # As we have a heap, nodes[0] is the smallest element
        these_children = children[-nodes[0] - n_leaves]
        # Insert the 2 children and remove the largest node
        heappush(nodes, -these_children[0])
        heappushpop(nodes, -these_children[1])
    label = np.zeros(n_leaves, dtype=np.intp)
    # Each surviving node becomes one cluster; label all its descendants.
    for i, node in enumerate(nodes):
        label[_hierarchical._hc_get_descendent(-node, children, n_leaves)] = i
    return label
###############################################################################
class AgglomerativeClustering(BaseEstimator, ClusterMixin):
    """
    Agglomerative Clustering

    Recursively merges the pair of clusters that minimally increases
    a given linkage distance.

    Read more in the :ref:`User Guide <hierarchical_clustering>`.

    Parameters
    ----------
    n_clusters : int, default=2
        The number of clusters to find.

    connectivity : array-like or callable, optional
        Connectivity matrix. Defines for each sample the neighboring
        samples following a given structure of the data.
        This can be a connectivity matrix itself or a callable that transforms
        the data into a connectivity matrix, such as derived from
        kneighbors_graph. Default is None, i.e, the
        hierarchical clustering algorithm is unstructured.

    affinity : string or callable, default: "euclidean"
        Metric used to compute the linkage. Can be "euclidean", "l1", "l2",
        "manhattan", "cosine", or 'precomputed'.
        If linkage is "ward", only "euclidean" is accepted.

    memory : None, Instance of joblib.Memory or string (optional)
        Used to cache the output of the computation of the tree.
        By default (``None``), no caching is done. If a string is given,
        it is the path to the caching directory.

    n_components : int (optional)
        Number of connected components. If None the number of connected
        components is estimated from the connectivity matrix.
        NOTE: This parameter is now directly determined from the connectivity
        matrix and will be removed in 0.18

    compute_full_tree : bool or 'auto' (optional)
        Stop early the construction of the tree at n_clusters. This is
        useful to decrease computation time if the number of clusters is
        not small compared to the number of samples. This option is
        useful only when specifying a connectivity matrix. Note also that
        when varying the number of clusters and using caching, it may
        be advantageous to compute the full tree.

    linkage : {"ward", "complete", "average"}, optional, default: "ward"
        Which linkage criterion to use. The linkage criterion determines which
        distance to use between sets of observation. The algorithm will merge
        the pairs of cluster that minimize this criterion.

        - ward minimizes the variance of the clusters being merged.
        - average uses the average of the distances of each observation of
          the two sets.
        - complete or maximum linkage uses the maximum distances between
          all observations of the two sets.

    pooling_func : callable, default=np.mean
        This combines the values of agglomerated features into a single
        value, and should accept an array of shape [M, N] and the keyword
        argument ``axis=1``, and reduce it to an array of size [M].

    Attributes
    ----------
    labels_ : array [n_samples]
        cluster labels for each point

    n_leaves_ : int
        Number of leaves in the hierarchical tree.

    n_components_ : int
        The estimated number of connected components in the graph.

    children_ : array-like, shape (n_nodes-1, 2)
        The children of each non-leaf node. Values less than `n_samples`
        correspond to leaves of the tree which are the original samples.
        A node `i` greater than or equal to `n_samples` is a non-leaf
        node and has children `children_[i - n_samples]`. Alternatively
        at the i-th iteration, children[i][0] and children[i][1]
        are merged to form node `n_samples + i`

    """
    def __init__(self, n_clusters=2, affinity="euclidean",
                 memory=None,
                 connectivity=None, n_components=None,
                 compute_full_tree='auto', linkage='ward',
                 pooling_func=np.mean):
        # NOTE: ``memory`` used to default to a ``Memory`` instance built at
        # class-definition time (a mutable default argument, shared by all
        # instances); ``None`` is now coalesced lazily in ``fit``, which is
        # behaviorally equivalent (no caching).
        self.n_clusters = n_clusters
        self.memory = memory
        self.n_components = n_components
        self.connectivity = connectivity
        self.compute_full_tree = compute_full_tree
        self.linkage = linkage
        self.affinity = affinity
        self.pooling_func = pooling_func

    def fit(self, X, y=None):
        """Fit the hierarchical clustering on the data

        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
            The samples a.k.a. observations.

        Returns
        -------
        self
        """
        X = check_array(X, ensure_min_samples=2)
        memory = self.memory
        # None means "no caching", a string is a cache directory and a
        # joblib.Memory instance is used as-is.
        if memory is None:
            memory = Memory(cachedir=None, verbose=0)
        elif isinstance(memory, six.string_types):
            memory = Memory(cachedir=memory, verbose=0)

        if self.n_clusters <= 0:
            raise ValueError("n_clusters should be an integer greater than 0."
                             " %s was provided." % str(self.n_clusters))

        if self.linkage == "ward" and self.affinity != "euclidean":
            raise ValueError("%s was provided as affinity. Ward can only "
                             "work with euclidean distances." %
                             (self.affinity, ))

        if self.linkage not in _TREE_BUILDERS:
            raise ValueError("Unknown linkage type %s."
                             "Valid options are %s" % (self.linkage,
                                                       _TREE_BUILDERS.keys()))
        tree_builder = _TREE_BUILDERS[self.linkage]

        connectivity = self.connectivity
        if self.connectivity is not None:
            if callable(self.connectivity):
                connectivity = self.connectivity(X)
            connectivity = check_array(
                connectivity, accept_sparse=['csr', 'coo', 'lil'])

        n_samples = len(X)
        compute_full_tree = self.compute_full_tree
        if self.connectivity is None:
            compute_full_tree = True
        if compute_full_tree == 'auto':
            # Early stopping is likely to give a speed up only for
            # a large number of clusters. The actual threshold
            # implemented here is heuristic
            compute_full_tree = self.n_clusters < max(100, .02 * n_samples)
        n_clusters = self.n_clusters
        if compute_full_tree:
            n_clusters = None

        # Construct the tree
        kwargs = {}
        if self.linkage != 'ward':
            kwargs['linkage'] = self.linkage
            kwargs['affinity'] = self.affinity
        self.children_, self.n_components_, self.n_leaves_, parents = \
            memory.cache(tree_builder)(X, connectivity,
                                       n_components=self.n_components,
                                       n_clusters=n_clusters,
                                       **kwargs)
        # Cut the tree
        if compute_full_tree:
            self.labels_ = _hc_cut(self.n_clusters, self.children_,
                                   self.n_leaves_)
        else:
            labels = _hierarchical.hc_get_heads(parents, copy=False)
            # copy to avoid holding a reference on the original array
            labels = np.copy(labels[:n_samples])
            # Reassign cluster numbers
            self.labels_ = np.searchsorted(np.unique(labels), labels)
        return self
class FeatureAgglomeration(AgglomerativeClustering, AgglomerationTransform):
    """Agglomerate features.
    Similar to AgglomerativeClustering, but recursively merges features
    instead of samples.
    Read more in the :ref:`User Guide <hierarchical_clustering>`.
    Parameters
    ----------
    n_clusters : int, default 2
        The number of clusters to find.
    connectivity : array-like or callable, optional
        Connectivity matrix. Defines for each feature the neighboring
        features following a given structure of the data.
        This can be a connectivity matrix itself or a callable that transforms
        the data into a connectivity matrix, such as derived from
        kneighbors_graph. Default is None, i.e, the
        hierarchical clustering algorithm is unstructured.
    affinity : string or callable, default "euclidean"
        Metric used to compute the linkage. Can be "euclidean", "l1", "l2",
        "manhattan", "cosine", or 'precomputed'.
        If linkage is "ward", only "euclidean" is accepted.
    memory : Instance of joblib.Memory or string, optional
        Used to cache the output of the computation of the tree.
        By default, no caching is done. If a string is given, it is the
        path to the caching directory.
    n_components : int (optional)
        Number of connected components. If None the number of connected
        components is estimated from the connectivity matrix.
        NOTE: This parameter is now directly determined from the connectivity
        matrix and will be removed in 0.18
    compute_full_tree : bool or 'auto', optional, default "auto"
        Stop early the construction of the tree at n_clusters. This is
        useful to decrease computation time if the number of clusters is
        not small compared to the number of features. This option is
        useful only when specifying a connectivity matrix. Note also that
        when varying the number of clusters and using caching, it may
        be advantageous to compute the full tree.
    linkage : {"ward", "complete", "average"}, optional, default "ward"
        Which linkage criterion to use. The linkage criterion determines which
        distance to use between sets of features. The algorithm will merge
        the pairs of cluster that minimize this criterion.
        - ward minimizes the variance of the clusters being merged.
        - average uses the average of the distances of each feature of
          the two sets.
        - complete or maximum linkage uses the maximum distances between
          all features of the two sets.
    pooling_func : callable, default np.mean
        This combines the values of agglomerated features into a single
        value, and should accept an array of shape [M, N] and the keyword
        argument `axis=1`, and reduce it to an array of size [M].
    Attributes
    ----------
    labels_ : array-like, (n_features,)
        cluster labels for each feature.
    n_leaves_ : int
        Number of leaves in the hierarchical tree.
    n_components_ : int
        The estimated number of connected components in the graph.
    children_ : array-like, shape (n_nodes-1, 2)
        The children of each non-leaf node. Values less than `n_features`
        correspond to leaves of the tree which are the original samples.
        A node `i` greater than or equal to `n_features` is a non-leaf
        node and has children `children_[i - n_features]`. Alternatively
        at the i-th iteration, children[i][0] and children[i][1]
        are merged to form node `n_features + i`
    """
    def fit(self, X, y=None, **params):
        """Fit the hierarchical clustering on the data
        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
            The data
        Returns
        -------
        self
        """
        X = check_array(X, accept_sparse=['csr', 'csc', 'coo'],
                        ensure_min_features=2)
        # Features are clustered instead of samples: transposing X makes
        # the inherited sample-clustering implementation operate on the
        # feature axis.
        return AgglomerativeClustering.fit(self, X.T, **params)
    @property
    def fit_predict(self):
        # Shadow the inherited ClusterMixin.fit_predict: raising
        # AttributeError here makes attribute access fail, presumably
        # because sample-label prediction is meaningless when features
        # (not samples) are clustered.
        raise AttributeError
|
benjaminabel/pelican-plugins | refs/heads/master | filetime_from_hg/__init__.py | 26 | from .filetime_from_hg import *
|
yuzhu/kubernetes | refs/heads/master | cluster/juju/charms/trusty/kubernetes/hooks/lib/registrator.py | 97 | #!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import httplib
import json
import time
class Registrator:
    """Build and submit Kubernetes node ("Minion") registration requests
    to the API server over HTTP.
    NOTE(review): Python 2 only -- this class uses ``httplib`` and
    ``print`` statements.
    """
    def __init__(self):
        # Template payload for the registration request.  Callers are
        # expected to fill in the empty fields (see the inline hints)
        # before POSTing via register().
        self.ds ={
            "creationTimestamp": "",
            "kind": "Minion",
            "name": "", # private_address
            "metadata": {
                "name": "", #private_address,
            },
            "spec": {
                "externalID": "", #private_address
                "capacity": {
                    "mem": "", # mem + ' K',
                    "cpu": "", # cpus
                }
            },
            "status": {
                "conditions": [],
                "hostIP": "", #private_address
            }
        }
    @property
    def data(self):
        ''' Returns a data-structure for population to make a request. '''
        return self.ds
    def register(self, hostname, port, api_path):
        ''' Contact the API Server for a new registration '''
        headers = {"Content-type": "application/json",
                   "Accept": "application/json"}
        connection = httplib.HTTPConnection(hostname, port)
        print 'CONN {}'.format(connection)
        connection.request("POST", api_path, json.dumps(self.data), headers)
        response = connection.getresponse()
        body = response.read()
        print(body)
        result = json.loads(body)
        print("Response status:%s reason:%s body:%s" % \
            (response.status, response.reason, result))
        return response, result
    def update(self):
        ''' Contact the API Server to update a registration '''
        # do a get on the API for the node
        # repost to the API with any modified data
        pass
    def save(self):
        ''' Marshall the registration data '''
        # TODO
        pass
    def command_succeeded(self, response, result):
        ''' Evaluate response data to determine if the command was successful '''
        # Returns True on success; falls through (returning None) on the
        # "conflict" and "retry" paths that do not raise.
        if response.status in [200, 201]:
            print("Registered")
            return True
        elif response.status in [409,]:
            print("Status Conflict")
            # Suggested return a PUT instead of a POST with this response
            # code, this predicates use of the UPDATE method
            # TODO
        elif response.status in (500,) and result.get(
            'message', '').startswith('The requested resource does not exist'):
            # There's something fishy in the kube api here (0.4 dev), first time we
            # go to register a new minion, we always seem to get this error.
            # https://github.com/GoogleCloudPlatform/kubernetes/issues/1995
            time.sleep(1)
            print("Retrying registration...")
            raise ValueError("Registration returned 500, retry")
            # return register_machine(apiserver, retry=True)
        else:
            print("Registration error")
            # TODO - get request data
            raise RuntimeError("Unable to register machine with")
|
cancro7/gem5 | refs/heads/master | src/mem/SerialLink.py | 20 | # Copyright (c) 2012-2013 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# Copyright (c) 2015 The University of Bologna
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ali Saidi
# Andreas Hansson
# Erfan Azarkhish
from m5.params import *
from MemObject import MemObject
# SerialLink is a simple variation of the Bridge class, with the ability to
# account for the latency of packet serialization.
class SerialLink(MemObject):
    """Bridge-like buffered link whose effective bandwidth is derived from
    its clock domain together with the number of lanes and the per-lane
    speed parameters below."""
    type = 'SerialLink'
    cxx_header = "mem/serial_link.hh"
    # CPU-side and memory-side ports of the link.
    slave = SlavePort('Slave port')
    master = MasterPort('Master port')
    req_size = Param.Unsigned(16, "The number of requests to buffer")
    resp_size = Param.Unsigned(16, "The number of responses to buffer")
    delay = Param.Latency('0ns', "The latency of this serial_link")
    ranges = VectorParam.AddrRange([AllMemory],
                            "Address ranges to pass through the serial_link")
    # Bandwidth of the serial link is determined by the clock domain which the
    # link belongs to and the number of lanes:
    num_lanes = Param.Unsigned(1, "Number of parallel lanes inside the serial"
                               "link. (aka. lane width)")
    link_speed = Param.UInt64(1, "Gb/s Speed of each parallel lane inside the"
                              "serial link. (aka. lane speed)")
|
jiazichenzhan/Server_Manage_Plugin | refs/heads/master | ironic-plugin-pike/ironic/tests/unit/drivers/test_base.py | 4 | # Copyright 2014 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import mock
from ironic.common import exception
from ironic.common import raid
from ironic.drivers import base as driver_base
from ironic.drivers.modules import fake
from ironic.tests import base
class FakeVendorInterface(driver_base.VendorInterface):
    """Minimal VendorInterface exposing passthru methods for the tests."""

    def get_properties(self):
        pass

    @driver_base.passthru(['POST'])
    def noexception(self):
        # Node passthru that succeeds.
        return "Fake"

    @driver_base.driver_passthru(['POST'])
    def driver_noexception(self):
        # Driver-level passthru that succeeds.
        return "Fake"

    @driver_base.passthru(['POST'])
    def ironicexception(self):
        # Raises an Ironic-specific exception (re-raised by the decorator).
        raise exception.IronicException("Fake!")

    @driver_base.passthru(['POST'])
    def normalexception(self):
        # Raises a generic exception (wrapped as VendorPassthruException).
        raise Exception("Fake!")

    @driver_base.passthru(['POST'], require_exclusive_lock=False)
    def shared_task(self):
        # Passthru that may run under a shared (non-exclusive) node lock.
        return "shared fake"

    def validate(self, task, **kwargs):
        pass

    def driver_validate(self, **kwargs):
        pass
class PassthruDecoratorTestCase(base.TestCase):
    """Tests for the @passthru / @driver_passthru decorators."""

    def setUp(self):
        super(PassthruDecoratorTestCase, self).setUp()
        self.fvi = FakeVendorInterface()

    def test_passthru_noexception(self):
        result = self.fvi.noexception()
        self.assertEqual("Fake", result)

    @mock.patch.object(driver_base, 'LOG', autospec=True)
    def test_passthru_ironicexception(self, mock_log):
        # IronicException is logged and re-raised unchanged.
        self.assertRaises(exception.IronicException,
                          self.fvi.ironicexception, mock.ANY)
        mock_log.exception.assert_called_with(
            mock.ANY, 'ironicexception')

    @mock.patch.object(driver_base, 'LOG', autospec=True)
    def test_passthru_nonironicexception(self, mock_log):
        # Any non-Ironic exception is logged and wrapped.
        self.assertRaises(exception.VendorPassthruException,
                          self.fvi.normalexception, mock.ANY)
        mock_log.exception.assert_called_with(
            mock.ANY, 'normalexception')

    def test_passthru_shared_task_metadata(self):
        # The decorator records require_exclusive_lock=False in the
        # metadata tuple attached to the wrapped function.
        self.assertIn('require_exclusive_lock',
                      self.fvi.shared_task._vendor_metadata[1])
        self.assertFalse(
            self.fvi.shared_task._vendor_metadata[1]['require_exclusive_lock'])

    def test_passthru_exclusive_task_metadata(self):
        # Exclusive locking is the default when not specified.
        self.assertIn('require_exclusive_lock',
                      self.fvi.noexception._vendor_metadata[1])
        self.assertTrue(
            self.fvi.noexception._vendor_metadata[1]['require_exclusive_lock'])

    def test_passthru_check_func_references(self):
        # Each interface instance must get its own bound route callables;
        # shared callables would leak state between instances.
        inst1 = FakeVendorInterface()
        inst2 = FakeVendorInterface()

        self.assertNotEqual(inst1.vendor_routes['noexception']['func'],
                            inst2.vendor_routes['noexception']['func'])
        self.assertNotEqual(inst1.driver_routes['driver_noexception']['func'],
                            inst2.driver_routes['driver_noexception']['func'])
class CleanStepDecoratorTestCase(base.TestCase):
    """Tests for @clean_step argument validation and attribute marking."""

    def setUp(self):
        super(CleanStepDecoratorTestCase, self).setUp()
        method_mock = mock.MagicMock()
        # Strip the clean-step marker attributes from the mock so we can
        # observe the decorator setting them from scratch.
        del method_mock._is_clean_step
        del method_mock._clean_step_priority
        del method_mock._clean_step_abortable
        del method_mock._clean_step_argsinfo
        self.method = method_mock

    def test__validate_argsinfo(self):
        # None, empty dict
        driver_base._validate_argsinfo(None)
        driver_base._validate_argsinfo({})

        # Only description specified
        driver_base._validate_argsinfo({'arg1': {'description': 'desc1'}})

        # Multiple args
        driver_base._validate_argsinfo({'arg1': {'description': 'desc1',
                                                 'required': True},
                                        'arg2': {'description': 'desc2'}})

    def test__validate_argsinfo_not_dict(self):
        self.assertRaisesRegex(exception.InvalidParameterValue,
                               'argsinfo.+dictionary',
                               driver_base._validate_argsinfo, 'not-a-dict')

    def test__validate_argsinfo_arg_not_dict(self):
        self.assertRaisesRegex(exception.InvalidParameterValue,
                               'Argument.+dictionary',
                               driver_base._validate_argsinfo,
                               {'arg1': 'not-a-dict'})

    def test__validate_argsinfo_arg_empty_dict(self):
        self.assertRaisesRegex(exception.InvalidParameterValue,
                               'description',
                               driver_base._validate_argsinfo,
                               {'arg1': {}})

    def test__validate_argsinfo_arg_missing_description(self):
        # 'description' is mandatory for every argument entry.
        self.assertRaisesRegex(exception.InvalidParameterValue,
                               'description',
                               driver_base._validate_argsinfo,
                               {'arg1': {'required': True}})

    def test__validate_argsinfo_arg_description_invalid(self):
        self.assertRaisesRegex(exception.InvalidParameterValue,
                               'string',
                               driver_base._validate_argsinfo,
                               {'arg1': {'description': True}})

    def test__validate_argsinfo_arg_required_invalid(self):
        self.assertRaisesRegex(exception.InvalidParameterValue,
                               'Boolean',
                               driver_base._validate_argsinfo,
                               {'arg1': {'description': 'desc1',
                                         'required': 'maybe'}})

    def test__validate_argsinfo_arg_unknown_key(self):
        # Only 'description' and 'required' keys are accepted.
        self.assertRaisesRegex(exception.InvalidParameterValue,
                               'invalid',
                               driver_base._validate_argsinfo,
                               {'arg1': {'description': 'desc1',
                                         'unknown': 'bad'}})

    def test_clean_step_priority_only(self):
        d = driver_base.clean_step(priority=10)
        d(self.method)
        self.assertTrue(self.method._is_clean_step)
        self.assertEqual(10, self.method._clean_step_priority)
        # abortable defaults to False, argsinfo to None.
        self.assertFalse(self.method._clean_step_abortable)
        self.assertIsNone(self.method._clean_step_argsinfo)

    def test_clean_step_all_args(self):
        argsinfo = {'arg1': {'description': 'desc1',
                             'required': True}}
        d = driver_base.clean_step(priority=0, abortable=True,
                                   argsinfo=argsinfo)
        d(self.method)
        self.assertTrue(self.method._is_clean_step)
        self.assertEqual(0, self.method._clean_step_priority)
        self.assertTrue(self.method._clean_step_abortable)
        self.assertEqual(argsinfo, self.method._clean_step_argsinfo)

    def test_clean_step_bad_priority(self):
        # Validation fails midway: _is_clean_step is set but the later
        # attributes are never assigned.
        d = driver_base.clean_step(priority='hi')
        self.assertRaisesRegex(exception.InvalidParameterValue, 'priority',
                               d, self.method)
        self.assertTrue(self.method._is_clean_step)
        self.assertFalse(hasattr(self.method, '_clean_step_priority'))
        self.assertFalse(hasattr(self.method, '_clean_step_abortable'))
        self.assertFalse(hasattr(self.method, '_clean_step_argsinfo'))

    def test_clean_step_bad_abortable(self):
        d = driver_base.clean_step(priority=0, abortable='blue')
        self.assertRaisesRegex(exception.InvalidParameterValue, 'abortable',
                               d, self.method)
        self.assertTrue(self.method._is_clean_step)
        self.assertEqual(0, self.method._clean_step_priority)
        self.assertFalse(hasattr(self.method, '_clean_step_abortable'))
        self.assertFalse(hasattr(self.method, '_clean_step_argsinfo'))

    @mock.patch.object(driver_base, '_validate_argsinfo', spec_set=True,
                       autospec=True)
    def test_clean_step_bad_argsinfo(self, mock_valid):
        mock_valid.side_effect = exception.InvalidParameterValue('bad')
        d = driver_base.clean_step(priority=0, argsinfo=100)
        self.assertRaises(exception.InvalidParameterValue, d, self.method)
        self.assertTrue(self.method._is_clean_step)
        self.assertEqual(0, self.method._clean_step_priority)
        self.assertFalse(self.method._clean_step_abortable)
        self.assertFalse(hasattr(self.method, '_clean_step_argsinfo'))
class CleanStepTestCase(base.TestCase):
    """End-to-end test of clean-step discovery and execution."""

    def test_get_and_execute_clean_steps(self):
        # Create a fake Driver class, create some clean steps, make sure
        # they are listed correctly, and attempt to execute one of them

        method_mock = mock.MagicMock(spec_set=[])
        method_args_mock = mock.MagicMock(spec_set=[])
        task_mock = mock.MagicMock(spec_set=[])

        class BaseTestClass(driver_base.BaseInterface):
            def get_properties(self):
                return {}

            def validate(self, task):
                pass

        class TestClass(BaseTestClass):
            interface_type = 'test'

            # priority=0 => manual-only step.
            @driver_base.clean_step(priority=0)
            def manual_method(self, task):
                pass

            @driver_base.clean_step(priority=10, abortable=True)
            def automated_method(self, task):
                method_mock(task)

            def not_clean_method(self, task):
                pass

        class TestClass2(BaseTestClass):
            interface_type = 'test2'

            @driver_base.clean_step(priority=0)
            def manual_method2(self, task):
                pass

            @driver_base.clean_step(priority=20, abortable=True)
            def automated_method2(self, task):
                method_mock(task)

            def not_clean_method2(self, task):
                pass

        class TestClass3(BaseTestClass):
            interface_type = 'test3'

            @driver_base.clean_step(priority=0, abortable=True, argsinfo={
                                    'arg1': {'description': 'desc1',
                                             'required': True}})
            def manual_method3(self, task, **kwargs):
                method_args_mock(task, **kwargs)

            @driver_base.clean_step(priority=15, argsinfo={
                                    'arg10': {'description': 'desc10'}})
            def automated_method3(self, task, **kwargs):
                pass

            def not_clean_method3(self, task):
                pass

        obj = TestClass()
        obj2 = TestClass2()
        obj3 = TestClass3()

        # Steps are returned sorted by descending priority; undecorated
        # methods must not appear.
        self.assertEqual(2, len(obj.get_clean_steps(task_mock)))
        # Ensure the steps look correct
        self.assertEqual(10, obj.get_clean_steps(task_mock)[0]['priority'])
        self.assertTrue(obj.get_clean_steps(task_mock)[0]['abortable'])
        self.assertEqual('test', obj.get_clean_steps(
            task_mock)[0]['interface'])
        self.assertEqual('automated_method', obj.get_clean_steps(
            task_mock)[0]['step'])
        self.assertEqual(0, obj.get_clean_steps(task_mock)[1]['priority'])
        self.assertFalse(obj.get_clean_steps(task_mock)[1]['abortable'])
        self.assertEqual('test', obj.get_clean_steps(
            task_mock)[1]['interface'])
        self.assertEqual('manual_method', obj.get_clean_steps(
            task_mock)[1]['step'])

        # Ensure the second obj get different clean steps
        self.assertEqual(2, len(obj2.get_clean_steps(task_mock)))
        # Ensure the steps look correct
        self.assertEqual(20, obj2.get_clean_steps(task_mock)[0]['priority'])
        self.assertTrue(obj2.get_clean_steps(task_mock)[0]['abortable'])
        self.assertEqual('test2', obj2.get_clean_steps(
            task_mock)[0]['interface'])
        self.assertEqual('automated_method2', obj2.get_clean_steps(
            task_mock)[0]['step'])
        self.assertEqual(0, obj2.get_clean_steps(task_mock)[1]['priority'])
        self.assertFalse(obj2.get_clean_steps(task_mock)[1]['abortable'])
        self.assertEqual('test2', obj2.get_clean_steps(
            task_mock)[1]['interface'])
        self.assertEqual('manual_method2', obj2.get_clean_steps(
            task_mock)[1]['step'])
        self.assertIsNone(obj2.get_clean_steps(task_mock)[0]['argsinfo'])

        # Ensure the third obj has different clean steps
        self.assertEqual(2, len(obj3.get_clean_steps(task_mock)))
        self.assertEqual(15, obj3.get_clean_steps(task_mock)[0]['priority'])
        self.assertFalse(obj3.get_clean_steps(task_mock)[0]['abortable'])
        self.assertEqual('test3', obj3.get_clean_steps(
            task_mock)[0]['interface'])
        self.assertEqual('automated_method3', obj3.get_clean_steps(
            task_mock)[0]['step'])
        self.assertEqual({'arg10': {'description': 'desc10'}},
                         obj3.get_clean_steps(task_mock)[0]['argsinfo'])
        self.assertEqual(0, obj3.get_clean_steps(task_mock)[1]['priority'])
        self.assertTrue(obj3.get_clean_steps(task_mock)[1]['abortable'])
        self.assertEqual(obj3.interface_type, obj3.get_clean_steps(
            task_mock)[1]['interface'])
        self.assertEqual('manual_method3', obj3.get_clean_steps(
            task_mock)[1]['step'])
        self.assertEqual({'arg1': {'description': 'desc1', 'required': True}},
                         obj3.get_clean_steps(task_mock)[1]['argsinfo'])

        # Ensure we can execute the function.
        obj.execute_clean_step(task_mock, obj.get_clean_steps(task_mock)[0])
        method_mock.assert_called_once_with(task_mock)

        # Execution with step arguments forwards them as kwargs.
        args = {'arg1': 'val1'}
        clean_step = {'interface': 'test3', 'step': 'manual_method3',
                      'args': args}
        obj3.execute_clean_step(task_mock, clean_step)
        method_args_mock.assert_called_once_with(task_mock, **args)
class MyRAIDInterface(driver_base.RAIDInterface):
    """Concrete RAIDInterface stub; inherits default validate logic."""

    def create_configuration(self, task):
        pass

    def delete_configuration(self, task):
        pass
class RAIDInterfaceTestCase(base.TestCase):
    """Tests for the base RAIDInterface validation helpers."""

    @mock.patch.object(driver_base.RAIDInterface, 'validate_raid_config',
                       autospec=True)
    def test_validate(self, validate_raid_config_mock):
        # validate() delegates to validate_raid_config() when the node
        # carries a target_raid_config.
        raid_interface = MyRAIDInterface()
        node_mock = mock.MagicMock(target_raid_config='some_raid_config')
        task_mock = mock.MagicMock(node=node_mock)

        raid_interface.validate(task_mock)

        validate_raid_config_mock.assert_called_once_with(
            raid_interface, task_mock, 'some_raid_config')

    @mock.patch.object(driver_base.RAIDInterface, 'validate_raid_config',
                       autospec=True)
    def test_validate_no_target_raid_config(self, validate_raid_config_mock):
        # An empty target_raid_config short-circuits validation.
        raid_interface = MyRAIDInterface()
        node_mock = mock.MagicMock(target_raid_config={})
        task_mock = mock.MagicMock(node=node_mock)

        raid_interface.validate(task_mock)

        self.assertFalse(validate_raid_config_mock.called)

    @mock.patch.object(raid, 'validate_configuration', autospec=True)
    def test_validate_raid_config(self, common_validate_mock):
        # The JSON schema shipped with the driver is loaded and passed on.
        with open(driver_base.RAID_CONFIG_SCHEMA, 'r') as raid_schema_fobj:
            raid_schema = json.load(raid_schema_fobj)
        raid_interface = MyRAIDInterface()

        raid_interface.validate_raid_config('task', 'some_raid_config')

        common_validate_mock.assert_called_once_with(
            'some_raid_config', raid_schema)

    @mock.patch.object(raid, 'get_logical_disk_properties',
                       autospec=True)
    def test_get_logical_disk_properties(self, get_properties_mock):
        with open(driver_base.RAID_CONFIG_SCHEMA, 'r') as raid_schema_fobj:
            raid_schema = json.load(raid_schema_fobj)
        raid_interface = MyRAIDInterface()
        raid_interface.get_logical_disk_properties()
        get_properties_mock.assert_called_once_with(raid_schema)
class TestDeployInterface(base.TestCase):
    """Tests for default DeployInterface behaviour."""

    @mock.patch.object(driver_base.LOG, 'warning', autospec=True)
    def test_warning_on_heartbeat(self, mock_log):
        # NOTE(dtantsur): FakeDeploy does not override heartbeat
        deploy = fake.FakeDeploy()
        deploy.heartbeat(mock.Mock(node=mock.Mock(uuid='uuid',
                                                  driver='driver')),
                         'url')
        self.assertTrue(mock_log.called)
class TestManagementInterface(base.TestCase):
    """Tests for default ManagementInterface behaviour."""

    def test_inject_nmi_default_impl(self):
        # The base implementation must refuse NMI injection.
        management = fake.FakeManagement()
        task_mock = mock.MagicMock(spec_set=['node'])

        self.assertRaises(exception.UnsupportedDriverExtension,
                          management.inject_nmi, task_mock)
class TestBaseDriver(base.TestCase):
    """Guards the immutability of BaseDriver's interface tuples."""

    def test_class_variables_immutable(self):
        # Test to make sure that our *_interfaces variables in the class don't
        # get modified by a child class
        self.assertEqual(('deploy', 'power'),
                         driver_base.BaseDriver.core_interfaces)
        self.assertEqual(('boot', 'console', 'inspect', 'management', 'raid'),
                         driver_base.BaseDriver.standard_interfaces)
        # Ensure that instantiating an instance of a derived class does not
        # change our variables.
        driver_base.BareDriver()

        self.assertEqual(('deploy', 'power'),
                         driver_base.BaseDriver.core_interfaces)
        self.assertEqual(('boot', 'console', 'inspect', 'management', 'raid'),
                         driver_base.BaseDriver.standard_interfaces)
class TestBareDriver(base.TestCase):
    """Guards the immutability of BareDriver's interface tuples."""

    def test_class_variables_immutable(self):
        # Test to make sure that our *_interfaces variables in the class don't
        # get modified by a child class
        self.assertEqual(('deploy', 'power', 'network'),
                         driver_base.BareDriver.core_interfaces)
        self.assertEqual(
            ('boot', 'console', 'inspect', 'management', 'raid', 'storage'),
            driver_base.BareDriver.standard_interfaces
        )
|
sixuanwang/SAMSaaS | refs/heads/master | wirecloud-develop/src/build/lib.linux-x86_64-2.7/wirecloud/proxy/views.py | 2 | # -*- coding: utf-8 -*-
# Copyright (c) 2008-2015 CoNWeT Lab., Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from six.moves.http_cookies import SimpleCookie
import re
import requests
import socket
from six.moves.urllib.parse import urlparse
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponse
try:
from django.http import StreamingHttpResponse
except: # Django 1.4
from django.http import HttpResponse as StreamingHttpResponse
from django.utils.encoding import iri_to_uri
from django.utils.translation import ugettext as _
from wirecloud.commons.utils.http import build_error_response, get_current_domain
from wirecloud.platform.plugins import get_request_proxy_processors, get_response_proxy_processors
from wirecloud.proxy.utils import is_valid_response_header, ValidationError
class Proxy():
    """Forwards an incoming Django request to an arbitrary upstream URL,
    translating headers/cookies in both directions (WireCloud's widget
    cross-origin proxy).
    """

    # Matches CGI-style HTTP header keys in request.META ("http_*").
    http_headerRE = re.compile('^http_')
    protocolRE = re.compile('HTTP/(.*)')

    # META headers never forwarded upstream.
    blacklisted_http_headers = [
        'http_host',
    ]

    # set the timeout to 60 seconds
    socket.setdefaulttimeout(60)

    def do_request(self, request, url, method):
        """Perform the upstream request and return a Django response.

        :param request: original Django HttpRequest being proxied
        :param url: absolute upstream URL (IRI allowed; converted to URI)
        :param method: upper-case HTTP method to use upstream
        """
        url = iri_to_uri(url)

        # Mutable bag passed through the request proxy processors so
        # plugins can rewrite any part of the outgoing request.
        request_data = {
            "method": method,
            "url": url,
            "data": None,
            "headers": {},
            "cookies": SimpleCookie(),
            "user": request.user,
            "original-request": request,
        }

        # Request creation
        proto, host, cgi, param, query = urlparse(url)[:5]

        # Extract headers from META
        if 'HTTP_TRANSFER_ENCODING' in request.META:
            return build_error_response(request, 500, "Wirecloud doesn't support requests using Transfer-Encodings")

        for header in request.META.items():
            header_name = header[0].lower()
            if header_name == 'content_type' and header[1]:
                request_data['headers']["content-type"] = header[1]

            elif header_name == 'content_length' and header[1]:
                # Only take into account request body if the request has a
                # Content-Length header (we don't support chunked requests)
                request_data['data'] = request
                request_data['headers']['content-length'] = header[1]
                # requests reads .len to know how much body to send.
                request_data['data'].len = int(header[1])

            elif header_name == 'cookie' or header_name == 'http_cookie':
                # Strip WireCloud's own session/CSRF cookies before
                # forwarding (never leak credentials upstream).
                cookie_parser = SimpleCookie(str(header[1]))

                del cookie_parser[str(settings.SESSION_COOKIE_NAME)]

                if str(settings.CSRF_COOKIE_NAME) in cookie_parser:
                    del cookie_parser[str(settings.CSRF_COOKIE_NAME)]

                request_data['cookies'].update(cookie_parser)

            elif self.http_headerRE.match(header_name) and not header_name in self.blacklisted_http_headers:
                fixed_name = header_name.replace("http_", "", 1).replace('_', '-')
                request_data['headers'][fixed_name] = header[1]

        # Build the Via header
        protocolVersion = self.protocolRE.match(request.META['SERVER_PROTOCOL'])
        if protocolVersion is not None:
            protocolVersion = protocolVersion.group(1)
        else:
            protocolVersion = '1.1'

        via_header = "%s %s (Wirecloud-python-Proxy/1.1)" % (protocolVersion, get_current_domain(request))
        if 'via' in request_data['headers']:
            request_data['headers']['via'] += ', ' + via_header
        else:
            request_data['headers']['via'] = via_header

        # XFF headers
        if 'x-forwarded-for' in request_data['headers']:
            request_data['headers']['x-forwarded-for'] += ', ' + request.META['REMOTE_ADDR']
        else:
            request_data['headers']['x-forwarded-for'] = request.META['REMOTE_ADDR']

        request_data['headers']['x-forwarded-host'] = host
        if 'x-forwarded-server' in request_data['headers']:
            del request_data['headers']['x-forwarded-server']

        # Pass proxy processors to the new request
        try:
            for processor in get_request_proxy_processors():
                processor.process_request(request_data)
        except ValidationError as e:
            return e.get_response(request)

        # Cookies
        cookie_header_content = ', '.join([cookie_parser[key].OutputString() for key in request_data['cookies']])
        if cookie_header_content != '':
            request_data['headers']['Cookie'] = cookie_header_content

        # Open the request
        try:
            res = requests.request(request_data['method'], request_data['url'], headers=request_data['headers'], data=request_data['data'], stream=True, verify=getattr(settings, 'WIRECLOUD_HTTPS_VERIFY', True))
        except requests.exceptions.HTTPError:
            return HttpResponse(status=504)
        except requests.exceptions.ConnectionError:
            return HttpResponse(status=502)

        # Build a Django response; stream the body without decoding so the
        # upstream Content-Encoding is preserved end-to-end.
        response = StreamingHttpResponse(res.raw.stream(4096, decode_content=False), status=res.status_code)
        if 'reason_phrase' in response:  # pragma: no cover
            # Currently only django 1.6+ supports custom reason phrases
            response.reason_phrase = res.reason_phrase

        # Add all the headers received from the response
        for header in res.headers:
            header_lower = header.lower()
            if header_lower == 'set-cookie':
                for cookie in res.cookies:
                    response.set_cookie(cookie.name, value=cookie.value, expires=cookie.expires, path=cookie.path)

            elif header_lower == 'via':
                via_header = via_header + ', ' + res.headers[header]

            elif is_valid_response_header(header_lower):
                response[header] = res.headers[header]

        # Pass proxy processors to the response
        for processor in get_response_proxy_processors():
            response = processor.process_response(request_data, response)

        response['Via'] = via_header

        return response
# Single module-level Proxy instance shared by every proxy_request() call.
WIRECLOUD_PROXY = Proxy()
def proxy_request(request, protocol, domain, path):
    """Django view forwarding *request* to ``protocol://domain/path``.

    A request is only accepted when the Referer host matches the current
    host and a WireCloud session cookie is present; otherwise a 403 is
    returned. Cookie paths in the upstream response are rewritten so they
    stay under the proxy URL space.
    """
    # TODO improve proxy security
    try:
        if request.get_host() != urlparse(request.META["HTTP_REFERER"])[1]:
            raise Exception()

        if settings.SESSION_COOKIE_NAME not in request.COOKIES:
            raise Exception()
    except Exception:
        # Deliberate best-effort check: any failure (missing Referer,
        # malformed URL, host mismatch) yields a 403. Fix: use
        # ``except Exception`` instead of a bare ``except:`` so
        # SystemExit/KeyboardInterrupt are not swallowed.
        return build_error_response(request, 403, _("Invalid request"))

    url = protocol + '://' + domain + path
    if len(request.GET) > 0:
        url += '?' + request.GET.urlencode()

    try:
        response = WIRECLOUD_PROXY.do_request(request, url, request.method.upper())
    except Exception as e:
        # Fix: drop the explicit unicode() call -- %-formatting a unicode
        # literal already coerces ``e`` on Python 2, and ``unicode`` does
        # not exist on Python 3.
        msg = _("Error processing proxy request: %s") % e
        return build_error_response(request, 500, msg)

    # Process cookies: rebase cookie paths onto the proxy URL space so the
    # browser sends them back through the proxy.
    for key in response.cookies:
        cookie = response.cookies[key]

        if cookie['path'] == '':
            cookie['path'] = reverse('wirecloud|proxy', kwargs={'protocol': protocol, 'domain': domain, 'path': path})
        else:
            cookie['path'] = reverse('wirecloud|proxy', kwargs={'protocol': protocol, 'domain': domain, 'path': cookie['path']})

    return response
|
delete/spymanager | refs/heads/master | src/__init__.py | 1 | import pymongo
from .myexceptions import AlreadyExistsOnDatabaseException
# URL template for Telegram Bot API calls; filled in via
# str.format(token=..., method=..., chat_id=..., text=...).
TELEGRAM_URL = 'https://api.telegram.org/bot{token}/{method}?chat_id={chat_id}&text={text}'
class Manager():
    """ Manage objects adding, removing and getting from database """

    def __init__(self, collection):
        # A pymongo-like collection object.
        self.collection = collection

    def add(self, username):
        """Insert a document for *username*.

        Raises AlreadyExistsOnDatabaseException when the collection's
        unique index rejects the insert as a duplicate.
        """
        # Bug fix: the original inserted ``self.newObj``, an attribute that
        # is never defined anywhere, so every call raised AttributeError.
        # Build the document from the given username instead.
        try:
            self.collection.insert_one({'username': username})
        except pymongo.errors.DuplicateKeyError:
            raise AlreadyExistsOnDatabaseException

    def remove(self, username):
        """Delete the document whose username matches, if any."""
        self.collection.find_one_and_delete({"username": username})

    def all(self):
        """Return every document in the collection as a list."""
        cursor = self.collection.find()
        return [document for document in cursor]

    def get(self, username):
        # Not implemented yet.
        pass
|
sysadminmatmoz/OCB | refs/heads/9.0 | addons/hw_posbox_homepage/controllers/main.py | 28 | # -*- coding: utf-8 -*-
import logging
import os
import time
import werkzeug
import subprocess
from os import listdir
import openerp
from openerp import http
from openerp.http import request
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
index_style = """
<style>
body {
width: 480px;
margin: 60px auto;
font-family: sans-serif;
text-align: justify;
color: #6B6B6B;
}
.text-red {
color: #FF0000;
}
</style>
"""
index_template = """
<!DOCTYPE HTML>
<html>
<head>
<title>Odoo's PosBox</title>
""" + index_style + """
</head>
<body>
<h1>Your PosBox is up and running</h1>
<p>
The PosBox is an hardware adapter that allows you to use
receipt printers and barcode scanners with Odoo's Point of
Sale, <b>version 8.0 or later</b>. You can start an <a href='https://www.odoo.com/start'>online free trial</a>,
or <a href='https://www.odoo.com/start?download'>download and install</a> it yourself.
</p>
<p>
For more information on how to setup the Point of Sale with
the PosBox, please refer to
<a href='https://www.odoo.com/documentation/user/point_of_sale/posbox/index.html'>the manual</a>.
</p>
<p>
To see the status of the connected hardware, please refer
to the <a href='/hw_proxy/status'>hardware status page</a>.
</p>
<p>
Wi-Fi can be configured by visiting the <a href='/wifi'>Wi-Fi configuration page</a>.
</p>
<p>
The PosBox software installed on this posbox is <b>version 14</b>,
the posbox version number is independent from Odoo. You can upgrade
the software on the <a href='/hw_proxy/upgrade/'>upgrade page</a>.
</p>
<p>For any other question, please contact the Odoo support at <a href='mailto:support@odoo.com'>support@odoo.com</a>
</p>
</body>
</html>
"""
class PosboxHomepage(openerp.addons.web.controllers.main.Home):
    """HTTP controllers serving the PosBox status and configuration pages.

    All routes use auth='none' because the PosBox runs without a database;
    pages are assembled from plain strings rather than QWeb templates.
    """

    @http.route('/', type='http', auth='none', website=True)
    def index(self):
        #return request.render('hw_posbox_homepage.index',mimetype='text/html')
        return index_template

    @http.route('/wifi', type='http', auth='none', website=True)
    def wifi(self):
        """Render the Wi-Fi configuration form.

        Networks found by the background scanner are read from
        /tmp/scanned_networks.txt (one ESSID per line) and injected as
        <option> elements; each value is HTML-escaped.
        """
        wifi_template = """
<!DOCTYPE HTML>
<html>
    <head>
        <title>Wifi configuration</title>
        """ + index_style + """
    </head>
    <body>
        <h1>Configure wifi</h1>
        <p>
        Here you can configure how the posbox should connect to wireless networks.
        Currently only Open and WPA networks are supported. When enabling the persistent checkbox,
        the chosen network will be saved and the posbox will attempt to connect to it every time it boots.
        </p>
        <form action='/wifi_connect' method='POST'>
            <table>
                <tr>
                    <td>
                        ESSID:
                    </td>
                    <td>
                        <select name="essid">
"""

        try:
            f = open('/tmp/scanned_networks.txt', 'r')
            for line in f:
                line = line.rstrip()
                line = werkzeug.utils.escape(line)
                wifi_template += '<option value="' + line + '">' + line + '</option>\n'
            f.close()
        except IOError:
            # Scanner has not produced a list yet; show an empty dropdown.
            _logger.warning("No /tmp/scanned_networks.txt")

        wifi_template += """
                        </select>
                    </td>
                </tr>
                <tr>
                    <td>
                        Password:
                    </td>
                    <td>
                        <input type="password" name="password" placeholder="optional"/>
                    </td>
                </tr>
                <tr>
                    <td>
                        Persistent:
                    </td>
                    <td>
                        <input type="checkbox" name="persistent"/>
                    </td>
                </tr>
                <tr>
                    <td/>
                    <td>
                        <input type="submit" value="connect"/>
                    </td>
                </tr>
            </table>
        </form>
        <p>
        You can clear the persistent configuration by clicking below:
            <form action='/wifi_clear'>
                <input type="submit" value="Clear persistent network configuration"/>
            </form>
        </p>
        <form>
    </body>
</html>
"""
        return wifi_template

    @http.route('/wifi_connect', type='http', auth='none', cors='*')
    def connect_to_wifi(self, essid, password, persistent=False):
        # The shell helper expects "1" or "" as its persistence flag.
        if persistent:
            persistent = "1"
        else:
            persistent = ""

        subprocess.call(['/home/pi/odoo/addons/point_of_sale/tools/posbox/configuration/connect_to_wifi.sh', essid, password, persistent])
        return "connecting to " + essid

    @http.route('/wifi_clear', type='http', auth='none', cors='*')
    def clear_wifi_configuration(self):
        os.system('/home/pi/odoo/addons/point_of_sale/tools/posbox/configuration/clear_wifi_configuration.sh')
        return "configuration cleared"

    @http.route('/remote_connect', type='http', auth='none', cors='*')
    def remote_connect(self):
        """Render the page used to enable an ngrok remote-debugging tunnel."""
        ngrok_template = """
<!DOCTYPE HTML>
<html>
    <head>
        <title>Remote debugging</title>
        <script src="http://code.jquery.com/jquery-1.11.0.min.js"></script>
        <script>
        $(function () {
            var upgrading = false;
            $('#enable_debug').click(function () {
                var auth_token = $('#auth_token').val();
                if (auth_token == "") {
                    alert('Please provide an authentication token.');
                } else {
                    $.ajax({
                        url: '/enable_ngrok',
                        data: {
                            'auth_token': auth_token
                        }
                    }).always(function (response) {
                        if (response === 'already running') {
                            alert('Remote debugging already activated.');
                        } else {
                            $('#auth_token').attr('disabled','disabled');
                            $('#enable_debug').html('Enabled remote debugging');
                            $('#enable_debug').removeAttr('href', '')
                            $('#enable_debug').off('click');
                        }
                    });
                }
            });
        });
        </script>
        """ + index_style + """
        <style>
            #enable_debug {
                padding: 10px;
                background: rgb(121, 197, 107);
                color: white;
                border-radius: 3px;
                text-align: center;
                margin: 30px;
                text-decoration: none;
                display: inline-block;
            }
            .centering{
                text-align: center;
            }
        </style>
    </head>
    <body>
        <h1>Remote debugging</h1>
        <p class='text-red'>
        This allows someone to gain remote access to your Posbox, and
        thus your entire local network. Only enable this for someone
        you trust.
        </p>
        <div class='centering'>
            <input type="text" id="auth_token" size="42" placeholder="Authentication Token"/> <br/>
            <a id="enable_debug" href="#">Enable remote debugging</a>
        </div>
    </body>
</html>
"""
        return ngrok_template

    @http.route('/enable_ngrok', type='http', auth='none', cors='*')
    def enable_ngrok(self, auth_token):
        # pgrep exits with 1 when no ngrok process is running yet.
        if subprocess.call(['pgrep', 'ngrok']) == 1:
            subprocess.Popen(['ngrok', 'tcp', '-authtoken', auth_token, '-log', '/tmp/ngrok.log', '22'])
            return 'starting with ' + auth_token
        else:
            return 'already running'
|
galbrads/Gear_Manager | refs/heads/master | Tests/tests_util.py | 1 | import unittest
from PySide import QtCore
import Util
class IsNumTests(unittest.TestCase):
    """Tests for Util.is_number()."""

    def test_IsFloat(self):
        self.assertTrue(Util.is_number(5.2), 'Is Float')

    def test_IsInt(self):
        self.assertTrue(Util.is_number(5), 'Is Int')

    def test_IsChar(self):
        # Non-numeric strings must be rejected.
        self.assertFalse(Util.is_number('C'), 'Is Char')
class RemoveDupTests(unittest.TestCase):
    """Tests for Util.remove_duplicates()."""

    def __init__(self, *args, **kwargs):
        super(RemoveDupTests, self).__init__(*args, **kwargs)
        # Bug fix: the original fixture was ['1', '3' '2', ...] -- the
        # missing comma made Python concatenate the adjacent string
        # literals into a single '32' element.
        self.dups = ['1', '3', '2', '3', '4', '5', '3', '5']
        self.nodups = ['1', '2', '3', '4', '5']

    def test_IsList(self):
        # Non-list input must raise.
        self.assertRaises(ValueError, Util.remove_duplicates, 'not a list')

    def test_LengthTest(self):
        # Result has exactly one entry per distinct value.
        self.assertEqual(len(Util.remove_duplicates(self.dups)), len(set(self.dups)))

    def test_Unique(self):
        # Every element appears exactly once in the result.
        self.assertTrue(all([Util.remove_duplicates(self.dups).count(n) == 1 for n in Util.remove_duplicates(self.dups)]))
class DateTests(unittest.TestCase):
    """Round-trip checks for Util.convert_date() between DB, display and
    Qt date representations (all for 2001-01-01)."""

    def testDates(self):
        year, month, day = 2001, 1, 1
        qt_date = QtCore.QDate(year, month, day)
        db_date = '2001-01-01'
        disp_date = '1/1/2001'

        # Conversions out of the database format.
        self.assertEqual(Util.convert_date('DB2Qt', db_date), qt_date)
        self.assertEqual(Util.convert_date('DB2Disp', db_date), disp_date)
        # Conversions out of the display format.
        self.assertEqual(Util.convert_date('Disp2DB', disp_date), db_date)
        self.assertEqual(Util.convert_date('Disp2Qt', disp_date), qt_date)
        # Conversions out of the Qt format.
        self.assertEqual(Util.convert_date('Qt2Disp', qt_date), disp_date)
        self.assertEqual(Util.convert_date('Qt2DB', qt_date), db_date)
|
tbabej/astropy | refs/heads/master | astropy/coordinates/tests/test_sites.py | 2 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
from ...tests.helper import pytest, assert_quantity_allclose, remote_data, quantity_allclose
from ... import units as u
from .. import Longitude, Latitude, EarthLocation
from ..sites import get_builtin_sites, get_downloaded_sites, SiteRegistry
def test_builtin_sites():
    """Sanity-check the bundled (offline) observatory site registry."""
    reg = get_builtin_sites()

    # Greenwich must be present with roughly correct coordinates.
    greenwich = reg['greenwich']
    lon, lat, el = greenwich.to_geodetic()
    assert_quantity_allclose(lon, Longitude('0:0:0', unit=u.deg),
                             atol=10*u.arcsec)
    assert_quantity_allclose(lat, Latitude('51:28:40', unit=u.deg),
                             atol=1*u.arcsec)
    assert_quantity_allclose(el, 46*u.m, atol=1*u.m)

    names = reg.names
    assert 'greenwich' in names
    assert 'example_site' in names

    # Unknown sites raise KeyError with a helpful message.
    with pytest.raises(KeyError) as exc:
        reg['nonexistent site']
    assert exc.value.args[0] == "Site 'nonexistent site' not in database. Use the 'names' attribute to see available sites."
@remote_data(source='astropy')
def test_online_stes():
    # NOTE(review): function name has a typo ("stes" for "sites"); left
    # unchanged here since pytest discovers it either way.
    """Sanity-check the downloaded (online) site registry."""
    reg = get_downloaded_sites()

    keck = reg['keck']
    lon, lat, el = keck.to_geodetic()
    assert_quantity_allclose(lon, -Longitude('155:28.7', unit=u.deg),
                             atol=0.001*u.deg)
    assert_quantity_allclose(lat, Latitude('19:49.7', unit=u.deg),
                             atol=0.001*u.deg)
    assert_quantity_allclose(el, 4160*u.m, atol=1*u.m)

    names = reg.names
    assert 'keck' in names
    assert 'ctio' in names

    with pytest.raises(KeyError) as exc:
        reg['nonexistent site']
    assert exc.value.args[0] == "Site 'nonexistent site' not in database. Use the 'names' attribute to see available sites."

    # Close-but-wrong names get a "did you mean" suggestion.
    with pytest.raises(KeyError) as exc:
        reg['kec']
    assert exc.value.args[0] == "Site 'kec' not in database. Use the 'names' attribute to see available sites. Did you mean one of: 'keck'?'"
@remote_data(source='astropy')
# this will *try* the online so we have to make it remote_data, even though it
# could fall back on the non-remote version
def test_EarthLocation_basic():
    """EarthLocation.of_site() resolves sites from the registry."""
    greenwichel = EarthLocation.of_site('greenwich')
    lon, lat, el = greenwichel.to_geodetic()
    assert_quantity_allclose(lon, Longitude('0:0:0', unit=u.deg),
                             atol=10*u.arcsec)
    assert_quantity_allclose(lat, Latitude('51:28:40', unit=u.deg),
                             atol=1*u.arcsec)
    assert_quantity_allclose(el, 46*u.m, atol=1*u.m)

    names = EarthLocation.get_site_names()
    assert 'greenwich' in names
    assert 'example_site' in names

    with pytest.raises(KeyError) as exc:
        EarthLocation.of_site('nonexistent site')
    assert exc.value.args[0] == "Site 'nonexistent site' not in database. Use EarthLocation.get_site_names to see available sites."
def test_EarthLocation_state_offline():
    """The builtin registry is cached and only rebuilt when forced."""
    EarthLocation._site_registry = None
    EarthLocation._get_site_registry(force_builtin=True)
    assert EarthLocation._site_registry is not None

    # Plain lookup returns the cached object...
    oldreg = EarthLocation._site_registry
    newreg = EarthLocation._get_site_registry()
    assert oldreg is newreg
    # ...but forcing a rebuild creates a fresh registry.
    newreg = EarthLocation._get_site_registry(force_builtin=True)
    assert oldreg is not newreg
@remote_data(source='astropy')
def test_EarthLocation_state_online():
    """Same caching semantics as offline, but for the downloaded registry."""
    EarthLocation._site_registry = None
    EarthLocation._get_site_registry(force_download=True)
    assert EarthLocation._site_registry is not None

    oldreg = EarthLocation._site_registry
    newreg = EarthLocation._get_site_registry()
    assert oldreg is newreg
    # Forcing a new download replaces the cached registry object.
    newreg = EarthLocation._get_site_registry(force_download=True)
    assert oldreg is not newreg
def test_registry():
    """SiteRegistry stores one location under several names and looks it
    up case-insensitively, returning the *same* object each time."""
    reg = SiteRegistry()
    assert len(reg.names) == 0
    names = ['sitea', 'site A']
    loc = EarthLocation.from_geodetic(lat=1*u.deg, lon=2*u.deg,height=3*u.km)
    reg.add_site(names, loc)
    assert len(reg.names) == 2
    # Scrambled casing must still resolve to the identical object.
    loc1 = reg['SIteA']
    assert loc1 is loc
    loc2 = reg['sIte a']
    assert loc2 is loc
def test_non_EarthLocation():
    """
    A regression test for a typo bug pointed out at the bottom of
    https://github.com/astropy/astropy/pull/4042
    """
    class EarthLocation2(EarthLocation):
        pass

    # Using the builtin data keeps us from needing remote_data.
    # Note that this does *not* mess up the registry for EarthLocation because
    # the registry is cached on a per-class basis.
    EarthLocation2._get_site_registry(force_builtin=True)
    el2 = EarthLocation2.of_site('greenwich')
    # of_site must construct an instance of the *subclass* it was called on.
    assert type(el2) is EarthLocation2
    assert el2.info.name == 'Royal Observatory Greenwich'
def check_builtin_matches_remote(download_url=True):
    """
    This function checks that the builtin sites registry is consistent with the
    remote registry (or a registry at some other location).

    Note that currently this is *not* run by the testing suite (because it
    doesn't start with "test", and is instead meant to be used as a check
    before merging changes in astropy-data)
    """
    builtin_registry = EarthLocation._get_site_registry(force_builtin=True)
    dl_registry = EarthLocation._get_site_registry(force_download=download_url)
    in_dl = {}    # name -> whether the builtin name exists in the download
    matches = {}  # name -> whether both registries agree on the location
    for name in builtin_registry.names:
        in_dl[name] = name in dl_registry
        if in_dl[name]:
            # NOTE(review): compares EarthLocation objects directly with
            # quantity_allclose — assumes they broadcast as Quantities here;
            # confirm against the astropy version in use.
            matches[name] = quantity_allclose(builtin_registry[name], dl_registry[name])
        else:
            matches[name] = False
    if not all(matches.values()):
        # this makes sure we actually see which don't match
        print("In builtin registry but not in download:")
        for name in in_dl:
            if not in_dl[name]:
                print('    ', name)
        print("In both but not the same value:")
        for name in matches:
            if not matches[name] and in_dl[name]:
                print('    ', name, 'builtin:', builtin_registry[name], 'download:', dl_registry[name])
        assert False, "Builtin and download registry aren't consistent - failures printed to stdout"
|
tylerjereddy/scipy | refs/heads/master | scipy/special/_precompute/wright_bessel.py | 12 | """Precompute coefficients of several series expansions
of Wright's generalized Bessel function Phi(a, b, x).
See https://dlmf.nist.gov/10.46.E1 with rho=a, beta=b, z=x.
"""
from argparse import ArgumentParser, RawTextHelpFormatter
import numpy as np
from scipy.integrate import quad
from scipy.optimize import minimize_scalar, curve_fit
from time import time
try:
import sympy # type: ignore[import]
from sympy import EulerGamma, Rational, S, Sum, \
factorial, gamma, gammasimp, pi, polygamma, symbols, zeta
from sympy.polys.polyfuncs import horner # type: ignore[import]
except ImportError:
pass
def series_small_a():
    """Taylor series expansion of Phi(a, b, x) in a=0 up to order 5.

    Returns
    -------
    str
        Report listing symbolic terms A[i], X[i], B[i] with
        Phi(a, b, x) = exp(x)/gamma(b) * sum(A[i] * X[i] * B[i], i=0..5).
    """
    order = 5
    a, b, x, k = symbols("a b x k")
    A = []  # terms with a
    X = []  # terms with x
    B = []  # terms with b (polygammas)
    # Phi(a, b, x) = exp(x)/gamma(b) * sum(A[i] * X[i] * B[i])
    expression = Sum(x**k/factorial(k)/gamma(a*k+b), (k, 0, S.Infinity))
    # Normalize away the leading exp(x)/gamma(b) factor.
    expression = gamma(b)/sympy.exp(x) * expression
    # nth term of taylor series in a=0: a^n/n! * (d^n Phi(a, b, x)/da^n at a=0)
    for n in range(0, order+1):
        term = expression.diff(a, n).subs(a, 0).simplify().doit()
        # set the whole bracket involving polygammas to 1
        x_part = (term.subs(polygamma(0, b), 1)
                  .replace(polygamma, lambda *args: 0))
        # sign convention: x part always positive
        x_part *= (-1)**n
        A.append(a**n/factorial(n))
        X.append(horner(x_part))
        B.append(horner((term/x_part).simplify()))
    # Typo fix in the generated report text ("Tylor" -> "Taylor").
    s = "Taylor series expansion of Phi(a, b, x) in a=0 up to order 5.\n"
    s += "Phi(a, b, x) = exp(x)/gamma(b) * sum(A[i] * X[i] * B[i], i=0..5)\n"
    for name, c in zip(['A', 'X', 'B'], [A, X, B]):
        for i in range(len(c)):
            s += f"\n{name}[{i}] = " + str(c[i])
    return s
# expansion of digamma
def dg_series(z, n):
    """Symbolic expansion of digamma(z) in z=0 to order n.
    See https://dlmf.nist.gov/5.7.E4 and with https://dlmf.nist.gov/5.5.E2
    """
    k = symbols("k")
    # digamma(z) = -1/z - EulerGamma + sum_{k>=2} (-1)^k zeta(k) z^(k-1)
    return -1/z - EulerGamma + \
        sympy.summation((-1)**k * zeta(k) * z**(k-1), (k, 2, n+1))
def pg_series(k, z, n):
    """Symbolic expansion of polygamma(k, z) in z=0 to order n."""
    # polygamma(k, z) is the k-th derivative of digamma(z).
    return sympy.diff(dg_series(z, n+k), z, k)
def series_small_a_small_b():
    """Taylor series expansion of Phi(a, b, x) in a=0 and b=0 up to order 5.

    Be aware of cancellation of poles in b=0 of digamma(b)/Gamma(b) and
    polygamma functions.
    digamma(b)/Gamma(b) = -1 - 2*M_EG*b + O(b^2)
    digamma(b)^2/Gamma(b) = 1/b + 3*M_EG + b*(-5/12*PI^2+7/2*M_EG^2) + O(b^2)
    polygamma(1, b)/Gamma(b) = 1/b + M_EG + b*(1/12*PI^2 + 1/2*M_EG^2) + O(b^2)
    and so on.

    Returns
    -------
    str
        Report listing the terms A[i], X[i] and the coefficients C[k] that
        generate the B[i] polygamma brackets.
    """
    order = 5
    a, b, x, k = symbols("a b x k")
    M_PI, M_EG, M_Z3 = symbols("M_PI M_EG M_Z3")
    c_subs = {pi: M_PI, EulerGamma: M_EG, zeta(3): M_Z3}
    A = []  # terms with a
    X = []  # terms with x
    B = []  # terms with b (polygammas expanded)
    C = []  # terms that generate B
    # Phi(a, b, x) = exp(x) * sum(A[i] * X[i] * B[i])
    # B[0] = 1
    # B[k] = sum(C[k] * b**k/k!, k=0..)
    # Note: C[k] can be obtained from a series expansion of 1/gamma(b).
    expression = gamma(b)/sympy.exp(x) * \
        Sum(x**k/factorial(k)/gamma(a*k+b), (k, 0, S.Infinity))
    # nth term of taylor series in a=0: a^n/n! * (d^n Phi(a, b, x)/da^n at a=0)
    for n in range(0, order+1):
        term = expression.diff(a, n).subs(a, 0).simplify().doit()
        # set the whole bracket involving polygammas to 1
        x_part = (term.subs(polygamma(0, b), 1)
                  .replace(polygamma, lambda *args: 0))
        # sign convention: x part always positive
        x_part *= (-1)**n
        # expansion of polygamma part with 1/gamma(b)
        pg_part = term/x_part/gamma(b)
        if n >= 1:
            # Note: highest term is digamma^n
            pg_part = pg_part.replace(polygamma,
                                      lambda k, x: pg_series(k, x, order+1+n))
            pg_part = (pg_part.series(b, 0, n=order+1-n)
                       .removeO()
                       .subs(polygamma(2, 1), -2*zeta(3))
                       .simplify()
                       )
        A.append(a**n/factorial(n))
        X.append(horner(x_part))
        B.append(pg_part)
    # Calculate C and put in the k!
    C = sympy.Poly(B[1].subs(c_subs), b).coeffs()
    C.reverse()
    for i in range(len(C)):
        C[i] = (C[i] * factorial(i)).simplify()
    # Typo fixes in the generated report text ("Tylor" -> "Taylor",
    # "allone" -> "alone" below).
    s = "Taylor series expansion of Phi(a, b, x) in a=0 and b=0 up to order 5."
    s += "\nPhi(a, b, x) = exp(x) * sum(A[i] * X[i] * B[i], i=0..5)\n"
    s += "B[0] = 1\n"
    s += "B[i] = sum(C[k+i-1] * b**k/k!, k=0..)\n"
    s += "\nM_PI = pi"
    s += "\nM_EG = EulerGamma"
    s += "\nM_Z3 = zeta(3)"
    for name, c in zip(['A', 'X'], [A, X]):
        for i in range(len(c)):
            s += f"\n{name}[{i}] = "
            s += str(c[i])
    # For C, do also compute the values numerically
    for i in range(len(C)):
        s += f"\n# C[{i}] = "
        s += str(C[i])
        s += f"\nC[{i}] = "
        s += str(C[i].subs({M_EG: EulerGamma, M_PI: pi, M_Z3: zeta(3)})
                 .evalf(17))
    # Does B have the assumed structure?
    s += "\n\nTest if B[i] does have the assumed structure."
    s += "\nC[i] are derived from B[1] alone."
    s += "\nTest B[2] == C[1] + b*C[2] + b^2/2*C[3] + b^3/6*C[4] + .."
    test = sum([b**k/factorial(k) * C[k+1] for k in range(order-1)])
    test = (test - B[2].subs(c_subs)).simplify()
    s += f"\ntest successful = {test==S(0)}"
    s += "\nTest B[3] == C[2] + b*C[3] + b^2/2*C[4] + .."
    test = sum([b**k/factorial(k) * C[k+2] for k in range(order-2)])
    test = (test - B[3].subs(c_subs)).simplify()
    s += f"\ntest successful = {test==S(0)}"
    return s
def asymptotic_series():
    """Asymptotic expansion for large x.

    Phi(a, b, x) ~ Z^(1/2-b) * exp((1+a)/a * Z) * sum_k (-1)^k * C_k / Z^k
    Z = (a*x)^(1/(1+a))

    Wright (1935) lists the coefficients C_0 and C_1 (he calls them a_0 and
    a_1). With slightly different notation, Paris (2017) lists coefficients
    c_k up to order k=3.
    Paris (2017) uses ZP = (1+a)/a * Z (ZP = Z of Paris) and
    C_k = C_0 * (-a/(1+a))^k * c_k
    """
    order = 8

    class g(sympy.Function):
        """Helper function g according to Wright (1935)
        g(n, rho, v) = (1 + (rho+2)/3 * v + (rho+2)*(rho+3)/(2*3) * v^2 + ...)
        Note: Wright (1935) uses square root of above definition.
        """
        nargs = 3

        @classmethod
        def eval(cls, n, rho, v):
            # Recursive build-up of the truncated series, one power at a time.
            if not n >= 0:
                raise ValueError("must have n >= 0")
            elif n == 0:
                return 1
            else:
                return g(n-1, rho, v) \
                    + gammasimp(gamma(rho+2+n)/gamma(rho+2)) \
                    / gammasimp(gamma(3+n)/gamma(3))*v**n

    class coef_C(sympy.Function):
        """Calculate coefficients C_m for integer m.
        C_m is the coefficient of v^(2*m) in the Taylor expansion in v=0 of
        Gamma(m+1/2)/(2*pi) * (2/(rho+1))^(m+1/2) * (1-v)^(-b)
        * g(rho, v)^(-m-1/2)
        """
        nargs = 3

        @classmethod
        def eval(cls, m, rho, beta):
            if not m >= 0:
                raise ValueError("must have m >= 0")
            v = symbols("v")
            expression = (1-v)**(-beta) * g(2*m, rho, v)**(-m-Rational(1, 2))
            # Extract the coefficient of v^(2m) via differentiation at v=0.
            res = expression.diff(v, 2*m).subs(v, 0) / factorial(2*m)
            res = res * (gamma(m + Rational(1, 2)) / (2*pi)
                         * (2/(rho+1))**(m + Rational(1, 2)))
            return res

    # in order to have nice ordering/sorting of expressions, we set a = xa.
    xa, b, xap1 = symbols("xa b xap1")
    C0 = coef_C(0, xa, b)
    # a1 = a(1, rho, beta)
    s = "Asymptotic expansion for large x\n"
    s += "Phi(a, b, x) = Z**(1/2-b) * exp((1+a)/a * Z) \n"
    s += "               * sum((-1)**k * C[k]/Z**k, k=0..6)\n\n"
    s += "Z      = pow(a * x, 1/(1+a))\n"
    s += "A[k]   = pow(a, k)\n"
    s += "B[k]   = pow(b, k)\n"
    s += "Ap1[k] = pow(1+a, k)\n\n"
    s += "C[0] = 1./sqrt(2. * M_PI * Ap1[1])\n"
    for i in range(1, order+1):
        # Normalize by C0/(1+a)^i, clear denominators, and collect powers of b.
        expr = (coef_C(i, xa, b) / (C0/(1+xa)**i)).simplify()
        # NOTE(review): '.denominator()' is called as a method here; on some
        # sympy versions 'denominator' is a property — confirm against the
        # pinned sympy version before re-running this script.
        factor = [x.denominator() for x in sympy.Poly(expr).coeffs()]
        factor = sympy.lcm(factor)
        expr = (expr * factor).simplify().collect(b, sympy.factor)
        expr = expr.xreplace({xa+1: xap1})
        s += f"C[{i}] = C[0] / ({factor} * Ap1[{i}])\n"
        s += f"C[{i}] *= {str(expr)}\n\n"
    import re
    # Rewrite symbolic powers as C-style array lookups for the report.
    re_a = re.compile(r'xa\*\*(\d+)')
    s = re_a.sub(r'A[\1]', s)
    re_b = re.compile(r'b\*\*(\d+)')
    s = re_b.sub(r'B[\1]', s)
    s = s.replace('xap1', 'Ap1[1]')
    s = s.replace('xa', 'a')
    # max integer = 2^31-1 = 2,147,483,647. Solution: Put a point after 10
    # or more digits.
    re_digits = re.compile(r'(\d{10,})')
    s = re_digits.sub(r'\1.', s)
    return s
def optimal_epsilon_integral():
    """Fit optimal choice of epsilon for integral representation.

    The integrand of
        int_0^pi P(eps, a, b, x, phi) * dphi
    can exhibit oscillatory behaviour. It stems from the cosine of P and can be
    minimized by minimizing the arc length of the argument
        f(phi) = eps * sin(phi) - x * eps^(-a) * sin(a * phi) + (1 - b) * phi
    of cos(f(phi)).
    We minimize the arc length in eps for a grid of values (a, b, x) and fit a
    parametric function to it.

    Returns
    -------
    str
        Report with the fitted parameters A0..A5.
    """
    def fp(eps, a, b, x, phi):
        """Derivative of f w.r.t. phi."""
        eps_a = np.power(1. * eps, -a)
        return eps * np.cos(phi) - a * x * eps_a * np.cos(a * phi) + 1 - b

    def arclength(eps, a, b, x, epsrel=1e-2, limit=100):
        """Compute arc length of f.

        Note that the arc length of a function f from t0 to t1 is given by
            int_t0^t1 sqrt(1 + f'(t)^2) dt
        """
        # Bug fix: 'limit' was previously ignored — a literal 100 was always
        # passed to quad regardless of the parameter value.
        return quad(lambda phi: np.sqrt(1 + fp(eps, a, b, x, phi)**2),
                    0, np.pi,
                    epsrel=epsrel, limit=limit)[0]

    # grid of minimal arc length values
    data_a = [1e-3, 0.1, 0.5, 0.9, 1, 2, 4, 5, 6, 8]
    data_b = [0, 1, 4, 7, 10]
    data_x = [1, 1.5, 2, 4, 10, 20, 50, 100, 200, 500, 1e3, 5e3, 1e4]
    data_a, data_b, data_x = np.meshgrid(data_a, data_b, data_x)
    data_a, data_b, data_x = (data_a.flatten(), data_b.flatten(),
                              data_x.flatten())
    best_eps = []
    for i in range(data_x.size):
        best_eps.append(
            minimize_scalar(lambda eps: arclength(eps, data_a[i], data_b[i],
                                                  data_x[i]),
                            bounds=(1e-3, 1000),
                            method='Bounded', options={'xatol': 1e-3}).x
            )
    best_eps = np.array(best_eps)
    # pandas would be nice, but here a dictionary is enough
    df = {'a': data_a,
          'b': data_b,
          'x': data_x,
          'eps': best_eps,
          }

    def func(data, A0, A1, A2, A3, A4, A5):
        """Compute parametric function to fit."""
        a = data['a']
        b = data['b']
        x = data['x']
        return (A0 * b * np.exp(-0.5 * a)
                + np.exp(A1 + 1 / (1 + a) * np.log(x) - A2 * np.exp(-A3 * a)
                         + A4 / (1 + np.exp(A5 * a))))

    func_params = list(curve_fit(func, df, df['eps'], method='trf')[0])

    s = "Fit optimal eps for integrand P via minimal arc length\n"
    s += "with parametric function:\n"
    s += "optimal_eps = (A0 * b * exp(-a/2) + exp(A1 + 1 / (1 + a) * log(x)\n"
    s += "               - A2 * exp(-A3 * a) + A4 / (1 + exp(A5 * a)))\n\n"
    s += "Fitted parameters A0 to A5 are:\n"
    s += ', '.join(['{:.5g}'.format(x) for x in func_params])
    return s
def main():
    """Command-line entry point: print the selected precomputed expansion."""
    t0 = time()
    parser = ArgumentParser(description=__doc__,
                            formatter_class=RawTextHelpFormatter)
    # Typo fix in help text: "chose" -> "choose".
    parser.add_argument('action', type=int, choices=[1, 2, 3, 4],
                        help='choose what expansion to precompute\n'
                             '1 : Series for small a\n'
                             '2 : Series for small a and small b\n'
                             '3 : Asymptotic series for large x\n'
                             '    This may take some time (>4h).\n'
                             '4 : Fit optimal eps for integral representation.'
                        )
    args = parser.parse_args()

    # Dispatch table; the fallback cannot trigger in practice because
    # argparse already restricts 'action' to the listed choices.
    switch = {1: lambda: print(series_small_a()),
              2: lambda: print(series_small_a_small_b()),
              3: lambda: print(asymptotic_series()),
              4: lambda: print(optimal_epsilon_integral())
              }
    switch.get(args.action, lambda: print("Invalid input."))()

    print("\n{:.1f} minutes elapsed.\n".format((time() - t0)/60))


if __name__ == '__main__':
    main()
|
bittorrent/bigcouch | refs/heads/master | couchjs/scons/scons-local-2.0.1/SCons/Tool/as.py | 61 | """SCons.Tool.as
Tool-specific initialization for as, the generic Posix assembler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/as.py 5134 2010/08/16 23:02:40 bdeegan"
import SCons.Defaults
import SCons.Tool
import SCons.Util
assemblers = ['as']
ASSuffixes = ['.s', '.asm', '.ASM']
ASPPSuffixes = ['.spp', '.SPP', '.sx']
if SCons.Util.case_sensitive_suffixes('.s', '.S'):
ASPPSuffixes.extend(['.S'])
else:
ASSuffixes.extend(['.S'])
def generate(env):
    """Add Builders and construction variables for as to an Environment."""
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)

    # Plain assembler sources: assembled directly, no preprocessing.
    for suffix in ASSuffixes:
        static_obj.add_action(suffix, SCons.Defaults.ASAction)
        shared_obj.add_action(suffix, SCons.Defaults.ASAction)
        static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
        shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)

    # Sources that must run through the C preprocessor first.
    for suffix in ASPPSuffixes:
        static_obj.add_action(suffix, SCons.Defaults.ASPPAction)
        shared_obj.add_action(suffix, SCons.Defaults.ASPPAction)
        static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
        shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)

    # Fall back on plain 'as' if no assembler is detected on the PATH.
    env['AS'] = env.Detect(assemblers) or 'as'
    env['ASFLAGS'] = SCons.Util.CLVar('')
    env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES'
    env['ASPPFLAGS'] = '$ASFLAGS'
    # Preprocessed assembly is compiled via $CC so the cpp flags apply.
    env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES'
def exists(env):
    """Return the detected assembler executable, or None if none is found."""
    return env.Detect(assemblers)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
mhbu50/erpnext | refs/heads/develop | erpnext/agriculture/doctype/detected_disease/detected_disease.py | 23 | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class DetectedDisease(Document):
    # Controller for the "Detected Disease" DocType; all behaviour is
    # inherited from frappe's Document (standard CRUD only).
    pass
|
emgee/formal | refs/heads/master | formal/test/test_form.py | 1 | from twisted.trial import unittest
from nevow import testutil
from nevow import context
import formal
from formal.validation import FieldRequiredError
class TestForm(unittest.TestCase):
    """Unit tests for formal.Form field naming and submission processing."""

    def test_fieldName(self):
        # Field names must be valid identifiers: no embedded/trailing spaces.
        form = formal.Form()
        form.addField('foo', formal.String())
        self.assertRaises(ValueError, form.addField, 'spaceAtTheEnd ', formal.String())
        self.assertRaises(ValueError, form.addField, 'got a space in it', formal.String())

    def test_process(self):
        # A valid submission populates form.data and yields no errors.
        form = formal.Form()
        request = testutil.FakeRequest(args={'foo': ['bar', ]})
        ctx = context.RequestContext(tag=request)
        form.addField('foo', formal.String())
        form.addAction(lambda *a, **kw: None)
        d = form.process(ctx)
        # process() fires its deferred with None on success ...
        d.addCallback(self.failUnlessEqual, None)
        def done(_):
            # ... and the submitted value ends up on the form.
            self.failUnlessEqual(form.data['foo'], 'bar')
        d.addCallback(done)
        return d

    def test_processError(self):
        # A missing required field surfaces as a FieldRequiredError.
        form = formal.Form()
        request = testutil.FakeRequest()
        ctx = context.RequestContext(tag=request)
        form.addField('foo', formal.String(required=True))
        form.addAction(lambda *a, **kw: None)
        d = form.process(ctx)
        def done(errors):
            self.failIfEqual(errors, None)
            self.failUnless(isinstance(errors.getFieldError('foo'), FieldRequiredError))
        d.addCallbacks(done)
        return d
|
2014cdbg7/2014cdbg7 | refs/heads/master | wsgi/static/Brython2.1.0-20140419-113919/Lib/textwrap.py | 745 | """Text wrapping and filling.
"""
# Copyright (C) 1999-2001 Gregory P. Ward.
# Copyright (C) 2002, 2003 Python Software Foundation.
# Written by Greg Ward <gward@python.net>
import re
__all__ = ['TextWrapper', 'wrap', 'fill', 'dedent', 'indent']
# Hardcode the recognized whitespace characters to the US-ASCII
# whitespace characters. The main reason for doing this is that in
# ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales
# that character winds up in string.whitespace. Respecting
# string.whitespace in those cases would 1) make textwrap treat 0xa0 the
# same as any other whitespace char, which is clearly wrong (it's a
# *non-breaking* space), 2) possibly cause problems with Unicode,
# since 0xa0 is not in range(128).
_whitespace = '\t\n\x0b\x0c\r '
class TextWrapper:
    """
    Object for wrapping/filling text.  The public interface consists of
    the wrap() and fill() methods; the other methods are just there for
    subclasses to override in order to tweak the default behaviour.
    If you want to completely replace the main wrapping algorithm,
    you'll probably have to override _wrap_chunks().

    Several instance attributes control various aspects of wrapping:
      width (default: 70)
        the maximum width of wrapped lines (unless break_long_words
        is false)
      initial_indent (default: "")
        string that will be prepended to the first line of wrapped
        output.  Counts towards the line's width.
      subsequent_indent (default: "")
        string that will be prepended to all lines save the first
        of wrapped output; also counts towards each line's width.
      expand_tabs (default: true)
        Expand tabs in input text to spaces before further processing.
        Each tab will become 0 .. 'tabsize' spaces, depending on its position
        in its line.  If false, each tab is treated as a single character.
      tabsize (default: 8)
        Expand tabs in input text to 0 .. 'tabsize' spaces, unless
        'expand_tabs' is false.
      replace_whitespace (default: true)
        Replace all whitespace characters in the input text by spaces
        after tab expansion.  Note that if expand_tabs is false and
        replace_whitespace is true, every tab will be converted to a
        single space!
      fix_sentence_endings (default: false)
        Ensure that sentence-ending punctuation is always followed
        by two spaces.  Off by default because the algorithm is
        (unavoidably) imperfect.
      break_long_words (default: true)
        Break words longer than 'width'.  If false, those words will not
        be broken, and some lines might be longer than 'width'.
      break_on_hyphens (default: true)
        Allow breaking hyphenated words. If true, wrapping will occur
        preferably on whitespaces and right after hyphens part of
        compound words.
      drop_whitespace (default: true)
        Drop leading and trailing whitespace from lines.
    """

    # Map each recognized (US-ASCII) whitespace character to a plain space.
    # The character set mirrors the module-level ``_whitespace`` constant
    # ('\t\n\x0b\x0c\r '); building the table from an inline literal keeps
    # the class self-contained.
    unicode_whitespace_trans = dict.fromkeys(map(ord, '\t\n\x0b\x0c\r '),
                                             ord(' '))

    # This funky little regex is just the trick for splitting
    # text up into word-wrappable chunks.  E.g.
    #   "Hello there -- you goof-ball, use the -b option!"
    # splits into
    #   Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option!
    # (after stripping out empty strings).
    wordsep_re = re.compile(
        r'(\s+|'                                  # any whitespace
        r'[^\s\w]*\w+[^0-9\W]-(?=\w+[^0-9\W])|'   # hyphenated words
        r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))')   # em-dash

    # This less funky little regex just split on recognized spaces. E.g.
    #   "Hello there -- you goof-ball, use the -b option!"
    # splits into
    #   Hello/ /there/ /--/ /you/ /goof-ball,/ /use/ /the/ /-b/ /option!/
    wordsep_simple_re = re.compile(r'(\s+)')

    # XXX this is not locale- or charset-aware -- string.lowercase
    # is US-ASCII only (and therefore English-only)
    sentence_end_re = re.compile(r'[a-z]'      # lowercase letter
                                 r'[\.\!\?]'   # sentence-ending punct.
                                 r'[\"\']?'    # optional end-of-quote
                                 r'\Z')        # end of chunk

    def __init__(self,
                 width=70,
                 initial_indent="",
                 subsequent_indent="",
                 expand_tabs=True,
                 replace_whitespace=True,
                 fix_sentence_endings=False,
                 break_long_words=True,
                 drop_whitespace=True,
                 break_on_hyphens=True,
                 tabsize=8):
        self.width = width
        self.initial_indent = initial_indent
        self.subsequent_indent = subsequent_indent
        self.expand_tabs = expand_tabs
        self.replace_whitespace = replace_whitespace
        self.fix_sentence_endings = fix_sentence_endings
        self.break_long_words = break_long_words
        self.drop_whitespace = drop_whitespace
        self.break_on_hyphens = break_on_hyphens
        self.tabsize = tabsize

    # -- Private methods -----------------------------------------------
    # (possibly useful for subclasses to override)

    def _munge_whitespace(self, text):
        """_munge_whitespace(text : string) -> string

        Munge whitespace in text: expand tabs and convert all other
        whitespace characters to spaces.  Eg. " foo\tbar\n\nbaz"
        becomes " foo    bar  baz".
        """
        if self.expand_tabs:
            text = text.expandtabs(self.tabsize)
        if self.replace_whitespace:
            text = text.translate(self.unicode_whitespace_trans)
        return text

    def _split(self, text):
        """_split(text : string) -> [string]

        Split the text to wrap into indivisible chunks.  Chunks are
        not quite the same as words; see _wrap_chunks() for full
        details.  As an example, the text
          Look, goof-ball -- use the -b option!
        breaks into the following chunks:
          'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ',
          'use', ' ', 'the', ' ', '-b', ' ', 'option!'
        if break_on_hyphens is True, or in:
          'Look,', ' ', 'goof-ball', ' ', '--', ' ',
          'use', ' ', 'the', ' ', '-b', ' ', option!'
        otherwise.
        """
        if self.break_on_hyphens is True:
            chunks = self.wordsep_re.split(text)
        else:
            chunks = self.wordsep_simple_re.split(text)
        # re.split leaves empty strings between adjacent captures; drop them.
        chunks = [c for c in chunks if c]
        return chunks

    def _fix_sentence_endings(self, chunks):
        """_fix_sentence_endings(chunks : [string])

        Correct for sentence endings buried in 'chunks'.  Eg. when the
        original text contains "... foo.\nBar ...", munge_whitespace()
        and split() will convert that to [..., "foo.", " ", "Bar", ...]
        which has one too few spaces; this method simply changes the one
        space to two.
        """
        i = 0
        patsearch = self.sentence_end_re.search
        while i < len(chunks)-1:
            if chunks[i+1] == " " and patsearch(chunks[i]):
                # Bug fix: widen the separator to TWO spaces, as the
                # docstring promises (assigning a single space is a no-op).
                chunks[i+1] = "  "
                i += 2
            else:
                i += 1

    def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
        """_handle_long_word(chunks : [string],
                             cur_line : [string],
                             cur_len : int, width : int)

        Handle a chunk of text (most likely a word, not whitespace) that
        is too long to fit in any line.
        """
        # Figure out when indent is larger than the specified width, and make
        # sure at least one character is stripped off on every pass
        if width < 1:
            space_left = 1
        else:
            space_left = width - cur_len

        # If we're allowed to break long words, then do so: put as much
        # of the next chunk onto the current line as will fit.
        if self.break_long_words:
            cur_line.append(reversed_chunks[-1][:space_left])
            reversed_chunks[-1] = reversed_chunks[-1][space_left:]

        # Otherwise, we have to preserve the long word intact.  Only add
        # it to the current line if there's nothing already there --
        # that minimizes how much we violate the width constraint.
        elif not cur_line:
            cur_line.append(reversed_chunks.pop())

        # If we're not allowed to break long words, and there's already
        # text on the current line, do nothing.  Next time through the
        # main loop of _wrap_chunks(), we'll wind up here again, but
        # cur_len will be zero, so the next line will be entirely
        # devoted to the long word that we can't handle right now.

    def _wrap_chunks(self, chunks):
        """_wrap_chunks(chunks : [string]) -> [string]

        Wrap a sequence of text chunks and return a list of lines of
        length 'self.width' or less.  (If 'break_long_words' is false,
        some lines may be longer than this.)  Chunks correspond roughly
        to words and the whitespace between them: each chunk is
        indivisible (modulo 'break_long_words'), but a line break can
        come between any two chunks.  Chunks should not have internal
        whitespace; ie. a chunk is either all whitespace or a "word".
        Whitespace chunks will be removed from the beginning and end of
        lines, but apart from that whitespace is preserved.
        """
        lines = []
        if self.width <= 0:
            raise ValueError("invalid width %r (must be > 0)" % self.width)

        # Arrange in reverse order so items can be efficiently popped
        # from a stack of chucks.
        chunks.reverse()

        while chunks:

            # Start the list of chunks that will make up the current line.
            # cur_len is just the length of all the chunks in cur_line.
            cur_line = []
            cur_len = 0

            # Figure out which static string will prefix this line.
            if lines:
                indent = self.subsequent_indent
            else:
                indent = self.initial_indent

            # Maximum width for this line.
            width = self.width - len(indent)

            # First chunk on line is whitespace -- drop it, unless this
            # is the very beginning of the text (ie. no lines started yet).
            if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                del chunks[-1]

            while chunks:
                l = len(chunks[-1])

                # Can at least squeeze this chunk onto the current line.
                if cur_len + l <= width:
                    cur_line.append(chunks.pop())
                    cur_len += l

                # Nope, this line is full.
                else:
                    break

            # The current line is full, and the next chunk is too big to
            # fit on *any* line (not just this one).
            if chunks and len(chunks[-1]) > width:
                self._handle_long_word(chunks, cur_line, cur_len, width)

            # If the last chunk on this line is all whitespace, drop it.
            if self.drop_whitespace and cur_line and cur_line[-1].strip() == '':
                del cur_line[-1]

            # Convert current line back to a string and store it in list
            # of all lines (return value).
            if cur_line:
                lines.append(indent + ''.join(cur_line))

        return lines

    # -- Public interface ----------------------------------------------

    def wrap(self, text):
        """wrap(text : string) -> [string]

        Reformat the single paragraph in 'text' so it fits in lines of
        no more than 'self.width' columns, and return a list of wrapped
        lines.  Tabs in 'text' are expanded with string.expandtabs(),
        and all other whitespace characters (including newline) are
        converted to space.
        """
        text = self._munge_whitespace(text)
        chunks = self._split(text)
        if self.fix_sentence_endings:
            self._fix_sentence_endings(chunks)
        return self._wrap_chunks(chunks)

    def fill(self, text):
        """fill(text : string) -> string

        Reformat the single paragraph in 'text' to fit in lines of no
        more than 'self.width' columns, and return a new string
        containing the entire wrapped paragraph.
        """
        return "\n".join(self.wrap(text))
# -- Convenience interface ---------------------------------------------
def wrap(text, width=70, **kwargs):
    """Wrap a single paragraph of text, returning a list of wrapped lines.

    Convenience front end to TextWrapper: builds a throwaway wrapper
    configured with *width* plus any TextWrapper keyword arguments and
    delegates to its wrap() method.  By default, tabs are expanded and all
    other whitespace (including newlines) is converted to spaces.
    """
    wrapper = TextWrapper(width=width, **kwargs)
    return wrapper.wrap(text)
def fill(text, width=70, **kwargs):
    """Fill a single paragraph of text, returning a new string.

    Convenience front end to TextWrapper: wraps *text* to lines of at most
    *width* columns and joins them into one string.  As with wrap(), tabs
    are expanded and other whitespace is converted to spaces; extra keyword
    arguments are forwarded to TextWrapper.
    """
    wrapper = TextWrapper(width=width, **kwargs)
    return wrapper.fill(text)
# -- Loosely related functionality -------------------------------------
_whitespace_only_re = re.compile('^[ \t]+$', re.MULTILINE)
_leading_whitespace_re = re.compile('(^[ \t]*)(?:[^ \t\n])', re.MULTILINE)


def dedent(text):
    """Remove any common leading whitespace from every line in `text`.

    Useful for making triple-quoted strings line up with the left edge of
    the display while keeping them indented in the source code.

    Tabs and spaces both count as whitespace but are not equal: the lines
    "  hello" and "\thello" share no common leading whitespace.
    """
    # Lines consisting solely of blanks/tabs contribute no indentation.
    text = _whitespace_only_re.sub('', text)

    # Determine the longest indent shared by every non-blank line.
    margin = None
    for prefix in _leading_whitespace_re.findall(text):
        if margin is None or margin.startswith(prefix):
            # First line seen, or a shallower indent consistent with the
            # current margin: adopt the (shorter) prefix.
            margin = prefix
        elif not prefix.startswith(margin):
            # Tabs vs. spaces mismatch: no common margin exists at all.
            margin = ""
            break
        # else: line is indented at least as deeply as margin -- keep it.

    if margin:
        text = re.sub(r'(?m)^' + margin, '', text)
    return text
def indent(text, prefix, predicate=None):
    """Add *prefix* to the beginning of selected lines in *text*.

    Lines for which ``predicate(line)`` is true receive the prefix.  When
    no predicate is given, every line containing non-whitespace characters
    is prefixed.
    """
    if predicate is None:
        def predicate(line):
            # Default: skip blank and whitespace-only lines.
            return line.strip()

    pieces = []
    for line in text.splitlines(True):
        pieces.append(prefix + line if predicate(line) else line)
    return ''.join(pieces)
if __name__ == "__main__":
    # Quick manual smoke test: run the module directly to see dedent() work.
    #print dedent("\tfoo\n\tbar")
    #print dedent("  \thello there\n  \t  how are you?")
    print(dedent("Hello there.\n  This is indented."))
|
alfa-addon/addon | refs/heads/master | plugin.video.alfa/lib/python_libtorrent/linux_x86_64/1.0.9/__init__.py | 362 | #-*- coding: utf-8 -*-
'''
python-libtorrent for Kodi (script.module.libtorrent)
Copyright (C) 2015-2016 DiMartino, srg70, RussakHH, aisman
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
|
Medigate/cutiuta-server | refs/heads/master | cutiuta-server/env/lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/jpcntx.py | 1776 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .compat import wrap_ord
# Number of frequency categories used in the jp2CharContext table below.
NUM_OF_CATEGORY = 6
# Returned by get_confidence() when too little data has been accumulated.
DONT_KNOW = -1
# got_enough_data() requires more than this many relevant sequences.
ENOUGH_REL_THRESHOLD = 100
# feed() stops analysing (sets _mDone) once this many sequences are seen.
MAX_REL_THRESHOLD = 1000
# get_confidence() returns DONT_KNOW unless more than this many sequences.
MINIMUM_DATA_THRESHOLD = 4
# This is hiragana 2-char sequence table, the number in each cell represents its frequency category
jp2CharContext = (
(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)
class JapaneseContextAnalysis:
    """Estimate how "Japanese-like" a byte stream is from hiragana context.

    Subclasses implement get_order() to map an encoded character to its
    hiragana order.  feed() tallies, via jp2CharContext, the frequency
    category of each consecutive pair of hiragana characters, and
    get_confidence() turns those tallies into a confidence value.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        """Reset all accumulated state so the analyser can be reused."""
        self._mTotalRel = 0  # total relevant (hiragana pair) sequences seen
        # category counters, each integer counts sequences in its category
        self._mRelSample = [0] * NUM_OF_CATEGORY
        # if last byte in current buffer is not the last byte of a character,
        # we need to know how many bytes to skip in the next buffer
        self._mNeedToSkipCharNum = 0
        self._mLastCharOrder = -1  # hiragana order of the previous char
        # If this flag is set to True, detection is done and a conclusion has
        # been made
        self._mDone = False

    def feed(self, aBuf, aLen):
        """Consume aLen bytes of aBuf, updating the pair-category counters."""
        if self._mDone:
            return
        # The buffer we got is byte oriented, and a character may span more
        # than one buffer.  If the last one or two bytes of the previous
        # buffer were not a complete character, we recorded how many bytes
        # are needed to complete it and skip those bytes here.  We could
        # record those bytes and analyse the character once complete, but one
        # character makes little difference, so simply skipping it keeps the
        # logic simple and improves performance.
        i = self._mNeedToSkipCharNum
        while i < aLen:
            order, charLen = self.get_order(aBuf[i:i + 2])
            i += charLen
            if i > aLen:
                # Character straddles the buffer end: remember how far past
                # the end it reaches and resume after it on the next feed().
                self._mNeedToSkipCharNum = i - aLen
                self._mLastCharOrder = -1
            else:
                if (order != -1) and (self._mLastCharOrder != -1):
                    self._mTotalRel += 1
                    if self._mTotalRel > MAX_REL_THRESHOLD:
                        # Enough evidence gathered; stop analysing for good.
                        self._mDone = True
                        break
                    self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
                self._mLastCharOrder = order

    def got_enough_data(self):
        """Return True once enough hiragana pairs have been observed."""
        return self._mTotalRel > ENOUGH_REL_THRESHOLD

    def get_confidence(self):
        """Return the fraction of pairs that fall outside category 0.

        NOTE(review): no ``from __future__ import division`` is visible in
        this module, so under Python 2 this would be integer division —
        confirm the intended behaviour on Python 2.
        """
        # This is just one way to calculate confidence. It works well for me.
        if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
            return (self._mTotalRel - self._mRelSample[0]) / self._mTotalRel
        else:
            return DONT_KNOW

    def get_order(self, aBuf):
        """Base implementation: no hiragana, single-byte char (overridden)."""
        return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
    """Japanese context analysis for SHIFT_JIS / CP932 encoded input."""

    def __init__(self):
        # Bug fix: the base-class constructor (which calls reset()) was not
        # invoked, so using feed()/get_confidence() on a freshly constructed
        # analyser raised AttributeError unless reset() was called manually.
        # Explicit base call keeps Python 2 old-style class compatibility.
        JapaneseContextAnalysis.__init__(self)
        self.charset_name = "SHIFT_JIS"

    def get_charset_name(self):
        """Return the detected charset name ("SHIFT_JIS", or "CP932" once a
        CP932-only lead byte has been seen)."""
        return self.charset_name

    def get_order(self, aBuf):
        """Return (hiragana order, char byte length) for the char at aBuf[0].

        Order is -1 when the character is not hiragana.  As a side effect,
        upgrades charset_name to "CP932" when a CP932-only lead byte occurs.
        """
        if not aBuf:
            return -1, 1
        # find out current char's byte length
        first_char = wrap_ord(aBuf[0])
        if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
            charLen = 2
            if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
                # These lead bytes exist only in Microsoft's CP932 superset.
                self.charset_name = "CP932"
        else:
            charLen = 1
        # return its order if it is hiragana
        if len(aBuf) > 1:
            second_char = wrap_ord(aBuf[1])
            if (first_char == 202) and (0x9F <= second_char <= 0xF1):
                return second_char - 0x9F, charLen
        return -1, charLen
class EUCJPContextAnalysis(JapaneseContextAnalysis):
    """Japanese context analysis for EUC-JP encoded input."""

    def get_order(self, aBuf):
        """Return (hiragana order, char byte length) for the char at aBuf[0].

        Order is -1 when the character is not hiragana.
        """
        if not aBuf:
            return -1, 1
        lead = wrap_ord(aBuf[0])
        # Determine the byte length of the current character from its lead.
        if lead == 0x8F:
            char_len = 3
        elif lead == 0x8E or 0xA1 <= lead <= 0xFE:
            char_len = 2
        else:
            char_len = 1
        # Hiragana occupies the 0xA4 row; map the trail byte to its order.
        if lead == 0xA4 and len(aBuf) > 1:
            trail = wrap_ord(aBuf[1])
            if 0xA1 <= trail <= 0xF3:
                return trail - 0xA1, char_len
        return -1, char_len
# flake8: noqa
|
masschallenge/django-accelerator | refs/heads/development | accelerator_abstract/models/base_nav_tree_item.py | 1 | from __future__ import unicode_literals
import swapper
from django.db import models
from sitetree.models import TreeItemBase
from accelerator_abstract.models.accelerator_model import AcceleratorModel
from accelerator_abstract.models.base_base_profile import (
EXPERT_USER_TYPE,
ENTREPRENEUR_USER_TYPE
)
# Django ``choices`` for BaseNavTreeItem.user_type (the field also allows
# blank, i.e. no user-type restriction).
NAV_TREE_USER_TYPES = ((EXPERT_USER_TYPE, 'Expert'),
                       (ENTREPRENEUR_USER_TYPE, 'Entrepreneur'),)
class BaseNavTreeItem(TreeItemBase, AcceleratorModel):
    """
    One item of a navigation tree.

    The ``tree`` field specifies the NavTree object that this item belongs
    to.  The remaining fields of this model specify which objects are
    allowed to access this item.  In all cases, an empty value implies
    "no restriction".  For example, if the ``program`` field is empty, then
    all programs are allowed to access this item.  If it is non-empty, then
    only the selected programs are allowed to access it.
    """
    # Owning navigation tree; items are removed together with their tree.
    tree = models.ForeignKey(to=swapper.get_model_name(
        AcceleratorModel.Meta.app_label, "NavTree"),
        on_delete=models.CASCADE)
    # User roles allowed to see this item (empty = all roles).
    user_role = models.ManyToManyField(
        to=swapper.get_model_name(
            AcceleratorModel.Meta.app_label, 'UserRole'),
        blank=True)
    # Program families allowed to see this item (empty = all families).
    program_family = models.ManyToManyField(
        to=swapper.get_model_name(
            AcceleratorModel.Meta.app_label, 'ProgramFamily'),
        blank=True)
    # Programs allowed to see this item (empty = all programs).
    program = models.ManyToManyField(
        to=swapper.get_model_name(
            AcceleratorModel.Meta.app_label, 'Program'),
        blank=True)
    # Presumably restricts the item to users in an active program —
    # TODO(review): confirm the exact semantics with the consuming views.
    active_program = models.BooleanField(default=False)
    # Optional user-type restriction; blank means both user types.
    user_type = models.CharField(
        max_length=12,
        choices=NAV_TREE_USER_TYPES,
        blank=True,
    )
    # NOTE(review): appears to control whether an item with a single child
    # is still displayed — confirm against the template logic.
    display_single_item = models.BooleanField(default=True)

    class Meta(AcceleratorModel.Meta):
        db_table = 'accelerator_navtreeitem'
        verbose_name_plural = "NavTreeItems"
        unique_together = ('tree', 'title', 'url')
        abstract = True
|
alexforencich/python-ivi | refs/heads/master | ivi/agilent/agilentMSO6054A.py | 2 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilent6000 import *
class agilentMSO6054A(agilent6000):
    "Agilent InfiniiVision MSO6054A IVI oscilloscope driver"
    def __init__(self, *args, **kwargs):
        # Record the instrument ID used for identification, unless a
        # subclass/caller already set one before us.
        self.__dict__.setdefault('_instrument_id', 'MSO6054A')
        super(agilentMSO6054A, self).__init__(*args, **kwargs)
        # Model-specific capabilities: 4 analog + 16 digital channels and a
        # bandwidth of 500e6 (presumably Hz, i.e. 500 MHz — consistent with
        # the MSO6054A model name).
        self._analog_channel_count = 4
        self._digital_channel_count = 16
        self._channel_count = self._analog_channel_count + self._digital_channel_count
        self._bandwidth = 500e6
        # Rebuild the channel list to reflect the counts set above.
        self._init_channels()
|
artwr/airflow | refs/heads/master | airflow/www/decorators.py | 2 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import gzip
import functools
import pendulum
from io import BytesIO as IO
from flask import after_this_request, flash, redirect, request, url_for, g
from airflow.models.log import Log
from airflow.utils.db import create_session
def action_logging(f):
    """Decorator that records each call of the wrapped view as a ``Log`` row.

    The entry captures the view name, the acting user, the full request
    args, and the ``task_id``/``dag_id``/``execution_date`` parameters when
    they are present in the request.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        with create_session() as session:
            # Anonymous sessions have no username to attribute the action to.
            user = 'anonymous' if g.user.is_anonymous else g.user.username
            log_entry = Log(
                event=f.__name__,
                task_instance=None,
                owner=user,
                extra=str(list(request.args.items())),
                task_id=request.args.get('task_id'),
                dag_id=request.args.get('dag_id'))
            if 'execution_date' in request.args:
                log_entry.execution_date = pendulum.parse(
                    request.args.get('execution_date'))
            session.add(log_entry)
            # Run the view inside the session scope, as the original did.
            return f(*args, **kwargs)
    return wrapper
def gzipped(f):
    """Decorator that gzip-compresses the wrapped view's response whenever
    the client advertises gzip support in its Accept-Encoding header."""
    @functools.wraps(f)
    def view_func(*args, **kwargs):
        @after_this_request
        def zipper(response):
            accept_encoding = request.headers.get('Accept-Encoding', '')
            if 'gzip' not in accept_encoding.lower():
                # Client did not ask for gzip; leave the response untouched.
                return response
            response.direct_passthrough = False
            not_success = (response.status_code < 200 or
                           response.status_code >= 300)
            if not_success or 'Content-Encoding' in response.headers:
                # Only compress successful, not-yet-encoded responses.
                return response
            buf = IO()
            with gzip.GzipFile(mode='wb', fileobj=buf) as gz:
                gz.write(response.data)
            response.data = buf.getvalue()
            response.headers['Content-Encoding'] = 'gzip'
            response.headers['Vary'] = 'Accept-Encoding'
            response.headers['Content-Length'] = len(response.data)
            return response
        return f(*args, **kwargs)
    return view_func
def has_dag_access(**dag_kwargs):
    """
    Decorator to check whether the user has read / write permission on the dag.
    """
    def decorator(f):
        @functools.wraps(f)
        def wrapper(self, *args, **kwargs):
            has_access = self.appbuilder.sm.has_access
            dag_id = request.args.get('dag_id')
            # When True, the view requires write access on the dag.
            can_dag_edit = dag_kwargs.get('can_dag_edit', False)
            # Permission checks, in the same order as before:
            # 1. can_dag_edit on all_dags always suffices;
            # 2. so does can_dag_edit on this specific dag;
            # 3. otherwise, read-only views accept can_dag_read on
            #    all_dags or on this dag; write views are denied.
            if has_access('can_dag_edit', 'all_dags'):
                allowed = True
            elif has_access('can_dag_edit', dag_id):
                allowed = True
            elif can_dag_edit:
                allowed = False
            else:
                allowed = (has_access('can_dag_read', 'all_dags') or
                           has_access('can_dag_read', dag_id))
            if allowed:
                return f(self, *args, **kwargs)
            flash("Access is Denied", "danger")
            return redirect(url_for(self.appbuilder.sm.auth_view.
                                    __class__.__name__ + ".login"))
        return wrapper
    return decorator
|
maniteja123/numpy | refs/heads/master | benchmarks/benchmarks/bench_linalg.py | 51 | from __future__ import absolute_import, division, print_function
from .common import Benchmark, get_squares_, get_indexes_rand, TYPES1
import numpy as np
class Eindot(Benchmark):
    """Benchmarks for dot / einsum / inner / matmul / tensordot products.

    Compares C-contiguous operands against transposed views (``.T``) and
    contiguous copies of those transposes, to expose the effect of memory
    layout on the underlying routines.
    """
    def setup(self):
        # a: (150, 400) C-contiguous; at is its transposed *view*,
        # ac / atc are contiguous *copies* of a and a.T respectively.
        self.a = np.arange(60000.0).reshape(150, 400)
        self.ac = self.a.copy()
        self.at = self.a.T
        self.atc = self.a.T.copy()
        # b, c, d: shaped so the chained products below are conformable.
        self.b = np.arange(240000.0).reshape(400, 600)
        self.c = np.arange(600)
        self.d = np.arange(400)
        # 3-D operands for the einsum / tensordot contractions.
        self.a3 = np.arange(480000.).reshape(60, 80, 100)
        self.b3 = np.arange(192000.).reshape(80, 60, 40)
    def time_dot_a_b(self):
        np.dot(self.a, self.b)
    def time_dot_d_dot_b_c(self):
        np.dot(self.d, np.dot(self.b, self.c))
    def time_dot_trans_a_at(self):
        np.dot(self.a, self.at)
    def time_dot_trans_a_atc(self):
        np.dot(self.a, self.atc)
    def time_dot_trans_at_a(self):
        np.dot(self.at, self.a)
    def time_dot_trans_atc_a(self):
        np.dot(self.atc, self.a)
    def time_einsum_i_ij_j(self):
        np.einsum('i,ij,j', self.d, self.b, self.c)
    def time_einsum_ij_jk_a_b(self):
        np.einsum('ij,jk', self.a, self.b)
    def time_einsum_ijk_jil_kl(self):
        np.einsum('ijk,jil->kl', self.a3, self.b3)
    def time_inner_trans_a_a(self):
        np.inner(self.a, self.a)
    def time_inner_trans_a_ac(self):
        np.inner(self.a, self.ac)
    def time_matmul_a_b(self):
        np.matmul(self.a, self.b)
    def time_matmul_d_matmul_b_c(self):
        np.matmul(self.d, np.matmul(self.b, self.c))
    def time_matmul_trans_a_at(self):
        np.matmul(self.a, self.at)
    def time_matmul_trans_a_atc(self):
        np.matmul(self.a, self.atc)
    def time_matmul_trans_at_a(self):
        np.matmul(self.at, self.a)
    def time_matmul_trans_atc_a(self):
        np.matmul(self.atc, self.a)
    def time_tensordot_a_b_axes_1_0_0_1(self):
        np.tensordot(self.a3, self.b3, axes=([1, 0], [0, 1]))
class Linalg(Benchmark):
    """Benchmark selected np.linalg operations across the benchmark dtypes."""
    params = [['svd', 'pinv', 'det', 'norm'],
              TYPES1]
    param_names = ['op', 'type']
    def setup(self, op, typename):
        # Some ops legitimately warn/err on some dtypes; silence fp errors.
        np.seterr(all='ignore')
        self.func = getattr(np.linalg, op)
        if op == 'cholesky':
            # we need a positive definite matrix
            # NOTE(review): 'cholesky' is not in `params` above, so this
            # branch is only reachable if the op list is extended.
            self.a = np.dot(get_squares_()[typename],
                            get_squares_()[typename].T)
        else:
            self.a = get_squares_()[typename]
        # check that dtype is supported at all; raising NotImplementedError
        # from setup() makes asv skip this parameter combination
        try:
            self.func(self.a[:2, :2])
        except TypeError:
            raise NotImplementedError()
    def time_op(self, op, typename):
        self.func(self.a)
class Lstsq(Benchmark):
    """Benchmark np.linalg.lstsq on a square float64 system."""
    def setup(self):
        self.a = get_squares_()['float64']
        self.b = get_indexes_rand()[:100].astype(np.float64)
    def time_numpy_linalg_lstsq_a__b_float64(self):
        # rcond=-1 keeps the historical (pre-1.14) singular-value cutoff and
        # avoids the FutureWarning numpy emits when rcond is left unset,
        # which would otherwise pollute benchmark output.
        np.linalg.lstsq(self.a, self.b, rcond=-1)
|
newswangerd/ansible | refs/heads/devel | lib/ansible/plugins/shell/cmd.py | 29 | # Copyright (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
name: cmd
version_added: '2.8'
short_description: Windows Command Prompt
description:
- Used with the 'ssh' connection plugin and no C(DefaultShell) has been set on the Windows host.
extends_documentation_fragment:
- shell_windows
'''
import re
from ansible.plugins.shell.powershell import ShellModule as PSShellModule
# Metachars (plus whitespace) that have a special meaning to cmd.exe;
# ShellModule.quote() uses this scanner to decide whether an argument needs
# quoting/escaping at all.
_find_unsafe = re.compile(r'[\s\(\)\%\!^\"\<\>\&\|]').search
class ShellModule(PSShellModule):
    """Shell plugin for the Windows Command Prompt (cmd.exe), used with the
    'ssh' connection plugin when no DefaultShell is set on the host."""
    # Common shell filenames that this plugin handles
    COMPATIBLE_SHELLS = frozenset()
    # Family of shells this has. Must match the filename without extension
    SHELL_FAMILY = 'cmd'
    # cmd.exe syntax for discarding stdout and stderr.
    _SHELL_REDIRECT_ALLNULL = '>nul 2>&1'
    _SHELL_AND = '&&'
    # Used by various parts of Ansible to do Windows specific changes
    _IS_WINDOWS = True
    def quote(self, s):
        """Return `s` quoted/escaped so cmd.exe treats it as one literal
        argument."""
        # cmd does not support the single quotes that shlex_quote uses. We
        # need to override the quoting behaviour to better match cmd.exe.
        # https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
        # Return an empty argument
        if not s:
            return '""'
        # No whitespace or metachars: safe to pass through unquoted.
        if _find_unsafe(s) is None:
            return s
        # Escape the metachars as we are quoting the string to stop cmd from
        # interpreting that metachar. For example 'file &whoami.exe' would
        # result in 'file $(whoami.exe)' instead of the literal string.
        # https://stackoverflow.com/questions/3411771/multiple-character-replace-with-python
        for c in '^()%!"<>&|':  # '^' must be the first char that we scan and replace
            if c in s:
                # I can't find any docs that explicitly say this but to escape ", it needs to be prefixed with \^.
                s = s.replace(c, ("\\^" if c == '"' else "^") + c)
        return '^"' + s + '^"'
|
daenamkim/ansible | refs/heads/devel | lib/ansible/modules/files/template.py | 49 | # this is a virtual module that is entirely implemented server side
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
# New-style classes on Python 2 for everything defined in this module.
__metaclass__ = type

# Consumed by ansible-doc tooling; not used at runtime by the module itself.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: template
version_added: historical
short_description: Templates a file out to a remote server
description:
- Templates are processed by the Jinja2 templating language
(U(http://jinja.pocoo.org/docs/)) - documentation on the template
formatting can be found in the Template Designer Documentation
(U(http://jinja.pocoo.org/docs/templates/)).
- "Six additional variables can be used in templates:
C(ansible_managed) (configurable via the C(defaults) section of C(ansible.cfg)) contains a string which can be used to
describe the template name, host, modification time of the template file and the owner uid.
C(template_host) contains the node name of the template's machine.
C(template_uid) the numeric user id of the owner.
C(template_path) the path of the template.
C(template_fullpath) is the absolute path of the template.
C(template_run_date) is the date that the template was rendered."
options:
src:
description:
- Path of a Jinja2 formatted template on the Ansible controller. This can be a relative or absolute path.
required: true
dest:
description:
- Location to render the template to on the remote machine.
required: true
backup:
description:
- Create a backup file including the timestamp information so you can get
the original file back if you somehow clobbered it incorrectly.
type: bool
default: 'no'
newline_sequence:
description:
- Specify the newline sequence to use for templating files.
choices: [ '\n', '\r', '\r\n' ]
default: '\n'
version_added: '2.4'
block_start_string:
description:
- The string marking the beginning of a block.
default: '{%'
version_added: '2.4'
block_end_string:
description:
- The string marking the end of a block.
default: '%}'
version_added: '2.4'
variable_start_string:
description:
- The string marking the beginning of a print statement.
default: '{{'
version_added: '2.4'
variable_end_string:
description:
- The string marking the end of a print statement.
default: '}}'
version_added: '2.4'
trim_blocks:
description:
- If this is set to True the first newline after a block is removed (block, not variable tag!).
type: bool
default: 'no'
version_added: '2.4'
force:
description:
- the default is C(yes), which will replace the remote file when contents
are different than the source. If C(no), the file will only be transferred
if the destination does not exist.
type: bool
default: 'yes'
follow:
description:
- This flag indicates that filesystem links in the destination, if they exist, should be followed.
- Previous to Ansible 2.4, this was hardcoded as C(yes).
type: bool
default: 'no'
version_added: "2.4"
notes:
- For Windows you can use M(win_template) which uses '\r\n' as C(newline_sequence).
- Including a string that uses a date in the template will result in the template being marked 'changed' each time
- "Since Ansible version 0.9, templates are loaded with C(trim_blocks=True)."
- "Also, you can override jinja2 settings by adding a special header to template file.
i.e. C(#jinja2:variable_start_string:'[%', variable_end_string:'%]', trim_blocks: False)
which changes the variable interpolation markers to [% var %] instead of {{ var }}.
This is the best way to prevent evaluation of things that look like, but should not be Jinja2.
raw/endraw in Jinja2 will not work as you expect because templates in Ansible are recursively evaluated."
- You can use the C(copy) module with the C(content:) option if you prefer the template inline,
as part of the playbook.
author:
- Ansible Core Team
- Michael DeHaan
extends_documentation_fragment:
- files
- validate
'''
EXAMPLES = r'''
# Example from Ansible Playbooks
- template:
src: /mytemplates/foo.j2
dest: /etc/file.conf
owner: bin
group: wheel
mode: 0644
# The same example, but using symbolic modes equivalent to 0644
- template:
src: /mytemplates/foo.j2
dest: /etc/file.conf
owner: bin
group: wheel
mode: "u=rw,g=r,o=r"
# Create a DOS-style text file from a template
- template:
src: config.ini.j2
dest: /share/windows/config.ini
newline_sequence: '\r\n'
# Copy a new "sudoers" file into place, after passing validation with visudo
- template:
src: /mine/sudoers
dest: /etc/sudoers
validate: '/usr/sbin/visudo -cf %s'
# Update sshd configuration safely, avoid locking yourself out
- template:
src: etc/ssh/sshd_config.j2
dest: /etc/ssh/sshd_config
owner: root
group: root
mode: '0600'
validate: /usr/sbin/sshd -t -f %s
backup: yes
'''
|
homeworkprod/byceps | refs/heads/master | byceps/blueprints/admin/ticketing/authorization.py | 1 | """
byceps.blueprints.admin.ticketing.authorization
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.util.authorization import create_permission_enum
# Permission identifiers for the ticketing admin area.  Presumably namespaced
# as 'ticketing.checkin' / 'ticketing.view' by create_permission_enum --
# TODO confirm against byceps.util.authorization.
TicketingPermission = create_permission_enum('ticketing', [
    'checkin',
    'view',
])
|
weolar/miniblink49 | refs/heads/master | v8_7_5/tools/snapshot/asm_to_inline_asm.py | 9 | #!/usr/bin/env python
# Copyright 2018 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''
Converts a given file in clang assembly syntax to a corresponding
representation in inline assembly. Specifically, this is used to convert
embedded.S to embedded.cc for Windows clang builds.
'''
import argparse
import sys
def asm_to_inl_asm(in_filename, out_filename):
  """Wrap every line of the assembly file `in_filename` in a C/C++
  __asm__(...) statement written to `out_filename`.

  Returns 0 so the result can be passed straight to sys.exit().
  """
  # Open the output in text mode ('w'), not binary ('wb'): we write str
  # objects, which raises TypeError on a binary file under Python 3.
  with open(in_filename, 'r') as infile, open(out_filename, 'w') as outfile:
    outfile.write('__asm__(\n')
    for line in infile:
      # Escape " in .S file before outputting it to inline asm file.
      line = line.replace('"', '\\"')
      outfile.write('  "%s\\n"\n' % line.rstrip())
    outfile.write(');\n')
  return 0
if __name__ == '__main__':
  # CLI entry point: asm_to_inline_asm.py <input.S> <output.cc>
  parser = argparse.ArgumentParser(description=__doc__)
  parser.add_argument('input', help='Name of the input assembly file')
  parser.add_argument('output', help='Name of the target CC file')
  args = parser.parse_args()
  # Exit code comes from the converter (always 0 unless it raises).
  sys.exit(asm_to_inl_asm(args.input, args.output))
|
kalinochkind/vkbot | refs/heads/master | pack.py | 1 | import os.path
import shutil
import tarfile
# tarfile compression suffix used for both packing and unpacking ('gz' -> .tar.gz).
COMPRESSION_MODE = 'gz'
def pack_dirs(filename, dirs):
    """Archive the given directories into a compressed tarball.

    Failures are printed rather than raised, so a broken backup never
    aborts the caller (best-effort behaviour).
    """
    try:
        with tarfile.open(filename, 'w:' + COMPRESSION_MODE) as archive:
            for directory in dirs:
                archive.add(directory)
    except Exception as err:
        print(err)
def pack(filename):
    """Back up both the 'accounts' and 'data' directories into `filename`."""
    pack_dirs(filename, ['accounts', 'data'])
def pack_data(filename):
    """Back up only the 'data' directory into `filename`."""
    pack_dirs(filename, ['data'])
def unpack(filename):
    """Restore the 'data' / 'accounts' directories from a tarball made by pack().

    Any directory present in the archive is deleted locally before
    extraction.  Errors are printed instead of raised, mirroring pack_dirs().
    """
    try:
        with tarfile.open(filename, 'r:' + COMPRESSION_MODE) as f:
            # Security: extractall() writes whatever paths the archive
            # contains, so a crafted tarball could escape the working
            # directory (path traversal).  Reject absolute or '..' members
            # before touching the filesystem.
            for member in f.getmembers():
                parts = member.name.replace('\\', '/').split('/')
                if member.name.startswith(('/', '\\')) or '..' in parts:
                    raise ValueError('unsafe path in archive: ' + member.name)
            if 'data' in f.getnames() and os.path.isdir('data'):
                shutil.rmtree('data')
            if 'accounts' in f.getnames() and os.path.isdir('accounts'):
                shutil.rmtree('accounts')
            f.extractall()
    except Exception as e:
        print(e)
|
DreamerKing/LightweightHtmlWidgets | refs/heads/master | publish-rc/v1.0/files/Ipy.Lib/encodings/punycode.py | 586 | # -*- coding: iso-8859-1 -*-
""" Codec for the Punicode encoding, as specified in RFC 3492
Written by Martin v. Löwis.
"""
import codecs
##################### Encoding #####################################
def segregate(str):
    """3.1 Basic code point segregation.

    Split *str* into (basic, extended): `basic` is the ASCII subsequence
    joined and encoded to a byte string, `extended` is a sorted list of the
    distinct non-ASCII characters.
    """
    # NOTE: Python 2 module -- dict.keys() returns a list here, which is
    # then sorted in place.
    base = []
    extended = {}
    for c in str:
        if ord(c) < 128:
            base.append(c)
        else:
            extended[c] = 1
    extended = extended.keys()
    extended.sort()
    return "".join(base).encode("ascii"),extended
def selective_len(str, max):
    """Return the length of str, considering only characters below max."""
    # Generator form of the original counting loop.
    return sum(1 for c in str if ord(c) < max)
def selective_find(str, char, index, pos):
    """Return a pair (index, pos) locating the next occurrence of *char*.

    `index` counts only characters whose ordinal is <= ord(char); `pos` is
    the offset in the full string.  (index, pos) give the starting position;
    (-1, -1) is returned when there is no further occurrence.
    """
    length = len(str)
    while True:
        pos += 1
        if pos == length:
            return (-1, -1)
        current = str[pos]
        if current == char:
            return (index + 1, pos)
        if current < char:
            index += 1
def insertion_unsort(str, extended):
    """3.2 Insertion unsort coding.

    Produce the list of deltas (one per extended character, in sorted
    order) that the Punycode encoder turns into generalized integers.
    """
    oldchar = 0x80
    result = []
    oldindex = -1
    for c in extended:
        index = pos = -1
        char = ord(c)
        # Number of characters the decoder's state machine can insert
        # before, for this code point.
        curlen = selective_len(str, char)
        delta = (curlen+1) * (char - oldchar)
        while 1:
            index,pos = selective_find(str,c,index,pos)
            if index == -1:
                break
            delta += index - oldindex
            result.append(delta-1)
            oldindex = index
            delta = 0
        oldchar = char
    return result
def T(j, bias):
    """Return the digit threshold t(j) clamped to [tmin, tmax] = [1, 26].

    Punycode parameters: tmin = 1, tmax = 26, base = 36 (RFC 3492).
    """
    threshold = 36 * (j + 1) - bias
    return min(max(threshold, 1), 26)
# Punycode digit alphabet: values 0-25 -> 'a'-'z', 26-35 -> '0'-'9'.
digits = "abcdefghijklmnopqrstuvwxyz0123456789"
def generate_generalized_integer(N, bias):
    """3.3 Generalized variable-length integers.

    Encode the non-negative integer N as a list of Punycode digit
    characters, using the thresholds derived from *bias*.
    """
    result = []
    j = 0
    while True:
        threshold = T(j, bias)
        if N < threshold:
            # Final (most significant) digit.
            result.append(digits[N])
            return result
        # Emit one variable-threshold digit and carry the rest.
        quotient, remainder = divmod(N - threshold, 36 - threshold)
        result.append(digits[threshold + remainder])
        N = quotient
        j += 1
def adapt(delta, first, numchars):
    """Bias adaptation (RFC 3492 section 6.1).

    Punycode parameters: damp = 700, skew = 38,
    ((base - tmin) * tmax) // 2 == 455.
    """
    delta //= 700 if first else 2
    delta += delta // numchars

    divisions = 0
    while delta > 455:
        delta //= 35  # base - tmin
        divisions += 36
    return divisions + (36 * delta // (delta + 38))
def generate_integers(baselen, deltas):
    """Encode all deltas as generalized integers, adapting the bias.

    (The original docstring said "3.4 Bias adaptation"; the bias adaptation
    itself lives in adapt() -- this function drives digit generation.)
    Punycode parameters: initial bias = 72, damp = 700, skew = 38.
    """
    result = []
    bias = 72
    for points, delta in enumerate(deltas):
        s = generate_generalized_integer(delta, bias)
        result.extend(s)
        bias = adapt(delta, points==0, baselen+points+1)
    return "".join(result)
def punycode_encode(text):
    """Encode *text* to its Punycode form: '<basic>-<extended>' or just
    '<extended>' when there are no basic (ASCII) characters."""
    base, extended = segregate(text)
    # NOTE(review): segregate() already returned an ascii-encoded string;
    # this second encode is a no-op on Python 2 but would fail on Python 3.
    base = base.encode("ascii")
    deltas = insertion_unsort(text, extended)
    extended = generate_integers(len(base), deltas)
    if base:
        return base + "-" + extended
    return extended
##################### Decoding #####################################
def decode_generalized_number(extended, extpos, bias, errors):
    """3.3 Generalized variable-length integers (decoder side).

    Decode one delta starting at `extpos`.  Returns (new extpos, value), or
    (pos, None) when decoding fails and `errors` is not 'strict'.
    """
    result = 0
    w = 1
    j = 0
    while 1:
        try:
            char = ord(extended[extpos])
        except IndexError:
            if errors == "strict":
                # NOTE(review): "punicode" is a typo for "punycode" in this
                # user-visible message; left as-is (runtime string).
                raise UnicodeError, "incomplete punicode string"
            return extpos + 1, None
        extpos += 1
        if 0x41 <= char <= 0x5A: # A-Z
            digit = char - 0x41
        elif 0x30 <= char <= 0x39:
            digit = char - 22 # 0x30-26
        elif errors == "strict":
            # NOTE(review): extpos was already advanced above, so this
            # message reports the character *after* the invalid one and can
            # itself raise IndexError at end of input -- confirm upstream.
            raise UnicodeError("Invalid extended code point '%s'"
                               % extended[extpos])
        else:
            return extpos, None
        t = T(j, bias)
        result += digit * w
        if digit < t:
            return extpos, result
        w = w * (36 - t)
        j += 1
def insertion_sort(base, extended, errors):
    """Decoder counterpart of insertion_unsort: replay the deltas in
    *extended*, inserting each decoded code point into *base*."""
    char = 0x80
    pos = -1
    bias = 72
    extpos = 0
    while extpos < len(extended):
        newpos, delta = decode_generalized_number(extended, extpos,
                                                  bias, errors)
        if delta is None:
            # There was an error in decoding. We can't continue because
            # synchronization is lost.
            return base
        pos += delta+1
        char += pos // (len(base) + 1)
        if char > 0x10FFFF:
            if errors == "strict":
                raise UnicodeError, ("Invalid character U+%x" % char)
            # Non-strict modes substitute '?' for out-of-range code points.
            char = ord('?')
        pos = pos % (len(base) + 1)
        base = base[:pos] + unichr(char) + base[pos:]
        bias = adapt(delta, (extpos == 0), len(base))
        extpos = newpos
    return base
def punycode_decode(text, errors):
    """Decode a Punycode string: split on the *last* hyphen into the basic
    (ASCII) part and the extended digits, then replay the insertions.

    NOTE: Python 2 only -- relies on the `unicode` builtin.
    """
    pos = text.rfind("-")
    if pos == -1:
        # No hyphen: the whole input is extended digits.
        base = ""
        extended = text
    else:
        base = text[:pos]
        extended = text[pos+1:]
    base = unicode(base, "ascii", errors)
    # Digits are case-insensitive; normalize before decoding.
    extended = extended.upper()
    return insertion_sort(base, extended, errors)
### Codec APIs
class Codec(codecs.Codec):
    """Stateless encode/decode entry points for the 'punycode' codec."""

    def encode(self,input,errors='strict'):
        # Encoding never consults `errors`; segregation cannot fail.
        res = punycode_encode(input)
        return res, len(input)

    def decode(self,input,errors='strict'):
        # Only the three handlers the decoder knows how to honour.
        if errors not in ('strict', 'replace', 'ignore'):
            raise UnicodeError, "Unsupported error handling "+errors
        res = punycode_decode(input, errors)
        return res, len(input)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Punycode cannot be encoded incrementally; each call encodes the whole
    # input independently and `final` is ignored.
    def encode(self, input, final=False):
        return punycode_encode(input)
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Like encoding, decoding is all-or-nothing; `final` is ignored.
    def decode(self, input, final=False):
        if self.errors not in ('strict', 'replace', 'ignore'):
            raise UnicodeError, "Unsupported error handling "+self.errors
        return punycode_decode(input, self.errors)
class StreamWriter(Codec,codecs.StreamWriter):
    # Inherits encode() from Codec; no stream-specific state needed.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Inherits decode() from Codec; no stream-specific state needed.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record the codecs registry uses to look up
    the 'punycode' encoding (standard encodings-module entry point)."""
    return codecs.CodecInfo(
        name='punycode',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
|
davidak/openxenmanager | refs/heads/master | src/OXM/oxcSERVER_storage.py | 2 | # -----------------------------------------------------------------------
# OpenXenManager
#
# Copyright (C) 2009 Alberto Gonzalez Rodriguez alberto@pesadilla.org
# Copyright (C) 2014 Daniel Lintott <daniel@serverb.co.uk>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -----------------------------------------------------------------------
import xml.dom.minidom
import xml.sax.saxutils as saxutils
class oxcSERVERstorage:
    """Mixin with storage-repository (SR) operations for a XenAPI server.

    Relies on attributes supplied by the class it is mixed into:
    self.connection (XML-RPC proxy), self.session_uuid, self.all (cached
    object records), self.track_tasks, self.host_vm, self.wine (UI helper)
    and self.convert_bytes -- none of them are defined here.
    """
    # Last SR located by check_hardware_hba(); reused by the reattach_* methods.
    stg_ref = None
    stg_uuid = None
    def fill_hw_hba(self, ref, list):
        """Probe host `ref` for lvmohba (HBA) LUNs and fill the list model.

        `list` gets one bold vendor header row per vendor plus one row per
        LUN.  Returns 0 on success, 1 (after an error dialog) when the
        probe reports no LUNs.  SR.probe for lvmohba "fails" by design; the
        LUN XML is carried in ErrorDescription[3].
        """
        #<?xml version="1.0"?><methodCall><methodName>SR.probe</methodName><params><param><value><string>OpaqueRef:c9ea013c-cbce-0e85-6863-66d8e7b66ea7</string></value></param><param><value><string>OpaqueRef:5c0a69d1-7719-946b-7f3c-683a7058338d</string></value></param><param><value><struct /></value></param><param><value><string>lvmohba</string></value></param><param><value><struct /></value></param></params></methodCall>
        list.clear()
        res = self.connection.SR.probe(self.session_uuid, ref, {}, "lvmohba", {})
        if len(res['ErrorDescription']) > 2:
            result = res['ErrorDescription'][3]
            dom = xml.dom.minidom.parseString(result)
            nodes = dom.getElementsByTagName("BlockDevice")
            # Group rows by vendor so the UI can show a header per vendor.
            disks = {}
            for node in nodes:
                size = self.convert_bytes(node.getElementsByTagName("size")[0].childNodes[0].data.strip())
                serial = node.getElementsByTagName("serial")[0].childNodes[0].data.strip()
                scsiid = node.getElementsByTagName("SCSIid")[0].childNodes[0].data.strip()
                adapter = node.getElementsByTagName("adapter")[0].childNodes[0].data.strip()
                channel = node.getElementsByTagName("channel")[0].childNodes[0].data.strip()
                id = node.getElementsByTagName("id")[0].childNodes[0].data.strip()
                lun = node.getElementsByTagName("lun")[0].childNodes[0].data.strip()
                vendor = node.getElementsByTagName("vendor")[0].childNodes[0].data.strip()
                path = node.getElementsByTagName("path")[0].childNodes[0].data.strip()
                if vendor not in disks:
                    disks[vendor] = []
                disks[vendor].append([" %s  %s  %s  %s:%s:%s:%s" % (size, serial, scsiid, adapter, channel, id, lun),scsiid,path])
            for ref in disks.keys():
                list.append(["<b>" + ref + "</b>", False, "", ""])
                for lun in disks[ref]:
                    list.append([lun[0], True, lun[1], lun[2]])
            return 0
        else:
            self.wine.show_error_dlg("No LUNs were found. Please verify your hardware configuration")
            return 1
    def rescan_isos(self, ref):
        """Trigger an asynchronous rescan of SR `ref` (e.g. an ISO library)."""
        res = self.connection.Async.SR.scan(self.session_uuid, ref)
        if "Value" in res:
            # Remember the task so progress events can be mapped back to the SR.
            self.track_tasks[res['Value']] = ref
        else:
            print res
    def detach_storage(self, ref):
        """Detach SR `ref`: unplug each of its PBDs, then destroy them."""
        for pbd in self.all['SR'][ref]['PBDs']:
            res = self.connection.Async.PBD.unplug(self.session_uuid, pbd)
            if "Value" in res:
                self.track_tasks[res['Value']] = ref
            else:
                print res
            if "Value" in res:
                value = res["Value"]
                # Busy-wait until the unplug task leaves the "pending" state.
                task = self.connection.task.get_record(self.session_uuid, value)['Value']
                while task["status"] == "pending":
                    task = self.connection.task.get_record(self.session_uuid, value)['Value']
                res = self.connection.PBD.destroy(self.session_uuid, pbd)
                if "Value" in res:
                    self.track_tasks[res['Value']] = ref
                else:
                    print res
    def forget_storage(self, ref):
        """Forget SR `ref` (metadata only; on-disk data is kept).

        If the SR allows it, its PBDs are unplugged and destroyed first,
        then SR.forget is issued asynchronously.
        """
        if self.all['SR'][ref]['allowed_operations'].count("unplug"):
            for pbd in self.all['SR'][ref]['PBDs']:
                res = self.connection.Async.PBD.unplug(self.session_uuid, pbd)
                if "Value" in res:
                    self.track_tasks[res['Value']] = ref
                    value = res["Value"]
                    # Busy-wait for the unplug before destroying the PBD.
                    task = self.connection.task.get_record(self.session_uuid, value)['Value']
                    while task["status"] == "pending":
                        task = self.connection.task.get_record(self.session_uuid, value)['Value']
                    res = self.connection.Async.PBD.destroy(self.session_uuid, pbd)
                    if "Value" in res:
                        self.track_tasks[res['Value']] = ref
                        value = res["Value"]
                        task = self.connection.task.get_record(self.session_uuid, value)['Value']
                        while task["status"] == "pending":
                            task = self.connection.task.get_record(self.session_uuid, value)['Value']
                    else:
                        print res
                else:
                    print res
        res = self.connection.Async.SR.forget(self.session_uuid, ref)
        if "Value" in res:
            self.track_tasks[res['Value']] = ref
        else:
            print res
    def delete_vdi(self, ref_vdi, ref_vm):
        """Destroy VDI `ref_vdi`, first destroying every VBD that uses it.

        Tasks are tracked against `ref_vm` so UI progress maps to the VM.
        """
        for ref_vbd in self.all['VDI'][ref_vdi]['VBDs']:
            res = self.connection.VBD.destroy(self.session_uuid, ref_vbd)
            if "Value" in res:
                self.track_tasks[res['Value']] = ref_vm
            else:
                print res
        res = self.connection.VDI.destroy(self.session_uuid, ref_vdi)
        if "Value" in res:
            self.track_tasks[res['Value']] = ref_vm
        else:
            print res
    def reattach_nfs_iso(self, sr, name, share, options):
        """Re-plug an existing NFS ISO SR: rename it, create a PBD on the
        first known host and plug it.  Returns 0 on success, 1 on failure."""
        # FIXME
        ref = self.all['host'].keys()[0]
        pbd = {
            "uuid" : "",
            "host" : ref,
            "SR" : sr,
            "device_config" : {
                "location" : share,
                "options": options
            },
            # NOTE(review): "currentyle_attached" looks like a typo for
            # "currently_attached" (cf. reattach_aoe); the server seems to
            # tolerate/ignore it -- confirm before changing the wire key.
            "currentyle_attached" : False,
            "other_config" : {}
        }
        self.connection.SR.set_name_label(self.session_uuid, sr, name)
        self.connection.SR.set_name_description(self.session_uuid, sr, "NFS ISO Library [%s]" % (share))
        res = self.connection.Async.PBD.create(self.session_uuid, pbd)
        if "Value" in res:
            self.track_tasks[res['Value']] = ref
            value = res['Value']
            # Busy-wait for PBD.create, then plug the resulting PBD.
            task = self.connection.task.get_record(self.session_uuid, value)['Value']
            while task["status"] == "pending":
                task = self.connection.task.get_record(self.session_uuid, value)['Value']
            result = saxutils.unescape(task['result']).replace("<value>","").replace("</value>","").replace("&quot;", '"')
            res = self.connection.Async.PBD.plug(self.session_uuid, result)
            value = res['Value']
            task = self.connection.task.get_record(self.session_uuid, value)['Value']
            while task["status"] == "pending":
                task = self.connection.task.get_record(self.session_uuid, value)['Value']
            if task["status"] == "success":
                return 0
            else:
                self.wine.show_error_dlg(str(task["error_info"]))
                return 1
        else:
            print res
    def create_nfs_iso(self, ref, name, share, options):
        """Create a new NFS ISO library SR on host `ref`.

        Returns 0 on success, 1 (after an error dialog) on failure.
        """
        sr = {
            "location" : share,
            "options" : options
        }
        value = self.connection.SR.create(self.session_uuid, ref, sr, "0", name, "NFS ISO Library [%s]" % (share), "iso", "iso", True, {})
        if "ErrorDescription" in value:
            self.wine.show_error_dlg(value["ErrorDescription"][2])
            return 1
        else:
            return 0
    def reattach_cifs_iso(self, sr, name, share, options, user="", password=""):
        """CIFS twin of reattach_nfs_iso: rename SR, create+plug a PBD on
        the first known host.  Returns 0 on success, 1 on failure.

        NOTE(review): `user`/`password` are accepted but never placed in
        device_config here (unlike create_cifs_iso) -- verify intent.
        """
        ref = self.all['host'].keys()[0]
        pbd = {
            "uuid" : "",
            "host" : ref,
            "SR" : sr,
            "device_config" : {
                "location" : share,
                "type": "cifs",
                "options": options
            },
            # NOTE(review): likely typo for "currently_attached" -- see
            # reattach_nfs_iso.
            "currentyle_attached" : False,
            "other_config" : {}
        }
        self.connection.SR.set_name_label(self.session_uuid, sr, name)
        self.connection.SR.set_name_description(self.session_uuid, sr, "CIFS ISO Library [%s]" % (share))
        res = self.connection.Async.PBD.create(self.session_uuid, pbd)
        if "Value" in res:
            self.track_tasks[res['Value']] = ref
            value = res['Value']
            task = self.connection.task.get_record(self.session_uuid, value)['Value']
            while task["status"] == "pending":
                task = self.connection.task.get_record(self.session_uuid, value)['Value']
            result = saxutils.unescape(task['result']).replace("<value>","").replace("</value>","").replace("&quot;", '"')
            res = self.connection.Async.PBD.plug(self.session_uuid, result)
            value = res['Value']
            task = self.connection.task.get_record(self.session_uuid, value)['Value']
            while task["status"] == "pending":
                task = self.connection.task.get_record(self.session_uuid, value)['Value']
            if task["status"] == "success":
                return 0
            else:
                self.wine.show_error_dlg(str(task["error_info"]))
                return 1
        else:
            print res
    def create_cifs_iso(self, ref, name, share, options, user="", password=""):
        """Create a new CIFS ISO library SR on host `ref` (optionally with
        credentials).  Returns 0 on success, 1 on failure."""
        sr = {
            "location" : share,
            "type" : "cifs",
            "options" : options,
            "username" : user,
            "cifspassword" : password,
        }
        value = self.connection.SR.create(self.session_uuid, ref, sr, "0", name, "CIFS ISO Library [%s]" % (share), "iso", "iso", True, {})
        if "ErrorDescription" in value:
            self.wine.show_error_dlg(value["ErrorDescription"][2])
            return 1
        else:
            return 0
    def create_nfs_vhd(self, ref, name, host, path, options, create=None):
        """Create a new NFS VHD SR on host `ref`.

        NOTE(review): the description says "NF SR" -- probably meant
        "NFS SR"; left unchanged (user-visible string).  `create` is unused.
        """
        sr = {
            "serverpath" : path,
            "server" : host,
            "options" : options
        }
        res = self.connection.SR.create(self.session_uuid, ref, sr, str(0), name, "NF SR [%s:%s]" % (host, path), "nfs", "", True, {})
        if "Value" in res:
            self.track_tasks[res['Value']] = ref
        else:
            print res
    def create_aoe(self, ref, name, path, create=None):
        """Create a new ATA-over-Ethernet (lvm) SR on device `path`.

        `create` is accepted for signature parity but unused.
        """
        sr = {
            "device" : path,
        }
        res = self.connection.SR.create(self.session_uuid, ref, sr, str(0), name, "AoE SR [%s]" % (path), "lvm", "", True, {})
        if "Value" in res:
            self.track_tasks[res['Value']] = ref
        else:
            print res
    def reattach_aoe(self, ref, name, path, create, uuid):
        """Reattach an existing AoE SR by uuid: introduce it, then create
        and plug a PBD on host `ref` (synchronously)."""
        # NOTE(review): the get_by_uuid result is immediately overwritten by
        # SR.introduce below -- the first call appears to be dead code.
        sr = self.connection.SR.get_by_uuid(self.session_uuid, uuid)
        sr = self.connection.SR.introduce(self.session_uuid, uuid, name, "AOE SR [%s]" % (path), "lvm", "", True, {})['Value']
        pbd = {
            "uuid" : "",
            "host" : ref,
            "SR" : sr,
            "device_config" : {
                "device" : path,
            },
            "currently_attached" : False,
            "other_config" : {}
        }
        ref = self.connection.PBD.create(self.session_uuid, pbd)['Value']
        self.connection.PBD.plug(self.session_uuid, ref)
    def reattach_nfs_vhd(self, ref, name, host, path, options, create, uuid):
        """Reattach an existing NFS VHD SR by uuid: introduce it, then
        create and plug a PBD on host `ref` (synchronously)."""
        # NOTE(review): as in reattach_aoe, the get_by_uuid result is
        # immediately overwritten by SR.introduce.
        sr = self.connection.SR.get_by_uuid(self.session_uuid, uuid)
        sr = self.connection.SR.introduce(self.session_uuid, uuid, name, "NFS SR [%s:%s]" % (host, path), "nfs", "", True, {})['Value']
        pbd = {
            "uuid" : "",
            "host" : ref,
            "SR" : sr,
            "device_config" : {
                "serverpath" : path,
                "server" : host,
                "options": options
            },
            "currently_attached" : False,
            "other_config" : {}
        }
        ref = self.connection.PBD.create(self.session_uuid, pbd)['Value']
        self.connection.PBD.plug(self.session_uuid, ref)
    def format_hardware_hba(self, ref, uuid, name, path):
        """Create (format) a new lvmohba SR on the LUN with SCSI id `uuid`.

        Disables auto-scan on the new SR via other_config.
        """
        sr = {
            "SCSIid" : uuid,
        }
        res = self.connection.SR.create(self.session_uuid, ref, sr, "0", name, "Hardware HBA SR [%s]" % (path), "lvmohba", "", False, {})
        if "Value" in res:
            self.track_tasks[res['Value']] = self.host_vm[ref][0]
            print self.connection.SR.set_other_config(self.session_uuid, res['Value'], {"auto-scan": "false"})
        else:
            print res
    def reattach_and_introduce_hardware_hba(self, ref, uuid, name, path):
        """Introduce the SR found by check_hardware_hba (self.stg_uuid) and
        plug it on host `ref`.  Returns 0 on success, 1 on failure."""
        res = self.connection.SR.introduce(self.session_uuid, self.stg_uuid, name, "Hardware HBA SR [%s]" % (path), "lvmohba", "", False, {})
        pbd = {
            "uuid" : "",
            "host" : ref,
            "SR" : res['Value'],
            "device_config" : {
                "SCSIid" : uuid,
            },
            # NOTE(review): likely typo for "currently_attached" -- see
            # reattach_nfs_iso.
            "currentyle_attached" : False,
            "other_config" : {}
        }
        res = self.connection.Async.PBD.create(self.session_uuid, pbd)
        if "Value" in res:
            self.track_tasks[res['Value']] = self.host_vm[ref][0]
            value = res['Value']
            # Busy-wait for PBD.create, then plug the resulting PBD.
            task = self.connection.task.get_record(self.session_uuid, value)['Value']
            while task["status"] == "pending":
                task = self.connection.task.get_record(self.session_uuid, value)['Value']
            result = saxutils.unescape(task['result']).replace("<value>","").replace("</value>","").replace("&quot;", '"')
            res = self.connection.Async.PBD.plug(self.session_uuid, result)
            value = res['Value']
            task = self.connection.task.get_record(self.session_uuid, value)['Value']
            while task["status"] == "pending":
                task = self.connection.task.get_record(self.session_uuid, value)['Value']
            if task["status"] == "success":
                return 0
            else:
                self.wine.show_error_dlg(str(task["error_info"]))
                return 1
        else:
            print res
    def reattach_hardware_hba(self, ref, uuid, name, path):
        """Re-plug the already-known SR self.stg_ref on the first host.

        Renames the SR, creates a PBD for SCSI id `uuid` and plugs it.
        Returns 0 on success, 1 on failure.  Note: the `ref` parameter is
        overwritten with the first host ref on the next line.
        """
        ref = self.all['host'].keys()[0]
        pbd = {
            "uuid" : "",
            "host" : ref,
            "SR" : self.stg_ref,
            "device_config" : {
                "SCSIid" : uuid,
            },
            # NOTE(review): likely typo for "currently_attached" -- see
            # reattach_nfs_iso.
            "currentyle_attached" : False,
            "other_config" : {}
        }
        self.connection.SR.set_name_label(self.session_uuid, self.stg_ref, name)
        self.connection.SR.set_name_description(self.session_uuid, self.stg_ref, "Hardware HBA SR [%s]" % (path))
        res = self.connection.Async.PBD.create(self.session_uuid, pbd)
        if "Value" in res:
            self.track_tasks[res['Value']] = self.host_vm[ref][0]
            value = res['Value']
            task = self.connection.task.get_record(self.session_uuid, value)['Value']
            while task["status"] == "pending":
                task = self.connection.task.get_record(self.session_uuid, value)['Value']
            result = saxutils.unescape(task['result']).replace("<value>","").replace("</value>","").replace("&quot;", '"')
            res = self.connection.Async.PBD.plug(self.session_uuid, result)
            value = res['Value']
            task = self.connection.task.get_record(self.session_uuid, value)['Value']
            while task["status"] == "pending":
                task = self.connection.task.get_record(self.session_uuid, value)['Value']
            if task["status"] == "success":
                return 0
            else:
                self.wine.show_error_dlg(str(task["error_info"]))
                return 1
        else:
            print res
        pass
        # The string below is dead code kept by the original author
        # (an SR.create variant); it has no runtime effect.
        """
        sr = {
               "SCSIid" : uuid,
             }
        res = self.connection.SR.create(self.session_uuid, ref, sr, "0", name, "Hardware HBA SR [IBM - %s]" % (path),  "lvmohba", "", False, {})
        if "Value" in res:
            self.track_tasks[res['Value']] = self.host_vm[ref][0]
            print self.connection.SR.set_other_config(self.session_uuid, res['Value'], {"auto-scan": "false"})
        else:
            print res
        """
    def check_hardware_hba(self, ref, uuid, text):
        """Probe the LUN with SCSI id `uuid` and classify the SR found.

        Side effect: stores the discovered SR uuid/ref in self.stg_uuid /
        self.stg_ref for the reattach_* methods.  Return value is
        [code, sr_label_or_text, host_label]:
          0 -- no existing SR on the LUN,
          1 -- SR exists and is already attached (has PBDs),
          2 -- SR exists locally and can be reattached,
          3 -- SR exists but is unknown to this pool (foreign).
        """
        result = self.connection.SR.probe(self.session_uuid, ref, {"SCSIid" : uuid }, "lvmohba", {})['Value']
        dom = xml.dom.minidom.parseString(result)
        nodes = dom.getElementsByTagName("UUID")
        if len(nodes):
            reattach = True
            self.stg_uuid = nodes[0].childNodes[0].data.strip()
            for storage_ref in self.all['SR'].keys():
                storage = self.all['SR'][storage_ref]
                if storage["uuid"] == self.stg_uuid:
                    self.stg_ref = storage_ref
                    if len(storage['PBDs']):
                        reattach = False
            if reattach:
                if self.stg_ref:
                    return [2, self.all['SR'][self.stg_ref]['name_label'], self.all['host'][ref]['name_label']]
                else:
                    return [3, text, self.all['host'][ref]['name_label']]
            else:
                return [1, self.all['SR'][self.stg_ref]['name_label'], self.all['host'][ref]['name_label']]
        else:
            return [0, None, None]
    def check_iscsi(self, ref, name, host, port, scsiid, targetiqn, user, password):
        """Probe an iSCSI LUN; return the existing SR uuid found on it, or
        None when the LUN carries no SR.  `name` is currently unused."""
        sr = {
            "port" : port,
            "target" : host,
            "SCSIid" : scsiid,
            "targetIQN" : targetiqn
        }
        if user:
            sr["chapuser"] = user
        if password:
            sr["chappassword"] = password
        value = self.connection.Async.SR.probe(self.session_uuid, ref, sr, "lvmoiscsi", {})['Value']
        # Busy-wait for the probe task, then unwrap the XML payload.
        task = self.connection.task.get_record(self.session_uuid, value)['Value']
        while task["status"] == "pending":
            task = self.connection.task.get_record(self.session_uuid, value)['Value']
        result = saxutils.unescape(task['result']).replace("<value>","").replace("</value>","").replace("&quot;", '"')
        print result
        dom = xml.dom.minidom.parseString(result)
        nodes = dom.getElementsByTagName("UUID")
        if len(nodes):
            return nodes[0].childNodes[0].data.strip()
        else:
            return None
        #ref = self.connection.SR.create(self.session_uuid, ref, sr, "0", name, "iSCSI SR [%s (%s)]" % (host, targetiqn), "lvmoiscsi", "", True, {})
        #print ref
    def create_iscsi(self, ref, name, host, port, scsiid, targetiqn, user, password):
        """Create a new lvmoiscsi SR asynchronously (fire-and-forget: the
        task handle in `res` is never inspected or tracked)."""
        sr = {
            "port" : port,
            "target" : host,
            "SCSIid" : scsiid,
            "targetIQN" : targetiqn
        }
        if user:
            sr["chapuser"] = user
        if password:
            sr["chappassword"] = password
        res = self.connection.Async.SR.create(self.session_uuid, ref, sr, "0", name, "iSCSI SR [%s (%s)]" % (host, targetiqn), "lvmoiscsi", "", True, {})
    def reattach_iscsi(self, ref, name, host, port, scsiid, targetiqn, user, password, lun):
        """Reattach an existing iSCSI SR: introduce it under uuid `lun`,
        then create and (asynchronously) plug a PBD on host `ref`."""
        res = self.connection.SR.introduce(self.session_uuid, lun, name, "iSCSI SR [%s (%s)]" % (host, targetiqn), "lvmoiscsi", "", True, {})
        print res
        pbd = {
            "uuid" : "",
            "host" : ref,
            "SR" : res['Value'],
            "device_config" : {
                "port" : port,
                "target" : host,
                "SCSIid" : scsiid,
                "targetIQN" : targetiqn
            },
            "currently_attached" : False,
            "other_config" : {}
        }
        if user:
            pbd["device_config"]["chapuser"] = user
        if password:
            pbd["device_config"]["chappassword"] = password
        res = self.connection.PBD.create(self.session_uuid, pbd)
        print res
        print self.connection.Async.PBD.plug(self.session_uuid, res['Value'])
        # The string below is dead code kept by the original author
        # (an SR.create variant); it has no runtime effect.
        """
        sr = {
            "port" : port,
            "target" : host,
            "SCSIid" : scsiid,
            "targetIQN" : targetiqn
        }
        if user:
            sr["chapuser"] = user
        if password:
            sr["chappassword"] = password
        res = self.connection.Async.SR.create(self.session_uuid, ref, sr, "0", name, "iSCSI SR [%s (%s)]" % (host, targetiqn), "lvmoiscsi", "", True, {})
        """
    def scan_aoe(self, ref, lista, path):
        """Probe device `path` for existing lvm SRs and append unknown SR
        uuids to `lista` as [uuid, uuid] rows.

        Returns 2 when at least one reattachable SR was found, 1 when the
        probe succeeded but found nothing new, 0 on probe error (after an
        error dialog).
        """
        sr = {
            "device" : path,
        }
        value = self.connection.Async.SR.probe(self.session_uuid, ref, sr, "lvm", {})['Value']
        # Busy-wait for the probe task to finish.
        task = self.connection.task.get_record(self.session_uuid, value)['Value']
        while task["status"] == "pending":
            task = self.connection.task.get_record(self.session_uuid, value)['Value']
        print task
        if task['result'].count("<value>"):
            result = saxutils.unescape(task['result']).replace("<value>","").replace("</value>","").replace("&quot;", '"')
            dom = xml.dom.minidom.parseString(result)
            nodes = dom.getElementsByTagName("SRlist")
            if len(nodes[0].childNodes):
                # Child nodes alternate text/element; step by 2 over elements.
                for i in range(1,len(nodes[0].childNodes),2):
                    ref = nodes[0].childNodes[i].childNodes[1].childNodes[0].data.strip()
                    print ref
                    print self.search_storage_uuid(ref)
                    if self.search_storage_uuid(ref) == False:
                        lista.append([ref, ref])
            if lista.__len__() > 0:
                return 2
            else:
                return 1
        else:
            if len(task["error_info"]) > 2:
                self.wine.show_error_dlg(task["error_info"][2])
            else:
                self.wine.show_error_dlg(task["error_info"][1])
            self.connection.task.destroy(self.session_uuid, value)
            return 0
    def scan_nfs_vhd(self, ref, list, host, path, options):
        """NFS twin of scan_aoe: probe `host:path` for existing nfs SRs and
        append unknown SR uuids to `list`.

        Returns 2 (found reattachable SRs), 1 (none new) or 0 (probe error).
        """
        sr = {
            "serverpath" : path,
            "server" : host,
            "options" : options,
        }
        value = self.connection.Async.SR.probe(self.session_uuid, ref, sr, "nfs", {})['Value']
        task = self.connection.task.get_record(self.session_uuid, value)['Value']
        while task["status"] == "pending":
            task = self.connection.task.get_record(self.session_uuid, value)['Value']
        if task['result'].count("<value>"):
            result = saxutils.unescape(task['result']).replace("<value>","").replace("</value>","").replace("&quot;", '"')
            dom = xml.dom.minidom.parseString(result)
            nodes = dom.getElementsByTagName("SRlist")
            if len(nodes[0].childNodes):
                # Child nodes alternate text/element; step by 2 over elements.
                for i in range(1,len(nodes[0].childNodes),2):
                    ref = nodes[0].childNodes[i].childNodes[1].childNodes[0].data.strip()
                    if self.search_storage_uuid(ref) == False:
                        list.append([ref, ref])
            if list.__len__() > 0:
                return 2
            else:
                return 1
        else:
            self.wine.show_error_dlg(task["error_info"][2])
            self.connection.task.destroy(self.session_uuid, value)
            return 0
    def search_storage_uuid(self, uuid):
        """
        Function to search a storage with specify uuid, returns True if found
        """
        # Linear scan of the cached SR records; False when no SR matches.
        for stg in self.all['SR'].keys():
            if self.all['SR'][stg]["uuid"] == uuid:
                return True
        return False
    def fill_iscsi_target_iqn(self, ref, list, target, port, user=None, password=None):
        """Discover the IQNs exposed by an iSCSI target and fill `list`.

        Works by issuing a deliberately-failing SR.create ("__gui__"); the
        target list comes back in the task's error_info[3] XML.  Returns
        True when IQNs were listed, False (after an error dialog) otherwise.
        """
        list.clear()
        sr = {
            "port" : port,
            "target": target,
        }
        if user:
            sr["chapuser"] = user
        if password:
            sr["chappassword"] = password
        value = self.connection.Async.SR.create(self.session_uuid, ref, sr, "0", "__gui__", "SHOULD NEVER BE CREATED","lvmoiscsi","user", True, {})['Value']
        task = self.connection.task.get_record(self.session_uuid, value)['Value']
        while task["status"] == "pending":
            task = self.connection.task.get_record(self.session_uuid, value)['Value']
        if task["error_info"][3]:
            dom = xml.dom.minidom.parseString(task["error_info"][3])
            nodes = dom.getElementsByTagName("TGT")
            # NOTE(review): `ix` and `index` are assigned but never used.
            ix = 1
            for i in range(0, len(nodes)):
                index = nodes[i].childNodes[1].childNodes[0].data.strip()
                ip = nodes[i].childNodes[3].childNodes[0].data.strip()
                target = nodes[i].childNodes[5].childNodes[0].data.strip()
                list.append([target, "%s (%s)" % (target, ip)])
            self.connection.task.destroy(self.session_uuid, value)
            return True
        else:
            self.wine.show_error_dlg(task["error_info"][2])
            self.connection.task.destroy(self.session_uuid, value)
            return False
def fill_iscsi_target_lun(self, ref, list, target, targetiqn, port, user=None, password=None):
list.clear()
sr = {
"port" : port,
"target": target,
}
# chapuser
# chappassword
if user:
sr["chapuser"] = user
if password:
sr["chappassword"] = password
sr["targetIQN"] = targetiqn
value = self.connection.Async.SR.create(self.session_uuid, ref, sr, "0", "__gui__", "SHOULD NEVER BE CREATED","lvmoiscsi","user", True, {})['Value']
task = self.connection.task.get_record(self.session_uuid, value)['Value']
while task["status"] == "pending":
task = self.connection.task.get_record(self.session_uuid, value)['Value']
if task["error_info"][3]:
dom = xml.dom.minidom.parseString(task["error_info"][3])
nodes = dom.getElementsByTagName("LUN")
for i in range(0, len(nodes)):
vendor = nodes[i].getElementsByTagName("vendor")[0].childNodes[0].data.strip()
#serial = nodes[i].getElementsByTagName("serial")[0].childNodes[0].data.strip()
lunid = nodes[i].getElementsByTagName("LUNid")[0].childNodes[0].data.strip()
size = nodes[i].getElementsByTagName("size")[0].childNodes[0].data.strip()
scsiid = nodes[i].getElementsByTagName("SCSIid")[0].childNodes[0].data.strip()
list.append([scsiid, "LUN %s: %s (%s)" % (lunid, self.convert_bytes(size), vendor)])
self.connection.task.destroy(self.session_uuid, value)
return True
else:
self.wine.show_error_dlg(task["error_info"][2])
self.connection.task.destroy(self.session_uuid, value)
return False
|
psychopy/versions | refs/heads/master | psychopy/visual/basevisual.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Provides class BaseVisualStim and mixins; subclass to get visual stimuli
"""
# Part of the PsychoPy library
# Copyright (C) 2002-2018 Jonathan Peirce (C) 2019-2020 Open Science Tools Ltd.
# Distributed under the terms of the GNU General Public License (GPL).
from __future__ import absolute_import, division, print_function
from builtins import object
from past.builtins import basestring
from pathlib import Path
# Ensure setting pyglet.options['debug_gl'] to False is done prior to any
# other calls to pyglet or pyglet submodules, otherwise it may not get picked
# up by the pyglet GL engine and have no effect.
# Shaders will work but require OpenGL2.0 drivers AND PyOpenGL3.0+
import pyglet
pyglet.options['debug_gl'] = False
GL = pyglet.gl
try:
from PIL import Image
except ImportError:
from . import Image
import copy
import sys
import os
from psychopy import logging
# tools must only be imported *after* event or MovieStim breaks on win32
# (JWP has no idea why!)
from psychopy.tools.arraytools import val2array
from psychopy.tools.attributetools import (attributeSetter, logAttrib,
setAttribute)
from psychopy.tools.colorspacetools import dkl2rgb, lms2rgb
from psychopy.tools.monitorunittools import (cm2pix, deg2pix, pix2cm,
pix2deg, convertToPix)
from psychopy.visual.helpers import (pointInPolygon, polygonsOverlap,
setColor, findImageFile)
from psychopy.tools.typetools import float_uint8
from psychopy.tools.arraytools import makeRadialMatrix
from . import globalVars
import numpy
from numpy import pi
from psychopy.constants import NOT_STARTED, STARTED, STOPPED
reportNImageResizes = 5 # permitted number of resizes
"""
There are several base and mix-in visual classes for multiple inheritance:
- MinimalStim: non-visual house-keeping code common to all visual stim
RatingScale inherits only from MinimalStim.
- WindowMixin: attributes/methods about the stim relative to
a visual.Window.
- LegacyVisualMixin: deprecated visual methods (eg, setRGB) added
to BaseVisualStim
- ColorMixin: for Stim that need color methods (most, not Movie)
color-related methods and attribs
- ContainerMixin: for stim that need polygon .contains() methods.
Most need this, but not Text. .contains(), .overlaps()
- TextureMixin: for texture methods namely _createTexture
(Grating, not Text)
seems to work; caveat: There were issues in earlier (non-MI) versions
of using _createTexture so it was pulled out of classes.
Now it's inside classes again. Should be watched.
    - BaseVisualStim: = Minimal + Window + Legacy. Furthermore adds
        common attributes like orientation, opacity, contrast etc.
Typically subclass BaseVisualStim to create new visual stim classes, and add
mixin(s) as needed to add functionality.
"""
class MinimalStim(object):
    """Non-visual methods and attributes for BaseVisualStim and RatingScale.
    Includes: name, autoDraw, autoLog, status, __str__
    """
    def __init__(self, name=None, autoLog=None):
        if name not in (None, ''):
            self.__dict__['name'] = name
        else:
            self.__dict__['name'] = 'unnamed %s' % self.__class__.__name__
        self.status = NOT_STARTED
        self.autoLog = autoLog
        super(MinimalStim, self).__init__()
        if self.autoLog:
            msg = ("%s is calling MinimalStim.__init__() with autolog=True. "
                   "Set autoLog to True only at the end of __init__())")
            logging.warning(msg % self.__class__.__name__)
    def __str__(self, complete=False):
        """Return 'ClassName(param=value, ...)' built from self._initParams.

        Long value reprs are abbreviated to 'TypeName(...)' unless
        `complete` is True. Falls back to object.__repr__ when the stim
        has no _initParams.
        """
        if hasattr(self, '_initParams'):
            className = self.__class__.__name__
            paramStrings = []
            for param in self._initParams:
                if hasattr(self, param):
                    val = getattr(self, param)
                    valStr = repr(val)
                    # BUGFIX: the original tested len(repr(valStr)) — a repr
                    # of a repr — which inflated the length with quoting and
                    # escapes. Test the repr string itself.
                    if len(valStr) > 50 and not complete:
                        if val.__class__.__name__ == 'attributeSetter':
                            _name = val.__getattribute__.__class__.__name__
                        else:
                            _name = val.__class__.__name__
                        valStr = "%s(...)" % _name
                else:
                    valStr = 'UNKNOWN'
                paramStrings.append("%s=%s" % (param, valStr))
            # this could be used if all params are known to exist:
            # paramStrings = ["%s=%s" %(param, getattr(self, param))
            #                 for param in self._initParams]
            params = ", ".join(paramStrings)
            s = "%s(%s)" % (className, params)
        else:
            s = object.__repr__(self)
        return s
    # Might seem simple at first, but this ensures that "name" attribute
    # appears in docs and that name setting and updating is logged.
    @attributeSetter
    def name(self, value):
        """String or None. The name of the object to be using during
        logged messages about this stim. If you have multiple stimuli
        in your experiment this really helps to make sense of log files!
        If name = None your stimulus will be called "unnamed <type>", e.g.
        visual.TextStim(win) will be called "unnamed TextStim" in the logs.
        """
        self.__dict__['name'] = value
    @attributeSetter
    def autoDraw(self, value):
        """Determines whether the stimulus should be automatically drawn
        on every frame flip.
        Value should be: `True` or `False`. You do NOT need to set this
        on every frame flip!
        """
        self.__dict__['autoDraw'] = value
        toDraw = self.win._toDraw
        toDrawDepths = self.win._toDrawDepths
        beingDrawn = (self in toDraw)
        if value == beingDrawn:
            return  # nothing to do
        elif value:
            # work out where to insert the object in the autodraw list
            depthArray = numpy.array(toDrawDepths)
            # all indices where true:
            iis = numpy.where(depthArray < self.depth)[0]
            if len(iis):  # we featured somewhere before the end of the list
                toDraw.insert(iis[0], self)
                toDrawDepths.insert(iis[0], self.depth)
            else:
                toDraw.append(self)
                toDrawDepths.append(self.depth)
            self.status = STARTED
        elif value == False:
            # remove from autodraw lists
            toDrawDepths.pop(toDraw.index(self))  # remove from depths
            toDraw.remove(self)  # remove from draw list
            # BUGFIX: iterate over a snapshot of _editableChildren — removing
            # from the list being iterated silently skips the next element.
            for c in list(self.win._editableChildren):
                if c() == self:
                    self.win._editableChildren.remove(c)
            self.status = STOPPED
    def setAutoDraw(self, value, log=None):
        """Sets autoDraw. Usually you can use 'stim.attribute = value'
        syntax instead, but use this method to suppress the log message.
        """
        setAttribute(self, 'autoDraw', value, log)
    @attributeSetter
    def autoLog(self, value):
        """Whether every change in this stimulus should be auto logged.
        Value should be: `True` or `False`. Set to `False` if your
        stimulus is updating frequently (e.g. updating its position every
        frame) and you want to avoid swamping the log file with
        messages that aren't likely to be useful.
        """
        self.__dict__['autoLog'] = value
    def setAutoLog(self, value=True, log=None):
        """Usually you can use 'stim.attribute = value' syntax instead,
        but use this method if you need to suppress the log message.
        """
        setAttribute(self, 'autoLog', value, log)
class LegacyVisualMixin(object):
    """Class to hold deprecated visual methods and attributes.
    Intended only for use as a mixin class for BaseVisualStim, to maintain
    backwards compatibility while reducing clutter in class BaseVisualStim.
    """
    # def __init__(self):
    #     super(LegacyVisualMixin, self).__init__()
    def _calcSizeRendered(self):
        """DEPRECATED in 1.80.00. This functionality is now handled
        by _updateVertices() and verticesPix
        """
        # 'norm', 'pix' and 'height' units need no monitor-based conversion;
        # 'deg'/'cm' are converted to pixels via the window's monitor.
        if self.units in ['norm', 'pix', 'height']:
            self._sizeRendered = copy.copy(self.size)
        elif self.units in ['deg', 'degs']:
            self._sizeRendered = deg2pix(self.size, self.win.monitor)
        elif self.units == 'cm':
            self._sizeRendered = cm2pix(self.size, self.win.monitor)
        else:
            logging.error("Stimulus units should be 'height', 'norm', "
                          "'deg', 'cm' or 'pix', not '%s'" % self.units)
    def _calcPosRendered(self):
        """DEPRECATED in 1.80.00. This functionality is now handled
        by _updateVertices() and verticesPix.
        """
        # NOTE(review): unlike _calcSizeRendered above there is no else
        # branch logging an error for unrecognised units here — confirm
        # whether that asymmetry is intentional.
        if self.units in ['norm', 'pix', 'height']:
            self._posRendered = copy.copy(self.pos)
        elif self.units in ['deg', 'degs']:
            self._posRendered = deg2pix(self.pos, self.win.monitor)
        elif self.units == 'cm':
            self._posRendered = cm2pix(self.pos, self.win.monitor)
    def _getPolyAsRendered(self):
        """DEPRECATED. Return a list of vertices as rendered.
        """
        # rotate the rendered vertices by -ori and offset by rendered pos
        oriRadians = numpy.radians(self.ori)
        sinOri = numpy.sin(-oriRadians)
        cosOri = numpy.cos(-oriRadians)
        x = (self._verticesRendered[:, 0] * cosOri -
             self._verticesRendered[:, 1] * sinOri)
        y = (self._verticesRendered[:, 0] * sinOri +
             self._verticesRendered[:, 1] * cosOri)
        return numpy.column_stack((x, y)) + self._posRendered
    def setDKL(self, newDKL, operation=''):
        """DEPRECATED since v1.60.05: Please use the `color` attribute
        """
        # NOTE(review): keyword is `val=` here but `value=` in setLMS below;
        # one of the two is presumably wrong — check _set()'s signature.
        self._set('dkl', val=newDKL, op=operation)
        self.setRGB(dkl2rgb(self.dkl, self.win.dkl_rgb))
    def setLMS(self, newLMS, operation=''):
        """DEPRECATED since v1.60.05: Please use the `color` attribute
        """
        self._set('lms', value=newLMS, op=operation)
        self.setRGB(lms2rgb(self.lms, self.win.lms_rgb))
    def setRGB(self, newRGB, operation='', log=None):
        """DEPRECATED since v1.60.05: Please use the `color` attribute
        """
        from psychopy.visual.helpers import setTexIfNoShaders
        self._set('rgb', newRGB, operation)
        setTexIfNoShaders(self)
        # non-shader TextStim bakes color into its texture: flag a rebuild
        if self.__class__.__name__ == 'TextStim' and not self.useShaders:
            self._needSetText = True
    @attributeSetter
    def depth(self, value):
        """DEPRECATED. Depth is now controlled simply by drawing order.
        """
        self.__dict__['depth'] = value
class ColorMixin(object):
    """Mixin class for visual stim that need color and or contrast.
    """
    # def __init__(self):
    #     super(ColorStim, self).__init__()
    @attributeSetter
    def color(self, value):
        """Color of the stimulus
        Value should be one of:
            + string: to specify a :ref:`colorNames`. Any of the standard
              html/X11 `color names
              <http://www.w3schools.com/html/html_colornames.asp>`
              can be used.
            + :ref:`hexColors`
            + numerically: (scalar or triplet) for DKL, RGB or
                other :ref:`colorspaces`. For
                these, :ref:`operations <attrib-operations>` are supported.
        When color is specified using numbers, it is interpreted with
        respect to the stimulus' current colorSpace. If color is given as a
        single value (scalar) then this will be applied to all 3 channels.
        Examples
        --------
        For whatever stim you have::
            stim.color = 'white'
            stim.color = 'RoyalBlue'  # (the case is actually ignored)
            stim.color = '#DDA0DD'  # DDA0DD is hexadecimal for plum
            stim.color = [1.0, -1.0, -1.0]  # if stim.colorSpace='rgb':
                            # a red color in rgb space
            stim.color = [0.0, 45.0, 1.0]  # if stim.colorSpace='dkl':
                            # DKL space with elev=0, azimuth=45
            stim.color = [0, 0, 255]  # if stim.colorSpace='rgb255':
                            # a blue stimulus using rgb255 space
            stim.color = 255  # interpreted as (255, 255, 255)
                              # which is white in rgb255.
        :ref:`Operations <attrib-operations>` work as normal for all numeric
        colorSpaces (e.g. 'rgb', 'hsv' and 'rgb255') but not for strings, like
        named and hex. For example, assuming that colorSpace='rgb'::
            stim.color += [1, 1, 1]  # increment all guns by 1 value
            stim.color *= -1  # multiply the color by -1 (which in this
                              # space inverts the contrast)
            stim.color *= [0.5, 0, 1]  # decrease red, remove green, keep blue
        You can use `setColor` if you want to set color and colorSpace in one
        line. These two are equivalent::
            stim.setColor((0, 128, 255), 'rgb255')
            # ... is equivalent to
            stim.colorSpace = 'rgb255'
            stim.color = (0, 128, 255)
        """
        # delegate to setColor; it updates the rgb attribute as a side effect
        self.setColor(
            value, log=False)  # logging already done by attributeSettter
    @attributeSetter
    def colorSpace(self, value):
        """The name of the color space currently being used
        Value should be: a string or None
        For strings and hex values this is not needed.
        If None the default colorSpace for the stimulus is
        used (defined during initialisation).
        Please note that changing colorSpace does not change stimulus
        parameters. Thus you usually want to specify colorSpace before
        setting the color. Example::
            # A light green text
            stim = visual.TextStim(win, 'Color me!',
                                   color=(0, 1, 0), colorSpace='rgb')
            # An almost-black text
            stim.colorSpace = 'rgb255'
            # Make it light green again
            stim.color = (128, 255, 128)
        """
        self.__dict__['colorSpace'] = value
    @attributeSetter
    def contrast(self, value):
        """A value that is simply multiplied by the color
        Value should be: a float between -1 (negative) and 1 (unchanged).
        :ref:`Operations <attrib-operations>` supported.
        Set the contrast of the stimulus, i.e. scales how far the stimulus
        deviates from the middle grey. You can also use the stimulus
        `opacity` to control contrast, but that cannot be negative.
        Examples::
            stim.contrast = 1.0  # unchanged contrast
            stim.contrast = 0.5  # decrease contrast
            stim.contrast = 0.0  # uniform, no contrast
            stim.contrast = -0.5  # slightly inverted
            stim.contrast = -1.0  # totally inverted
        Setting contrast outside range -1 to 1 is permitted, but may
        produce strange results if color values exceeds the monitor limits.::
            stim.contrast = 1.2  # increases contrast
            stim.contrast = -1.2  # inverts with increased contrast
        """
        self.__dict__['contrast'] = value
        # If we don't have shaders we need to rebuild the stimulus
        if hasattr(self, 'useShaders'):
            if not self.useShaders:
                # we'll need to update the textures for the stimulus
                # (sometime before drawing but not now)
                if self.__class__.__name__ == 'TextStim':
                    self.text = self.text  # call attributeSetter
                # GratingStim, RadialStim, ImageStim etc
                elif hasattr(self, '_needTextureUpdate'):
                    self._needTextureUpdate = True
                elif (hasattr(self, 'fillColor')  # a derivative of shapestim
                        or self.__class__.__name__ == 'DotStim'):
                    pass  # no need for shaders or rebuilding
                elif self.autoLog:
                    logging.warning('Tried to set contrast while useShaders '
                                    '= False but stimulus was not rebuilt. '
                                    'Contrast might remain unchanged. {}'
                                    .format(self))
        elif self.autoLog:
            logging.warning('Contrast was set on class where useShaders was '
                            'undefined. Contrast might remain unchanged')
    def setColor(self, color, colorSpace=None, operation='', log=None):
        """Usually you can use 'stim.attribute = value' syntax instead,
        but use this method if you need to suppress the log message
        and/or set colorSpace simultaneously.
        """
        # NB: the setColor helper function! Not this function itself :-)
        setColor(self, color, colorSpace=colorSpace, operation=operation,
                 rgbAttrib='rgb',  # or 'fillRGB' etc
                 colorAttrib='color')
        # non-shader TextStim bakes color into its texture: flag a rebuild
        if self.__class__.__name__ == 'TextStim' and not self.useShaders:
            self._needSetText = True
        logAttrib(self, log, 'color',
                  value='%s (%s)' % (self.color, self.colorSpace))
    def setContrast(self, newContrast, operation='', log=None):
        """Usually you can use 'stim.attribute = value' syntax instead,
        but use this method if you need to suppress the log message
        """
        setAttribute(self, 'contrast', newContrast, log, operation)
    def _getDesiredRGB(self, rgb, colorSpace, contrast):
        """ Convert color to RGB while adding contrast.
        Requires self.rgb, self.colorSpace and self.contrast
        """
        # Ensure that we work on 0-centered color (to make negative contrast
        # values work)
        if colorSpace not in ['rgb', 'dkl', 'lms', 'hsv']:
            rgb = rgb / 127.5 - 1
        # Convert to RGB in range 0:1 and scaled for contrast
        # NB glColor will clamp it to be 0-1 (whether or not we use FBO)
        desiredRGB = (rgb * contrast + 1) / 2.0
        if not self.win.useFBO:
            # Check that boundaries are not exceeded. If we have an FBO that
            # can handle this
            if numpy.any(desiredRGB > 1.0) or numpy.any(desiredRGB < 0):
                msg = ('Desired color %s (in RGB 0->1 units) falls '
                       'outside the monitor gamut. Drawing blue instead')
                logging.warning(msg % desiredRGB)
                desiredRGB = [0.0, 0.0, 1.0]
        return desiredRGB
class ContainerMixin(object):
    """Mixin class for visual stim that have verticesPix attrib
    and .contains() methods.
    """
    def __init__(self):
        super(ContainerMixin, self).__init__()
        # unit square, used when the stim defines no explicit vertices
        self._verticesBase = numpy.array(
            [[0.5, -0.5], [-0.5, -0.5], [-0.5, 0.5], [0.5, 0.5]])  # sqr
        self._borderBase = numpy.array(
            [[0.5, -0.5], [-0.5, -0.5], [-0.5, 0.5], [0.5, 0.5]])  # sqr
        self._rotationMatrix = [[1., 0.], [0., 1.]]  # no rotation by default
    @property
    def verticesPix(self):
        """This determines the coordinates of the vertices for the
        current stimulus in pixels, accounting for size, ori, pos and units
        """
        # because this is a property getter we can check /on-access/ if it
        # needs updating :-)
        if self._needVertexUpdate:
            self._updateVertices()
        return self.__dict__['verticesPix']
    @property
    def _borderPix(self):
        """Allows for a dynamic border that differs from self.vertices, gets
        updated dynamically with identical transformations.
        """
        if not hasattr(self, 'border'):
            msg = "%s._borderPix requested without .border" % self.name
            logging.error(msg)
            raise AttributeError(msg)
        if self._needVertexUpdate:
            self._updateVertices()
        return self.__dict__['_borderPix']
    def _updateVertices(self):
        """Sets Stim.verticesPix and ._borderPix from pos, size, ori,
        flipVert, flipHoriz
        """
        # check whether stimulus needs flipping in either direction
        flip = numpy.array([1, 1])
        if hasattr(self, 'flipHoriz') and self.flipHoriz:
            flip[0] = -1  # True=(-1), False->(+1)
        if hasattr(self, 'flipVert') and self.flipVert:
            flip[1] = -1  # True=(-1), False->(+1)
        if hasattr(self, '_tesselVertices'):  # Shapes need to render from this
            verts = self._tesselVertices
        elif hasattr(self, 'vertices'):
            verts = self.vertices
        else:
            verts = self._verticesBase
        # set size and orientation, combine with position and convert to pix:
        if hasattr(self, 'fieldSize'):
            # this is probably a DotStim and size is handled differently
            verts = numpy.dot(verts * flip, self._rotationMatrix)
        else:
            verts = numpy.dot(self.size * verts * flip, self._rotationMatrix)
        verts = convertToPix(vertices=verts, pos=self.pos,
                             win=self.win, units=self.units)
        self.__dict__['verticesPix'] = verts
        if hasattr(self, 'border'):
            # border gets the same flip/rotate/convert treatment as vertices
            border = numpy.dot(self.size * self.border *
                               flip, self._rotationMatrix)
            border = convertToPix(
                vertices=border, pos=self.pos, win=self.win, units=self.units)
            self.__dict__['_borderPix'] = border
        self._needVertexUpdate = False
        self._needUpdate = True  # but we presumably need to update the list
    def contains(self, x, y=None, units=None):
        """Returns True if a point x,y is inside the stimulus' border.
        Can accept variety of input options:
            + two separate args, x and y
            + one arg (list, tuple or array) containing two vals (x,y)
            + an object with a getPos() method that returns x,y, such
                as a :class:`~psychopy.event.Mouse`.
        Returns `True` if the point is within the area defined either by its
        `border` attribute (if one defined), or its `vertices` attribute if
        there is no .border. This method handles
        complex shapes, including concavities and self-crossings.
        Note that, if your stimulus uses a mask (such as a Gaussian) then
        this is not accounted for by the `contains` method; the extent of the
        stimulus is determined purely by the size, position (pos), and
        orientation (ori) settings (and by the vertices for shape stimuli).
        See Coder demos: shapeContains.py
        """
        # get the object in pixels
        if hasattr(x, 'border'):
            xy = x._borderPix  # access only once - this is a property
            units = 'pix'  # we can forget about the units
        elif hasattr(x, 'verticesPix'):
            # access only once - this is a property (slower to access)
            xy = x.verticesPix
            units = 'pix'  # we can forget about the units
        elif hasattr(x, 'getPos'):
            xy = x.getPos()
            units = x.units
        elif type(x) in [list, tuple, numpy.ndarray]:
            xy = numpy.array(x)
        else:
            xy = numpy.array((x, y))
        # try to work out what units x,y has
        if units is None:
            if hasattr(xy, 'units'):
                units = xy.units
            else:
                units = self.units
        if units != 'pix':
            xy = convertToPix(xy, pos=(0, 0), units=units, win=self.win)
        # ourself in pixels
        if hasattr(self, 'border'):
            poly = self._borderPix  # e.g., outline vertices
        elif hasattr(self, 'boundingBox'):
            if abs(self.ori) > 0.1:
                raise RuntimeError("TextStim.contains() doesn't currently "
                                   "support rotated text.")
            w, h = self.boundingBox  # e.g., outline vertices
            x, y = self.posPix
            poly = numpy.array([[x+w/2, y-h/2], [x-w/2, y-h/2],
                                [x-w/2, y+h/2], [x+w/2, y+h/2]])
        else:
            poly = self.verticesPix  # e.g., tessellated vertices
        return pointInPolygon(xy[0], xy[1], poly=poly)
    def overlaps(self, polygon):
        """Returns `True` if this stimulus intersects another one.
        If `polygon` is another stimulus instance, then the vertices
        and location of that stimulus will be used as the polygon.
        Overlap detection is typically very good, but it
        can fail with very pointy shapes in a crossed-swords configuration.
        Note that, if your stimulus uses a mask (such as a Gaussian blob)
        then this is not accounted for by the `overlaps` method; the extent
        of the stimulus is determined purely by the size, pos, and
        orientation settings (and by the vertices for shape stimuli).
        See coder demo, shapeContains.py
        """
        return polygonsOverlap(self, polygon)
class TextureMixin(object):
"""Mixin class for visual stim that have textures.
Could move visual.helpers.setTexIfNoShaders() into here
"""
# def __init__(self):
# super(TextureMixin, self).__init__()
    def _createTexture(self, tex, id, pixFormat,
                       stim, res=128, maskParams=None,
                       forcePOW2=True, dataType=None,
                       wrapping=True):
        """Build an OpenGL texture from `tex` and upload it under texture
        name `id`; returns True if the texture was treated as luminance.

        :params:
            id:
                is the texture ID
            pixFormat:
                GL.GL_ALPHA, GL.GL_RGB
            useShaders:
                bool
            interpolate:
                bool (determines whether texture will
                use GL_LINEAR or GL_NEAREST
            res:
                the resolution of the texture (unless
                a bitmap image is used)
            dataType:
                None, GL.GL_UNSIGNED_BYTE, GL_FLOAT.
                Only affects image files (numpy arrays will be float)
        For grating stimuli (anything that needs multiple cycles)
        forcePOW2 should be set to be True. Otherwise the wrapping
        of the texture will not work.
        """
        # Create an intensity texture, ranging -1:1.0
        # NOTE(review): `notSqr` is assigned below but never read inside
        # this method — confirm whether it is dead code.
        notSqr = False  # most of the options will be creating a sqr texture
        wasImage = False  # change this if image loading works
        useShaders = stim.useShaders
        interpolate = stim.interpolate
        if dataType is None:
            if useShaders and pixFormat == GL.GL_RGB:
                dataType = GL.GL_FLOAT
            else:
                dataType = GL.GL_UNSIGNED_BYTE
        # Fill out unspecified portions of maskParams with default values
        if maskParams is None:
            maskParams = {}
        # fringeWidth affects the proportion of the stimulus diameter that is
        # devoted to the raised cosine.
        allMaskParams = {'fringeWidth': 0.2, 'sd': 3}
        allMaskParams.update(maskParams)
        sin = numpy.sin
        # ------- stage 1: turn `tex` into an `intensity` array -------
        if type(tex) == numpy.ndarray:
            # handle a numpy array
            # for now this needs to be an NxN intensity array
            intensity = tex.astype(numpy.float32)
            if intensity.max() > 1 or intensity.min() < -1:
                logging.error('numpy arrays used as textures should be in '
                              'the range -1(black):1(white)')
            if len(tex.shape) == 3:
                wasLum = False
            else:
                wasLum = True
            # is it 1D?
            if tex.shape[0] == 1:
                stim._tex1D = True
                res = tex.shape[1]
            elif len(tex.shape) == 1 or tex.shape[1] == 1:
                stim._tex1D = True
                res = tex.shape[0]
            else:
                stim._tex1D = False
                # check if it's a square power of two
                maxDim = max(tex.shape)
                powerOf2 = 2**numpy.ceil(numpy.log2(maxDim))
                if (forcePOW2 and
                        (tex.shape[0] != powerOf2 or
                         tex.shape[1] != powerOf2)):
                    logging.error("Requiring a square power of two (e.g. "
                                  "16 x 16, 256 x 256) texture but didn't "
                                  "receive one")
                res = tex.shape[0]
            if useShaders:
                dataType = GL.GL_FLOAT
        elif tex in (None, "none", "None", "color"):
            # 4x4 (2x2 is SUPPOSED to be fine but generates weird colors!)
            res = 1
            intensity = numpy.ones([res, res], numpy.float32)
            wasLum = True
            wrapping = True  # override any wrapping setting for None
        elif tex == "sin":
            # NB 1j*res is a special mgrid notation
            onePeriodX, onePeriodY = numpy.mgrid[0:res, 0:2 * pi:1j * res]
            intensity = numpy.sin(onePeriodY - pi / 2)
            wasLum = True
        elif tex == "sqr":  # square wave (symmetric duty cycle)
            # NB 1j*res is a special mgrid notation
            onePeriodX, onePeriodY = numpy.mgrid[0:res, 0:2 * pi:1j * res]
            sinusoid = numpy.sin(onePeriodY - pi / 2)
            intensity = numpy.where(sinusoid > 0, 1, -1)
            wasLum = True
        elif tex == "saw":
            intensity = (numpy.linspace(-1.0, 1.0, res, endpoint=True) *
                         numpy.ones([res, 1]))
            wasLum = True
        elif tex == "tri":
            # -1:3 means the middle is at +1
            intens = numpy.linspace(-1.0, 3.0, res, endpoint=True)
            # remove from 3 to get back down to -1
            intens[res // 2 + 1:] = 2.0 - intens[res // 2 + 1:]
            intensity = intens * numpy.ones([res, 1])  # make 2D
            wasLum = True
        elif tex == "sinXsin":
            # NB 1j*res is a special mgrid notation
            onePeriodX, onePeriodY = numpy.mgrid[0:2 * pi:1j * res,
                                                 0:2 * pi:1j * res]
            intensity = sin(onePeriodX - pi / 2) * sin(onePeriodY - pi / 2)
            wasLum = True
        elif tex == "sqrXsqr":
            # NB 1j*res is a special mgrid notation
            onePeriodX, onePeriodY = numpy.mgrid[0:2 * pi:1j * res,
                                                 0:2 * pi:1j * res]
            sinusoid = sin(onePeriodX - pi / 2) * sin(onePeriodY - pi / 2)
            intensity = numpy.where(sinusoid > 0, 1, -1)
            wasLum = True
        elif tex == "circle":
            rad = makeRadialMatrix(res)
            intensity = (rad <= 1) * 2 - 1
            wasLum = True
        elif tex == "gauss":
            rad = makeRadialMatrix(res)
            # 3sd.s by the edge of the stimulus
            invVar = (1.0 / allMaskParams['sd']) ** 2.0
            intensity = numpy.exp( -rad**2.0 / (2.0 * invVar)) * 2 - 1
            wasLum = True
        elif tex == "cross":
            X, Y = numpy.mgrid[-1:1:1j * res, -1:1:1j * res]
            tfNegCross = (((X < -0.2) & (Y < -0.2)) |
                          ((X < -0.2) & (Y > 0.2)) |
                          ((X > 0.2) & (Y < -0.2)) |
                          ((X > 0.2) & (Y > 0.2)))
            # tfNegCross == True at places where the cross is transparent,
            # i.e. the four corners
            intensity = numpy.where(tfNegCross, -1, 1)
            wasLum = True
        elif tex == "radRamp":  # a radial ramp
            rad = makeRadialMatrix(res)
            intensity = 1 - 2 * rad
            # clip off the corners (circular)
            intensity = numpy.where(rad < -1, intensity, -1)
            wasLum = True
        elif tex == "raisedCos":  # A raised cosine
            wasLum = True
            hammingLen = 1000  # affects the 'granularity' of the raised cos
            rad = makeRadialMatrix(res)
            intensity = numpy.zeros_like(rad)
            intensity[numpy.where(rad < 1)] = 1
            frng = allMaskParams['fringeWidth']
            raisedCosIdx = numpy.where(
                [numpy.logical_and(rad <= 1, rad >= 1 - frng)])[1:]
            # Make a raised_cos (half a hamming window):
            raisedCos = numpy.hamming(hammingLen)[:hammingLen // 2]
            raisedCos -= numpy.min(raisedCos)
            raisedCos /= numpy.max(raisedCos)
            # Measure the distance from the edge - this is your index into the
            # hamming window:
            dFromEdge = numpy.abs(
                (1 - allMaskParams['fringeWidth']) - rad[raisedCosIdx])
            dFromEdge /= numpy.max(dFromEdge)
            dFromEdge *= numpy.round(hammingLen/2)
            # This is the indices into the hamming (larger for small distances
            # from the edge!):
            portionIdx = (-1 * dFromEdge).astype(int)
            # Apply the raised cos to this portion:
            intensity[raisedCosIdx] = raisedCos[portionIdx]
            # Scale it into the interval -1:1:
            intensity = intensity - 0.5
            intensity /= numpy.max(intensity)
            # Sometimes there are some remaining artifacts from this process,
            # get rid of them:
            artifactIdx = numpy.where(numpy.logical_and(intensity == -1,
                                                        rad < 0.99))
            intensity[artifactIdx] = 1
            artifactIdx = numpy.where(numpy.logical_and(intensity == 1,
                                                        rad > 0.99))
            intensity[artifactIdx] = 0
        else:
            # not an array and not a known texture name: a filename or an
            # already-loaded PIL image
            if isinstance(tex, (basestring, Path)):
                # maybe tex is the name of a file:
                filename = findImageFile(tex)
                if not filename:
                    msg = "Couldn't find image %s; check path? (tried: %s)"
                    logging.error(msg % (tex, os.path.abspath(tex)))
                    logging.flush()
                    raise IOError(msg % (tex, os.path.abspath(tex)))
                try:
                    im = Image.open(filename)
                    im = im.transpose(Image.FLIP_TOP_BOTTOM)
                except IOError:
                    msg = "Found file '%s', failed to load as an image"
                    logging.error(msg % (filename))
                    logging.flush()
                    msg = "Found file '%s' [= %s], failed to load as an image"
                    raise IOError(msg % (tex, os.path.abspath(tex)))
            else:
                # can't be a file; maybe its an image already in memory?
                try:
                    im = tex.copy().transpose(Image.FLIP_TOP_BOTTOM)
                except AttributeError:  # nope, not an image in memory
                    msg = "Couldn't make sense of requested image."
                    logging.error(msg)
                    logging.flush()
                    raise AttributeError(msg)
            # at this point we have a valid im
            stim._origSize = im.size
            wasImage = True
            # is it 1D?
            if im.size[0] == 1 or im.size[1] == 1:
                logging.error("Only 2D textures are supported at the moment")
            else:
                maxDim = max(im.size)
                powerOf2 = int(2**numpy.ceil(numpy.log2(maxDim)))
                if im.size[0] != powerOf2 or im.size[1] != powerOf2:
                    if not forcePOW2:
                        notSqr = True
                    elif globalVars.nImageResizes < reportNImageResizes:
                        msg = ("Image '%s' was not a square power-of-two ' "
                               "'image. Linearly interpolating to be %ix%i")
                        logging.warning(msg % (tex, powerOf2, powerOf2))
                        globalVars.nImageResizes += 1
                        im = im.resize([powerOf2, powerOf2], Image.BILINEAR)
                    elif globalVars.nImageResizes == reportNImageResizes:
                        logging.warning("Multiple images have needed resizing"
                                        " - I'll stop bothering you!")
                        im = im.resize([powerOf2, powerOf2], Image.BILINEAR)
            # is it Luminance or RGB?
            if pixFormat == GL.GL_ALPHA and im.mode != 'L':
                # we have RGB and need Lum
                wasLum = True
                im = im.convert("L")  # force to intensity (need if was rgb)
            elif im.mode == 'L':  # we have lum and no need to change
                wasLum = True
                if useShaders:
                    dataType = GL.GL_FLOAT
            elif pixFormat == GL.GL_RGB:
                # we want RGB and might need to convert from CMYK or Lm
                # texture = im.tostring("raw", "RGB", 0, -1)
                im = im.convert("RGBA")
                wasLum = False
            if dataType == GL.GL_FLOAT:
                # convert from ubyte to float
                # much faster to avoid division 2/255
                intensity = numpy.array(im).astype(
                    numpy.float32) * 0.0078431372549019607 - 1.0
            else:
                intensity = numpy.array(im)
        # ------- stage 2: pack `intensity` into a GL `data` array -------
        if pixFormat == GL.GL_RGB and wasLum and dataType == GL.GL_FLOAT:
            # grating stim on good machine
            # keep as float32 -1:1
            if (sys.platform != 'darwin' and
                    stim.win.glVendor.startswith('nvidia')):
                # nvidia under win/linux might not support 32bit float
                # could use GL_LUMINANCE32F_ARB here but check shader code?
                internalFormat = GL.GL_RGB16F_ARB
            else:
                # we've got a mac or an ATI card and can handle
                # 32bit float textures
                # could use GL_LUMINANCE32F_ARB here but check shader code?
                internalFormat = GL.GL_RGB32F_ARB
            # initialise data array as a float
            data = numpy.ones((intensity.shape[0], intensity.shape[1], 3),
                              numpy.float32)
            data[:, :, 0] = intensity  # R
            data[:, :, 1] = intensity  # G
            data[:, :, 2] = intensity  # B
        elif (pixFormat == GL.GL_RGB and
                wasLum and
                dataType != GL.GL_FLOAT and
                stim.useShaders):
            # was a lum image: stick with ubyte for speed
            internalFormat = GL.GL_RGB
            # initialise data array as a float
            data = numpy.ones((intensity.shape[0], intensity.shape[1], 3),
                              numpy.ubyte)
            data[:, :, 0] = intensity  # R
            data[:, :, 1] = intensity  # G
            data[:, :, 2] = intensity  # B
        # Grating on legacy hardware, or ImageStim with wasLum=True
        elif pixFormat == GL.GL_RGB and wasLum and not stim.useShaders:
            # scale by rgb and convert to ubyte
            internalFormat = GL.GL_RGB
            if stim.colorSpace in ('rgb', 'dkl', 'lms', 'hsv'):
                rgb = stim.rgb
            else:
                # colour is not a float - convert to float to do the scaling
                rgb = (stim.rgb / 127.5) - 1.0
            # if wasImage it will also have ubyte values for the intensity
            if wasImage:
                intensity = (intensity / 127.5) - 1.0
            # scale by rgb
            # initialise data array as a float
            data = numpy.ones((intensity.shape[0], intensity.shape[1], 4),
                              numpy.float32)
            data[:, :, 0] = intensity * rgb[0] + stim.rgbPedestal[0]  # R
            data[:, :, 1] = intensity * rgb[1] + stim.rgbPedestal[1]  # G
            data[:, :, 2] = intensity * rgb[2] + stim.rgbPedestal[2]  # B
            data[:, :, :-1] = data[:, :, :-1] * stim.contrast
            # convert to ubyte
            data = float_uint8(data)
        elif pixFormat == GL.GL_RGB and dataType == GL.GL_FLOAT:
            # probably a custom rgb array or rgb image
            internalFormat = GL.GL_RGB32F_ARB
            data = intensity
        elif pixFormat == GL.GL_RGB:
            # not wasLum, not useShaders - an RGB bitmap with no shader
            #  optionsintensity.min()
            internalFormat = GL.GL_RGB
            data = intensity  # float_uint8(intensity)
        elif pixFormat == GL.GL_ALPHA:
            internalFormat = GL.GL_ALPHA
            dataType = GL.GL_UNSIGNED_BYTE
            if wasImage:
                data = intensity
            else:
                data = float_uint8(intensity)
        # check for RGBA textures
        if len(data.shape) > 2 and data.shape[2] == 4:
            if pixFormat == GL.GL_RGB:
                pixFormat = GL.GL_RGBA
            if internalFormat == GL.GL_RGB:
                internalFormat = GL.GL_RGBA
            elif internalFormat == GL.GL_RGB32F_ARB:
                internalFormat = GL.GL_RGBA32F_ARB
        texture = data.ctypes  # serialise
        # ------- stage 3: upload `data` to the GL texture object -------
        # bind the texture in openGL
        GL.glEnable(GL.GL_TEXTURE_2D)
        GL.glBindTexture(GL.GL_TEXTURE_2D, id)  # bind that name to the target
        # makes the texture map wrap (this is actually default anyway)
        if wrapping:
            GL.glTexParameteri(
                GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_S, GL.GL_REPEAT)
            GL.glTexParameteri(
                GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_T, GL.GL_REPEAT)
        else:
            GL.glTexParameteri(
                GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP)
            GL.glTexParameteri(
                GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP)
        # data from PIL/numpy is packed, but default for GL is 4 bytes
        GL.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1)
        # important if using bits++ because GL_LINEAR
        # sometimes extrapolates to pixel vals outside range
        if interpolate:
            GL.glTexParameteri(
                GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR)
            if useShaders:
                # GL_GENERATE_MIPMAP was only available from OpenGL 1.4
                GL.glTexParameteri(
                    GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR)
                GL.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_GENERATE_MIPMAP,
                                   GL.GL_TRUE)
                GL.glTexImage2D(GL.GL_TEXTURE_2D, 0, internalFormat,
                                data.shape[1], data.shape[0], 0,
                                pixFormat, dataType, texture)
            else:  # use glu
                GL.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER,
                                   GL.GL_LINEAR_MIPMAP_NEAREST)
                GL.gluBuild2DMipmaps(GL.GL_TEXTURE_2D, internalFormat,
                                     data.shape[1], data.shape[0],
                                     pixFormat, dataType, texture)
        else:
            GL.glTexParameteri(
                GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST)
            GL.glTexParameteri(
                GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST)
            GL.glTexImage2D(GL.GL_TEXTURE_2D, 0, internalFormat,
                            data.shape[1], data.shape[0], 0,
                            pixFormat, dataType, texture)
        GL.glTexEnvi(GL.GL_TEXTURE_ENV, GL.GL_TEXTURE_ENV_MODE,
                     GL.GL_MODULATE)  # ?? do we need this - think not!
        # unbind our texture so that it doesn't affect other rendering
        GL.glBindTexture(GL.GL_TEXTURE_2D, 0)
        return wasLum
def clearTextures(self):
    """Clear all textures associated with the stimulus.

    As of v1.61.00 this is called automatically during garbage collection
    of your stimulus, so doesn't need calling explicitly by the user.
    """
    # Free the main texture; the mask texture only exists on stimuli that
    # have one, hence the hasattr() guard.
    GL.glDeleteTextures(1, self._texID)
    if hasattr(self, '_maskID'):
        GL.glDeleteTextures(1, self._maskID)
@attributeSetter
def mask(self, value):
    """The alpha mask (forming the shape of the image)

    This can be one of various options:
        + 'circle', 'gauss', 'raisedCos', 'cross'
        + **None** (resets to default)
        + the name of an image file (most formats supported)
        + a numpy array (1xN or NxN) ranging -1:1
    """
    self.__dict__['mask'] = value
    # ImageStim stores its mask as unsigned bytes; for other stimulus
    # classes _createTexture chooses the data type itself (None).
    if self.__class__.__name__ == 'ImageStim':
        dataType = GL.GL_UNSIGNED_BYTE
    else:
        dataType = None
    # Rebuild the mask texture. GL_ALPHA: the mask only affects
    # transparency, not color; wrapping is disabled for masks.
    self._createTexture(
        value, id=self._maskID, pixFormat=GL.GL_ALPHA, dataType=dataType,
        stim=self, res=self.texRes, maskParams=self.maskParams,
        wrapping=False)
def setMask(self, value, log=None):
    """Usually you can use 'stim.attribute = value' syntax instead,
    but use this method if you need to suppress the log message.
    """
    # Delegates to the 'mask' attributeSetter; `log` controls logging.
    setAttribute(self, 'mask', value, log)
@attributeSetter
def texRes(self, value):
    """Power-of-two int. Sets the resolution of the mask and texture.

    texRes is overridden if an array or image is provided as mask.

    :ref:`Operations <attrib-operations>` supported.
    """
    self.__dict__['texRes'] = value

    # ... now rebuild textures (call attributeSetters without logging).
    # The hasattr() guards cover stimuli still being initialised.
    if hasattr(self, 'tex'):
        setAttribute(self, 'tex', self.tex, log=False)
    if hasattr(self, 'mask'):
        setAttribute(self, 'mask', self.mask, log=False)
@attributeSetter
def maskParams(self, value):
    """Various types of input. Default to None.

    This is used to pass additional parameters to the mask if those are
    needed.

        - For 'gauss' mask, pass dict {'sd': 5} to control
          standard deviation.
        - For the 'raisedCos' mask, pass a dict: {'fringeWidth':0.2},
          where 'fringeWidth' is a parameter (float, 0-1), determining
          the proportion of the patch that will be blurred by the raised
          cosine edge."""
    self.__dict__['maskParams'] = value
    # Re-apply the current mask so the new parameters take effect.
    # call attributeSetter without log
    setAttribute(self, 'mask', self.mask, log=False)
@attributeSetter
def interpolate(self, value):
    """Whether to interpolate (linearly) the texture in the stimulus

    If set to False then nearest neighbour will be used when needed,
    otherwise some form of interpolation will be used.
    """
    # Simply stored here; the value is consulted when textures are built.
    self.__dict__['interpolate'] = value
class WindowMixin(object):
    """Window-related attributes and methods.

    Used by BaseVisualStim, SimpleImageStim and ElementArrayStim.
    """

    @attributeSetter
    def win(self, value):
        """The :class:`~psychopy.visual.Window` object in which the
        stimulus will be rendered by default. (required)

        Example, drawing same stimulus in two different windows and display
        simultaneously. Assuming that you have two windows and a stimulus
        (win1, win2 and stim)::

           stim.win = win1  # stimulus will be drawn in win1
           stim.draw()  # stimulus is now drawn to win1
           stim.win = win2  # stimulus will be drawn in win2
           stim.draw()  # it is now drawn in win2
           win1.flip(waitBlanking=False)  # do not wait for next
                        # monitor update
           win2.flip()  # wait for vertical blanking.

        Note that this just changes **default** window for stimulus.
        You could also specify window-to-draw-to when drawing::

           stim.draw(win1)
           stim.draw(win2)
        """
        self.__dict__['win'] = value

    @attributeSetter
    def units(self, value):
        """
        None, 'norm', 'cm', 'deg', 'degFlat', 'degFlatPos', or 'pix'

        If None then the current units of the
        :class:`~psychopy.visual.Window` will be used.
        See :ref:`units` for explanation of other options.

        Note that when you change units, you don't change the stimulus
        parameters and it is likely to change appearance. Example::

            # This stimulus is 20% wide and 50% tall with respect to window
            stim = visual.PatchStim(win, units='norm', size=(0.2, 0.5))

            # This stimulus is 0.2 degrees wide and 0.5 degrees tall.
            stim.units = 'deg'
        """
        # An empty string or None means "inherit the window's units".
        if value is not None and len(value):
            self.__dict__['units'] = value
        else:
            self.__dict__['units'] = self.win.units

        # Update size and position if they are defined (tested as numeric).
        # If not, this is probably
        # during some init and they will be defined later, given the new unit.
        try:
            # quick and dirty way to check that both are numeric. This avoids
            # the heavier attributeSetter calls.
            self.size * self.pos
            # Re-assign through the attributeSetters so vertices get
            # recomputed in the new units.
            self.size = self.size
            self.pos = self.pos
        except Exception:
            pass

    @attributeSetter
    def useShaders(self, value):
        """Should shaders be used to render the stimulus
        (typically leave as `True`)

        If the system support the use of OpenGL shader language then leaving
        this set to True is highly recommended. If shaders cannot be used then
        various operations will be slower (notably, changes to stimulus color
        or contrast)
        """
        if value and not self.win._haveShaders:
            logging.error("Shaders were requested but aren't available. "
                          "Shaders need OpenGL 2.0+ drivers")
        if value != self.useShaders:  # if there's a change...
            self.__dict__['useShaders'] = value
            # Textures (and rendered text) are encoded differently for the
            # shader vs non-shader pipeline, so re-upload them.
            if hasattr(self, 'tex'):
                self.tex = self.tex  # calling attributeSetter
            elif hasattr(self, 'mask'):
                # calling attributeSetter (does the same as mask)
                self.mask = self.mask
            if hasattr(self, '_imName'):
                self.setImage(self._imName, log=False)
            if self.__class__.__name__ == 'TextStim':
                self._needSetText = True
            self._needUpdate = True

    def setUseShaders(self, value=True, log=None):
        """Usually you can use 'stim.attribute = value' syntax instead,
        but use this method if you need to suppress the log message"""
        setAttribute(self, 'useShaders', value, log)  # call attributeSetter

    def draw(self):
        # Abstract: concrete stimulus classes must provide their own draw().
        raise NotImplementedError('Stimulus classes must override '
                                  'visual.BaseVisualStim.draw')

    def _selectWindow(self, win):
        """Switch drawing to the specified window. Calls the window's
        _setCurrent() method which handles the switch.
        """
        win._setCurrent()

    def _updateList(self):
        """The user shouldn't need this method since it gets called
        after every call to .set()

        Chooses between using and not using shaders each call.
        """
        if self.useShaders:
            self._updateListShaders()
        else:
            self._updateListNoShaders()
class BaseVisualStim(MinimalStim, WindowMixin, LegacyVisualMixin):
    """A template for a visual stimulus class.

    Actual visual stim like GratingStim, TextStim etc... are based on this.
    Not finished...?

    Methods defined here will override Minimal & Legacy, but best to avoid
    that for simplicity & clarity.
    """

    def __init__(self, win, units=None, name='', autoLog=None):
        self.autoLog = False  # just to start off during init, set at end
        self.win = win
        self.units = units
        self._rotationMatrix = [[1., 0.], [0., 1.]]  # no rotation by default
        # self.autoLog is set at end of MinimalStim.__init__
        super(BaseVisualStim, self).__init__(name=name, autoLog=autoLog)
        if self.autoLog:
            msg = ("%s is calling BaseVisualStim.__init__() with autolog=True"
                   ". Set autoLog to True only at the end of __init__())")
            logging.warning(msg % (self.__class__.__name__))

    @attributeSetter
    def opacity(self, value):
        """Determines how visible the stimulus is relative to background

        The value should be a single float ranging 1.0 (opaque) to 0.0
        (transparent). :ref:`Operations <attrib-operations>` are supported.
        Precisely how this is used depends on the :ref:`blendMode`.
        """
        self.__dict__['opacity'] = value

        # Out-of-range values are stored as-is; just warn about them.
        if not 0 <= value <= 1 and self.autoLog:
            logging.warning('Setting opacity outside range 0.0 - 1.0'
                            ' has no additional effect')

        # opacity is coded by the texture, if not using shaders
        if hasattr(self, 'useShaders') and not self.useShaders:
            if hasattr(self, 'mask'):
                self.mask = self.mask  # call attributeSetter

    @attributeSetter
    def ori(self, value):
        """The orientation of the stimulus (in degrees).

        Should be a single value (:ref:`scalar <attrib-scalar>`).
        :ref:`Operations <attrib-operations>` are supported.

        Orientation convention is like a clock: 0 is vertical, and positive
        values rotate clockwise. Beyond 360 and below zero values wrap
        appropriately.
        """
        self.__dict__['ori'] = value
        # 0.017453292519943295 == pi / 180, i.e. degrees -> radians
        radians = value * 0.017453292519943295
        sin, cos = numpy.sin, numpy.cos
        self._rotationMatrix = numpy.array([[cos(radians), -sin(radians)],
                                            [sin(radians), cos(radians)]])
        self._needVertexUpdate = True  # need to update vertices
        self._needUpdate = True

    @attributeSetter
    def size(self, value):
        """The size (width, height) of the stimulus in the stimulus
        :ref:`units <units>`

        Value should be :ref:`x,y-pair <attrib-xy>`,
        :ref:`scalar <attrib-scalar>` (applies to both dimensions)
        or None (resets to default). :ref:`Operations <attrib-operations>`
        are supported.

        Sizes can be negative (causing a mirror-image reversal) and can
        extend beyond the window.

        Example::

            stim.size = 0.8  # Set size to (xsize, ysize) = (0.8, 0.8)
            print(stim.size)  # Outputs array([0.8, 0.8])
            stim.size += (0.5, -0.5)  # make wider and flatter: (1.3, 0.3)

        Tip: you can see the actual pixel range this corresponds to by
        looking at `stim._sizeRendered`
        """
        array = numpy.array
        value = val2array(value)  # Check correct user input
        self._requestedSize = copy.copy(value)  # to track whether we're using a default
        # None --> set to default
        if value is None:
            # Set the size to default (e.g. to the size of the loaded image)
            # calculate new size
            if self._origSize is None:  # not an image from a file
                # this was PsychoPy's original default
                value = numpy.array([0.5, 0.5])
            else:
                # we have an image; calculate the size in `units` that matches
                # original pixel size
                # also scale for retina display (virtual pixels are bigger)
                if self.win.useRetina:
                    winSize = self.win.size / 2
                else:
                    winSize = self.win.size
                # then handle main scale
                if self.units == 'pix':
                    value = numpy.array(self._origSize)
                elif self.units in ('deg', 'degFlatPos', 'degFlat'):
                    # NB when no size has been set (assume to use orig size
                    # in pix) this should not be corrected for flat anyway,
                    # so degFlat == degFlatPos
                    value = pix2deg(array(self._origSize, float),
                                    self.win.monitor)
                elif self.units == 'norm':
                    value = 2 * array(self._origSize, float) / winSize
                elif self.units == 'height':
                    value = array(self._origSize, float) / winSize[1]
                elif self.units == 'cm':
                    value = pix2cm(array(self._origSize, float),
                                   self.win.monitor)
                else:
                    msg = ("Failed to create default size for ImageStim. "
                           "Unsupported unit, %s")
                    raise AttributeError(msg % repr(self.units))
        self.__dict__['size'] = value
        self._needVertexUpdate = True
        self._needUpdate = True
        if hasattr(self, '_calcCyclesPerStim'):
            self._calcCyclesPerStim()

    @attributeSetter
    def pos(self, value):
        """The position of the center of the stimulus in the stimulus
        :ref:`units <units>`

        `value` should be an :ref:`x,y-pair <attrib-xy>`.
        :ref:`Operations <attrib-operations>` are also supported.

        Example::

            stim.pos = (0.5, 0)  # Set slightly to the right of center
            stim.pos += (0.5, -1)  # Increment pos rightwards and upwards.
                Is now (1.0, -1.0)
            stim.pos *= 0.2  # Move stim towards the center.
                Is now (0.2, -0.2)

        Tip: If you need the position of stim in pixels, you can obtain
        it like this:

            from psychopy.tools.monitorunittools import posToPix
            posPix = posToPix(stim)
        """
        self.__dict__['pos'] = val2array(value, False, False)
        self._needVertexUpdate = True
        self._needUpdate = True

    def setPos(self, newPos, operation='', log=None):
        """Usually you can use 'stim.attribute = value' syntax instead,
        but use this method if you need to suppress the log message.
        """
        setAttribute(self, 'pos', val2array(newPos, False), log, operation)

    def setDepth(self, newDepth, operation='', log=None):
        """Usually you can use 'stim.attribute = value' syntax instead,
        but use this method if you need to suppress the log message
        """
        setAttribute(self, 'depth', newDepth, log, operation)

    def setSize(self, newSize, operation='', units=None, log=None):
        """Usually you can use 'stim.attribute = value' syntax instead,
        but use this method if you need to suppress the log message
        """
        if units is None:
            # need to change this to create several units from one
            units = self.units
        setAttribute(self, 'size', val2array(newSize, False), log, operation)

    def setOri(self, newOri, operation='', log=None):
        """Usually you can use 'stim.attribute = value' syntax instead,
        but use this method if you need to suppress the log message
        """
        setAttribute(self, 'ori', newOri, log, operation)

    def setOpacity(self, newOpacity, operation='', log=None):
        """Usually you can use 'stim.attribute = value' syntax instead,
        but use this method if you need to suppress the log message
        """
        setAttribute(self, 'opacity', newOpacity, log, operation)

    def _set(self, attrib, val, op='', log=None):
        """DEPRECATED since 1.80.04 + 1.

        Use setAttribute() and val2array() instead.
        """
        # format the input value as float vectors
        # (isinstance rather than type(..) in [...] so subclasses work too)
        if isinstance(val, (tuple, list, numpy.ndarray)):
            val = val2array(val)

        # Set attribute with operation and log
        setAttribute(self, attrib, val, log, op)

        # For DotStim
        if attrib in ('nDots', 'coherence'):
            self.coherence = round(self.coherence * self.nDots) / self.nDots
|
vipins/ccccms | refs/heads/master | env/Lib/site-packages/django/contrib/gis/sitemaps/kml.py | 482 | from django.core import urlresolvers
from django.contrib.sitemaps import Sitemap
from django.contrib.gis.db.models.fields import GeometryField
from django.db import models
class KMLSitemap(Sitemap):
    """
    A minimal hook to produce KML sitemaps.
    """
    geo_format = 'kml'

    def __init__(self, locations=None):
        # If no locations specified, then we try to build for
        # every model in installed applications.
        self.locations = self._build_kml_sources(locations)

    def _build_kml_sources(self, sources):
        """
        Goes through the given sources and returns a 3-tuple of
        the application label, module name, and field name of every
        GeometryField encountered in the sources.

        If no sources are provided, then all models.
        """
        kml_sources = []
        if sources is None:
            sources = models.get_models()
        for source in sources:
            if isinstance(source, models.base.ModelBase):
                # A model class: collect every geometry field it defines.
                for field in source._meta.fields:
                    if isinstance(field, GeometryField):
                        kml_sources.append((source._meta.app_label,
                                            source._meta.module_name,
                                            field.name))
            elif isinstance(source, (list, tuple)):
                # Already an (app_label, module_name, field_name) triple.
                if len(source) != 3:
                    raise ValueError('Must specify a 3-tuple of (app_label, module_name, field_name).')
                kml_sources.append(source)
            else:
                raise TypeError('KML Sources must be a model or a 3-tuple.')
        return kml_sources

    def get_urls(self, page=1, site=None):
        """
        This method is overridden so the appropriate `geo_format` attribute
        is placed on each URL element.
        """
        urls = Sitemap.get_urls(self, page=page, site=site)
        for url in urls:
            url['geo_format'] = self.geo_format
        return urls

    def items(self):
        return self.locations

    def location(self, obj):
        # obj is an (app_label, module_name, field_name) triple built by
        # _build_kml_sources; reverse the matching geo sitemap view.
        return urlresolvers.reverse(
            'django.contrib.gis.sitemaps.views.%s' % self.geo_format,
            kwargs={'label': obj[0],
                    'model': obj[1],
                    'field_name': obj[2],
                    })
class KMZSitemap(KMLSitemap):
    # Identical to KMLSitemap except that it points at the zipped (KMZ)
    # variant of the geo sitemap views.
    geo_format = 'kmz'
|
SlicerRt/SlicerDebuggingTools | refs/heads/master | PyDevRemoteDebug/ptvsd-4.1.3/ptvsd/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_coverage.py | 2 | import os.path
import sys
from _pydevd_bundle.pydevd_constants import Null
#=======================================================================================================================
# get_coverage_files
#=======================================================================================================================
def get_coverage_files(coverage_output_dir, number_of_files):
    """Reserve `number_of_files` names of the form '.coverage.<i>' inside
    `coverage_output_dir` that do not currently exist on disk.

    Indices that collide with an existing file are skipped, so each
    returned path is safe to hand to a coverage-collecting process.
    Returns a list of `number_of_files` absolute-or-relative paths
    (relative iff `coverage_output_dir` is relative).
    """
    ret = []
    i = 0
    while len(ret) < number_of_files:
        f = os.path.join(coverage_output_dir, '.coverage.%s' % i)
        i += 1
        # Skip names already taken by a previous (or concurrent) run.
        if not os.path.exists(f):
            ret.append(f)
    return ret
#=======================================================================================================================
# start_coverage_support
#=======================================================================================================================
def start_coverage_support(configuration):
    # Convenience wrapper: unpack the relevant fields of the configuration
    # object and delegate to start_coverage_support_from_params().
    return start_coverage_support_from_params(
        configuration.coverage_output_dir,
        configuration.coverage_output_file,
        configuration.jobs,
        configuration.coverage_include,
    )
#=======================================================================================================================
# start_coverage_support_from_params
#=======================================================================================================================
def start_coverage_support_from_params(coverage_output_dir, coverage_output_file, jobs, coverage_include):
    """Set up code-coverage measurement for a (possibly parallel) test run.

    Returns a tuple (coverage_files, coverage_instance):

    - coverage_files: list of '.coverage.<i>' file names reserved for
      child processes (non-empty only when coverage_output_dir is given).
    - coverage_instance: a started coverage.coverage object, or a Null
      stand-in when coverage is disabled or the module is unavailable.

    Errors (missing coverage module, bad output dir) are reported to
    stderr and coverage is simply skipped -- best-effort by design.
    """
    coverage_files = []
    coverage_instance = Null()
    if coverage_output_dir or coverage_output_file:
        try:
            import coverage  # @UnresolvedImport
        except Exception:
            # Keep going without coverage, but tell the user why.
            sys.stderr.write('Error: coverage module could not be imported\n')
            sys.stderr.write('Please make sure that the coverage module (http://nedbatchelder.com/code/coverage/)\n')
            sys.stderr.write('is properly installed in your interpreter: %s\n' % (sys.executable,))

            import traceback
            traceback.print_exc()
        else:
            if coverage_output_dir:
                if not os.path.exists(coverage_output_dir):
                    sys.stderr.write('Error: directory for coverage output (%s) does not exist.\n' % (coverage_output_dir,))

                elif not os.path.isdir(coverage_output_dir):
                    sys.stderr.write('Error: expected (%s) to be a directory.\n' % (coverage_output_dir,))

                else:
                    n = jobs
                    if n <= 0:
                        n += 1
                    n += 1  # Add 1 more for the current process (which will do the initial import).
                    coverage_files = get_coverage_files(coverage_output_dir, n)
                    # The current process takes the first file; the
                    # remaining ones are handed out to child processes.
                    os.environ['COVERAGE_FILE'] = coverage_files.pop(0)

                    coverage_instance = coverage.coverage(source=[coverage_include])
                    coverage_instance.start()

            elif coverage_output_file:
                # Client of parallel run.
                os.environ['COVERAGE_FILE'] = coverage_output_file
                coverage_instance = coverage.coverage(source=[coverage_include])
                coverage_instance.start()

    return coverage_files, coverage_instance
|
projectodd/kubernetes | refs/heads/kubesh | examples/cluster-dns/images/frontend/client.py | 504 | #!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import requests
import socket
from urlparse import urlparse
def CheckServiceAddress(address):
    # Resolve the hostname part of the URL through DNS and print the
    # resulting service IP -- a sanity check that cluster DNS works.
    # NOTE: Python 2 syntax (print statement, urlparse module).
    hostname = urlparse(address).hostname
    service_address = socket.gethostbyname(hostname)
    print service_address
def GetServerResponse(address):
    # Issue an HTTP GET against the given service URL and echo the
    # response object and its body to stdout.
    print 'Send request to:', address
    response = requests.get(address)
    print response
    print response.content
def Main():
    # Parse the single positional argument (the service URL), verify that
    # its hostname resolves, then fetch and print the page.
    parser = argparse.ArgumentParser()
    parser.add_argument('address')
    args = parser.parse_args()
    CheckServiceAddress(args.address)
    GetServerResponse(args.address)


if __name__ == "__main__":
    Main()
|
aewhatley/scikit-learn | refs/heads/master | sklearn/externals/joblib/hashing.py | 194 | """
Fast cryptographic hash of Python objects, with a special case for fast
hashing of numpy arrays.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import warnings
import pickle
import hashlib
import sys
import types
import struct
import io
if sys.version_info[0] < 3:
    # Python 2: subclass the regular Pickler.
    Pickler = pickle.Pickler
else:
    # Python 3: subclass the pure-Python pickler; Hasher below overrides
    # save()/save_global() and copies the dispatch table, which the
    # C-accelerated pickler does not expose.
    Pickler = pickle._Pickler
class _ConsistentSet(object):
""" Class used to ensure the hash of Sets is preserved
whatever the order of its items.
"""
def __init__(self, set_sequence):
self._sequence = sorted(set_sequence)
class _MyHash(object):
    """ Class used to hash objects that won't normally pickle """

    def __init__(self, *args):
        # Store the identifying components; pickling this args tuple is
        # what contributes to the hash.
        self.args = args
class Hasher(Pickler):
    """ A subclass of pickler, to do cryptographic hashing, rather than
        pickling.
    """

    def __init__(self, hash_name='md5'):
        # Pickle into an in-memory buffer; the buffer's contents are fed
        # to the hash object in hash().
        self.stream = io.BytesIO()
        Pickler.__init__(self, self.stream, protocol=2)
        # Initialise the hash obj
        self._hash = hashlib.new(hash_name)

    def hash(self, obj, return_digest=True):
        # Pickle obj into self.stream, then digest the pickled bytes.
        # A PicklingError is reported as a warning and whatever was
        # pickled so far is still hashed.
        try:
            self.dump(obj)
        except pickle.PicklingError as e:
            warnings.warn('PicklingError while hashing %r: %r' % (obj, e))
        dumps = self.stream.getvalue()
        self._hash.update(dumps)
        if return_digest:
            return self._hash.hexdigest()

    def save(self, obj):
        if isinstance(obj, (types.MethodType, type({}.pop))):
            # the Pickler cannot pickle instance methods; here we decompose
            # them into components that make them uniquely identifiable
            if hasattr(obj, '__func__'):
                func_name = obj.__func__.__name__
            else:
                func_name = obj.__name__
            inst = obj.__self__
            if type(inst) == type(pickle):
                obj = _MyHash(func_name, inst.__name__)
            elif inst is None:
                # type(None) or type(module) do not pickle
                obj = _MyHash(func_name, inst)
            else:
                cls = obj.__self__.__class__
                obj = _MyHash(func_name, inst, cls)
        Pickler.save(self, obj)

    # The dispatch table of the pickler is not accessible in Python
    # 3, as these lines are only bugware for IPython, we skip them.
    def save_global(self, obj, name=None, pack=struct.pack):
        # We have to override this method in order to deal with objects
        # defined interactively in IPython that are not injected in
        # __main__
        kwargs = dict(name=name, pack=pack)
        if sys.version_info >= (3, 4):
            # 'pack' was removed from save_global's signature in 3.4.
            del kwargs['pack']
        try:
            Pickler.save_global(self, obj, **kwargs)
        except pickle.PicklingError:
            # Retry after (possibly) injecting the object into its module
            # below -- see note above about interactively-defined objects.
            Pickler.save_global(self, obj, **kwargs)
            module = getattr(obj, "__module__", None)
            if module == '__main__':
                my_name = name
                if my_name is None:
                    my_name = obj.__name__
                mod = sys.modules[module]
                if not hasattr(mod, my_name):
                    # IPython doesn't inject the variables define
                    # interactively in __main__
                    setattr(mod, my_name, obj)

    # Register save_global for the types that need the IPython workaround.
    dispatch = Pickler.dispatch.copy()
    # builtin
    dispatch[type(len)] = save_global
    # type
    dispatch[type(object)] = save_global
    # classobj
    dispatch[type(Pickler)] = save_global
    # function
    dispatch[type(pickle.dump)] = save_global

    def _batch_setitems(self, items):
        # forces order of keys in dict to ensure consistent hash
        Pickler._batch_setitems(self, iter(sorted(items)))

    def save_set(self, set_items):
        # forces order of items in Set to ensure consistent hash
        Pickler.save(self, _ConsistentSet(set_items))

    dispatch[type(set())] = save_set
class NumpyHasher(Hasher):
    """ Special case the hasher for when numpy is loaded.
    """

    def __init__(self, hash_name='md5', coerce_mmap=False):
        """
            Parameters
            ----------
            hash_name: string
                The hash algorithm to be used
            coerce_mmap: boolean
                Make no difference between np.memmap and np.ndarray
                objects.
        """
        self.coerce_mmap = coerce_mmap
        Hasher.__init__(self, hash_name=hash_name)
        # delayed import of numpy, to avoid tight coupling
        import numpy as np
        self.np = np
        # np.getbuffer existed on Python 2 only; memoryview is the
        # modern equivalent.
        if hasattr(np, 'getbuffer'):
            self._getbuffer = np.getbuffer
        else:
            self._getbuffer = memoryview

    def save(self, obj):
        """ Subclass the save method, to hash ndarray subclass, rather
            than pickling them. Off course, this is a total abuse of
            the Pickler class.
        """
        if isinstance(obj, self.np.ndarray) and not obj.dtype.hasobject:
            # Compute a hash of the object:
            try:
                # memoryview is not supported for some dtypes,
                # e.g. datetime64, see
                # https://github.com/numpy/numpy/issues/4983. The
                # workaround is to view the array as bytes before
                # taking the memoryview
                obj_bytes_view = obj.view(self.np.uint8)
                self._hash.update(self._getbuffer(obj_bytes_view))
            # ValueError is raised by .view when the array is not contiguous
            # BufferError is raised by Python 3 in the hash update if
            # the array is Fortran rather than C contiguous
            except (ValueError, BufferError):
                # Cater for non-single-segment arrays: this creates a
                # copy, and thus aleviates this issue.
                # XXX: There might be a more efficient way of doing this
                obj_bytes_view = obj.flatten().view(self.np.uint8)
                self._hash.update(self._getbuffer(obj_bytes_view))

            # We store the class, to be able to distinguish between
            # Objects with the same binary content, but different
            # classes.
            if self.coerce_mmap and isinstance(obj, self.np.memmap):
                # We don't make the difference between memmap and
                # normal ndarrays, to be able to reload previously
                # computed results with memmap.
                klass = self.np.ndarray
            else:
                klass = obj.__class__
            # We also return the dtype and the shape, to distinguish
            # different views on the same data with different dtypes.
            # The object will be pickled by the pickler hashed at the end.
            obj = (klass, ('HASHED', obj.dtype, obj.shape, obj.strides))
        Hasher.save(self, obj)
def hash(obj, hash_name='md5', coerce_mmap=False):
    """ Quick calculation of a hash to identify uniquely Python objects
        containing numpy arrays.

        Parameters
        -----------
        hash_name: 'md5' or 'sha1'
            Hashing algorithm used. sha1 is supposedly safer, but md5 is
            faster.
        coerce_mmap: boolean
            Make no difference between np.memmap and np.ndarray
    """
    # Pick the numpy-aware hasher only when numpy is already loaded;
    # otherwise the plain pickling hasher suffices.
    if 'numpy' not in sys.modules:
        return Hasher(hash_name=hash_name).hash(obj)
    return NumpyHasher(hash_name=hash_name, coerce_mmap=coerce_mmap).hash(obj)
|
zhaishaomin/LDS-prefetcher-research | refs/heads/master | gem5_src/arch/x86/isa/insts/simd64/integer/data_transfer/move_mask.py | 91 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop PMOVMSKB_R_MMX {
limm reg, 0
movsign reg, mmxm, size=1, ext=0
};
'''
|
pytest-dev/pytest | refs/heads/main | testing/python/integration.py | 3 | from typing import Any
import pytest
from _pytest import runner
from _pytest._code import getfslineno
from _pytest.fixtures import getfixturemarker
from _pytest.pytester import Pytester
class TestOEJSKITSpecials:
    # Reproduces the (rough) way the oejskit js-test plugin builds custom
    # collectors via pytest_pycollect_makeitem and then fills funcargs on
    # objects that are not regular Python collect nodes.

    def test_funcarg_non_pycollectobj(
        self, pytester: Pytester, recwarn
    ) -> None:  # rough jstests usage
        pytester.makeconftest(
            """
            import pytest
            def pytest_pycollect_makeitem(collector, name, obj):
                if name == "MyClass":
                    return MyCollector.from_parent(collector, name=name)
            class MyCollector(pytest.Collector):
                def reportinfo(self):
                    return self.fspath, 3, "xyz"
            """
        )
        modcol = pytester.getmodulecol(
            """
            import pytest
            @pytest.fixture
            def arg1(request):
                return 42
            class MyClass(object):
                pass
            """
        )
        # this hook finds funcarg factories
        rep = runner.collect_one_node(collector=modcol)
        # TODO: Don't treat as Any.
        clscol: Any = rep.result[0]
        clscol.obj = lambda arg1: None
        clscol.funcargs = {}
        # The 'arg1' fixture declared in the module must be resolved even
        # though clscol is a custom (non-Python) collector.
        pytest._fillfuncargs(clscol)
        assert clscol.funcargs["arg1"] == 42

    def test_autouse_fixture(
        self, pytester: Pytester, recwarn
    ) -> None:  # rough jstests usage
        pytester.makeconftest(
            """
            import pytest
            def pytest_pycollect_makeitem(collector, name, obj):
                if name == "MyClass":
                    return MyCollector.from_parent(collector, name=name)
            class MyCollector(pytest.Collector):
                def reportinfo(self):
                    return self.fspath, 3, "xyz"
            """
        )
        modcol = pytester.getmodulecol(
            """
            import pytest
            @pytest.fixture(autouse=True)
            def hello():
                pass
            @pytest.fixture
            def arg1(request):
                return 42
            class MyClass(object):
                pass
            """
        )
        # this hook finds funcarg factories
        rep = runner.collect_one_node(modcol)
        # TODO: Don't treat as Any.
        clscol: Any = rep.result[0]
        clscol.obj = lambda: None
        clscol.funcargs = {}
        # Autouse fixtures must NOT be injected into funcargs of the
        # custom collector (its callable takes no arguments).
        pytest._fillfuncargs(clscol)
        assert not clscol.funcargs
def test_wrapped_getfslineno() -> None:
    # getfslineno must follow the __wrapped__ chain so that a decorated
    # function reports its own source location, not the decorator's.
    def func():
        pass

    def wrap(f):
        # NOTE(review): attributes are set on the *enclosing* `func`, and
        # `func` itself is returned -- looks intentional for this test
        # (it makes `wrapped_func` resolve to `func` with __wrapped__
        # pointing back at the real definition); verify before changing.
        func.__wrapped__ = f  # type: ignore
        func.patchings = ["qwe"]  # type: ignore
        return func

    @wrap
    def wrapped_func(x, y, z):
        pass

    fs, lineno = getfslineno(wrapped_func)
    fs2, lineno2 = getfslineno(wrap)
    # wrapped_func is defined *after* wrap, so unwrapping correctly must
    # yield the later line number.
    assert lineno > lineno2, "getfslineno does not unwrap correctly"
class TestMockDecoration:
def test_wrapped_getfuncargnames(self) -> None:
from _pytest.compat import getfuncargnames
def wrap(f):
def func():
pass
func.__wrapped__ = f # type: ignore
return func
@wrap
def f(x):
pass
values = getfuncargnames(f)
assert values == ("x",)
def test_getfuncargnames_patching(self):
from _pytest.compat import getfuncargnames
from unittest.mock import patch
class T:
def original(self, x, y, z):
pass
@patch.object(T, "original")
def f(x, y, z):
pass
values = getfuncargnames(f)
assert values == ("y", "z")
def test_unittest_mock(self, pytester: Pytester) -> None:
pytester.makepyfile(
"""
import unittest.mock
class T(unittest.TestCase):
@unittest.mock.patch("os.path.abspath")
def test_hello(self, abspath):
import os
os.path.abspath("hello")
abspath.assert_any_call("hello")
"""
)
reprec = pytester.inline_run()
reprec.assertoutcome(passed=1)
def test_unittest_mock_and_fixture(self, pytester: Pytester) -> None:
pytester.makepyfile(
"""
import os.path
import unittest.mock
import pytest
@pytest.fixture
def inject_me():
pass
@unittest.mock.patch.object(os.path, "abspath",
new=unittest.mock.MagicMock)
def test_hello(inject_me):
import os
os.path.abspath("hello")
"""
)
reprec = pytester.inline_run()
reprec.assertoutcome(passed=1)
def test_unittest_mock_and_pypi_mock(self, pytester: Pytester) -> None:
pytest.importorskip("mock", "1.0.1")
pytester.makepyfile(
"""
import mock
import unittest.mock
class TestBoth(object):
@unittest.mock.patch("os.path.abspath")
def test_hello(self, abspath):
import os
os.path.abspath("hello")
abspath.assert_any_call("hello")
@mock.patch("os.path.abspath")
def test_hello_mock(self, abspath):
import os
os.path.abspath("hello")
abspath.assert_any_call("hello")
"""
)
reprec = pytester.inline_run()
reprec.assertoutcome(passed=2)
def test_mock_sentinel_check_against_numpy_like(self, pytester: Pytester) -> None:
"""Ensure our function that detects mock arguments compares against sentinels using
identity to circumvent objects which can't be compared with equality against others
in a truth context, like with numpy arrays (#5606).
"""
pytester.makepyfile(
dummy="""
class NumpyLike:
def __init__(self, value):
self.value = value
def __eq__(self, other):
raise ValueError("like numpy, cannot compare against others for truth")
FOO = NumpyLike(10)
"""
)
pytester.makepyfile(
"""
from unittest.mock import patch
import dummy
class Test(object):
@patch("dummy.FOO", new=dummy.NumpyLike(50))
def test_hello(self):
assert dummy.FOO.value == 50
"""
)
reprec = pytester.inline_run()
reprec.assertoutcome(passed=1)
def test_mock(self, pytester: Pytester) -> None:
    """mock.patch works on TestCase methods and on plain functions, stacked or not."""
    pytest.importorskip("mock", "1.0.1")
    source = """
        import os
        import unittest
        import mock

        class T(unittest.TestCase):
            @mock.patch("os.path.abspath")
            def test_hello(self, abspath):
                os.path.abspath("hello")
                abspath.assert_any_call("hello")

        def mock_basename(path):
            return "mock_basename"

        @mock.patch("os.path.abspath")
        @mock.patch("os.path.normpath")
        @mock.patch("os.path.basename", new=mock_basename)
        def test_someting(normpath, abspath, tmp_path):
            abspath.return_value = "this"
            os.path.normpath(os.path.abspath("hello"))
            normpath.assert_any_call("this")
            assert os.path.basename("123") == "mock_basename"
    """
    pytester.makepyfile(source)
    outcome = pytester.inline_run()
    outcome.assertoutcome(passed=2)
    report_calls = outcome.getcalls("pytest_runtest_logreport")
    # Only the "call" phase reports carry the executed test's location.
    executed = [
        c.report.location[2] for c in report_calls if c.report.when == "call"
    ]
    assert executed == ["T.test_hello", "test_someting"]
def test_mock_sorting(self, pytester: Pytester) -> None:
    """mock.patch decorators must not disturb definition-order collection."""
    pytest.importorskip("mock", "1.0.1")
    source = """
        import os
        import mock

        @mock.patch("os.path.abspath")
        def test_one(abspath):
            pass
        @mock.patch("os.path.abspath")
        def test_two(abspath):
            pass
        @mock.patch("os.path.abspath")
        def test_three(abspath):
            pass
    """
    pytester.makepyfile(source)
    outcome = pytester.inline_run()
    reports = outcome.getreports("pytest_runtest_logreport")
    call_reports = [r for r in reports if r.when == "call"]
    names = [r.nodeid.split("::")[-1] for r in call_reports]
    assert names == ["test_one", "test_two", "test_three"]
def test_mock_double_patch_issue473(self, pytester: Pytester) -> None:
    """Stacked mock.patch decorators on a marked class still yield one passing test (#473)."""
    pytest.importorskip("mock", "1.0.1")
    source = """
        from mock import patch
        from pytest import mark

        @patch('os.getcwd')
        @patch('os.path')
        @mark.slow
        class TestSimple(object):
            def test_simple_thing(self, mock_path, mock_getcwd):
                pass
    """
    pytester.makepyfile(source)
    outcome = pytester.inline_run()
    outcome.assertoutcome(passed=1)
class TestReRunTests:
    """Fixtures must be re-instantiated when an item is run more than once."""

    def test_rerun(self, pytester: Pytester) -> None:
        """Running the protocol twice gives the fixture a fresh request each time."""
        conftest_source = """
            from _pytest.runner import runtestprotocol
            def pytest_runtest_protocol(item, nextitem):
                runtestprotocol(item, log=False, nextitem=nextitem)
                runtestprotocol(item, log=True, nextitem=nextitem)
        """
        pytester.makeconftest(conftest_source)
        test_source = """
            import pytest
            count = 0
            req = None
            @pytest.fixture
            def fix(request):
                global count, req
                assert request != req
                req = request
                print("fix count %s" % count)
                count += 1
            def test_fix(fix):
                pass
        """
        pytester.makepyfile(test_source)
        result = pytester.runpytest("-s")
        # The fixture ran once per protocol invocation, so the counter printed twice.
        result.stdout.fnmatch_lines(
            """
            *fix count 0*
            *fix count 1*
            """
        )
        result.stdout.fnmatch_lines(
            """
            *2 passed*
            """
        )
def test_pytestconfig_is_session_scoped() -> None:
    """The built-in pytestconfig fixture must be registered with session scope."""
    from _pytest.fixtures import pytestconfig

    fixture_marker = getfixturemarker(pytestconfig)
    assert fixture_marker is not None
    assert fixture_marker.scope == "session"
class TestNoselikeTestAttribute:
    """Collection must honour the nose-style ``__test__`` attribute."""

    def test_module_with_global_test(self, pytester: Pytester) -> None:
        """A module-level ``__test__ = False`` disables collection of the module."""
        pytester.makepyfile(
            """
            __test__ = False
            def test_hello():
                pass
            """
        )
        result = pytester.inline_run()
        assert not result.getfailedcollections()
        assert not result.getreports("pytest_runtest_logreport")

    def test_class_and_method(self, pytester: Pytester) -> None:
        """``__test__ = False`` on a function or class hides it even when the module opts in."""
        pytester.makepyfile(
            """
            __test__ = True
            def test_func():
                pass
            test_func.__test__ = False
            class TestSome(object):
                __test__ = False
                def test_method(self):
                    pass
            """
        )
        result = pytester.inline_run()
        assert not result.getfailedcollections()
        assert not result.getreports("pytest_runtest_logreport")

    def test_unittest_class(self, pytester: Pytester) -> None:
        """``__test__ = False`` is honoured on unittest.TestCase subclasses too."""
        pytester.makepyfile(
            """
            import unittest
            class TC(unittest.TestCase):
                def test_1(self):
                    pass
            class TC2(unittest.TestCase):
                __test__ = False
                def test_2(self):
                    pass
            """
        )
        result = pytester.inline_run()
        assert not result.getfailedcollections()
        modifyitems = result.getcalls("pytest_collection_modifyitems")[0]
        assert len(modifyitems.items) == 1
        assert modifyitems.items[0].cls.__name__ == "TC"

    def test_class_with_nasty_getattr(self, pytester: Pytester) -> None:
        """Make sure we handle classes with a custom nasty __getattr__ right.

        With a custom __getattr__ which e.g. returns a function (like with a
        RPC wrapper), we shouldn't assume this meant "__test__ = True".
        """
        # https://github.com/pytest-dev/pytest/issues/1204
        pytester.makepyfile(
            """
            class MetaModel(type):
                def __getattr__(cls, key):
                    return lambda: None
            BaseModel = MetaModel('Model', (), {})
            class Model(BaseModel):
                __metaclass__ = MetaModel
                def test_blah(self):
                    pass
            """
        )
        result = pytester.inline_run()
        assert not result.getfailedcollections()
        modifyitems = result.getcalls("pytest_collection_modifyitems")[0]
        assert not modifyitems.items
class TestParameterize:
    """#351"""

    def test_idfn_marker(self, pytester: Pytester) -> None:
        """An ids callable on @pytest.mark.parametrize shapes the generated test ids."""
        pytester.makepyfile(
            """
            import pytest
            def idfn(param):
                if param == 0:
                    return 'spam'
                elif param == 1:
                    return 'ham'
                else:
                    return None
            @pytest.mark.parametrize('a,b', [(0, 2), (1, 2)], ids=idfn)
            def test_params(a, b):
                pass
            """
        )
        result = pytester.runpytest("--collect-only")
        result.stdout.fnmatch_lines(["*spam-2*", "*ham-2*"])

    def test_idfn_fixture(self, pytester: Pytester) -> None:
        """An ids callable on @pytest.fixture(params=...) shapes the generated test ids."""
        pytester.makepyfile(
            """
            import pytest
            def idfn(param):
                if param == 0:
                    return 'spam'
                elif param == 1:
                    return 'ham'
                else:
                    return None
            @pytest.fixture(params=[0, 1], ids=idfn)
            def a(request):
                return request.param
            @pytest.fixture(params=[1, 2], ids=idfn)
            def b(request):
                return request.param
            def test_params(a, b):
                pass
            """
        )
        result = pytester.runpytest("--collect-only")
        result.stdout.fnmatch_lines(["*spam-2*", "*ham-2*"])
|
angelapper/odoo | refs/heads/9.0 | openerp/addons/test_access_rights/__init__.py | 2355 | # -*- coding: utf-8 -*-
import models
|
programadorjc/django | refs/heads/master | tests/migrations/test_multidb.py | 366 | import unittest
from django.db import connection, migrations, models
from django.db.migrations.state import ProjectState
from django.test import override_settings
from .test_operations import OperationTestBase
try:
import sqlparse
except ImportError:
sqlparse = None
class AgnosticRouter(object):
    """
    A router with no opinion about migrating: it always abstains.
    """
    def allow_migrate(self, db, app_label, **hints):
        # None defers the decision to the next router (or the default).
        return None
class MigrateNothingRouter(object):
    """
    A router that vetoes every migration.
    """
    def allow_migrate(self, db, app_label, **hints):
        return False
class MigrateEverythingRouter(object):
    """
    A router that approves every migration.
    """
    def allow_migrate(self, db, app_label, **hints):
        return True
class MigrateWhenFooRouter(object):
    """
    A router whose decision is driven entirely by the 'foo' hint.
    """
    def allow_migrate(self, db, app_label, **hints):
        decision = hints.get('foo', False)
        return decision
class MultiDBOperationTests(OperationTestBase):
    # Exercises how individual migration operations consult DATABASE_ROUTERS.
    multi_db = True  # request the multi-database test setup from the base class

    def _test_create_model(self, app_label, should_run):
        """
        Tests that CreateModel honours multi-db settings.
        """
        operation = migrations.CreateModel(
            "Pony",
            [("id", models.AutoField(primary_key=True))],
        )
        # Test the state alteration
        project_state = ProjectState()
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        # Test the database alteration
        self.assertTableNotExists("%s_pony" % app_label)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        # Whether the table now exists depends on what the active router allowed.
        if should_run:
            self.assertTableExists("%s_pony" % app_label)
        else:
            self.assertTableNotExists("%s_pony" % app_label)
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        self.assertTableNotExists("%s_pony" % app_label)

    @override_settings(DATABASE_ROUTERS=[AgnosticRouter()])
    def test_create_model(self):
        """
        Test when router doesn't have an opinion (i.e. CreateModel should run).
        """
        self._test_create_model("test_mltdb_crmo", should_run=True)

    @override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()])
    def test_create_model2(self):
        """
        Test when router returns False (i.e. CreateModel shouldn't run).
        """
        self._test_create_model("test_mltdb_crmo2", should_run=False)

    @override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()])
    def test_create_model3(self):
        """
        Test when router returns True (i.e. CreateModel should run).
        """
        self._test_create_model("test_mltdb_crmo3", should_run=True)

    def test_create_model4(self):
        """
        Test multiple routers.
        """
        # Two abstaining routers: the migration falls back to the default (run).
        with override_settings(DATABASE_ROUTERS=[AgnosticRouter(), AgnosticRouter()]):
            self._test_create_model("test_mltdb_crmo4", should_run=True)
        # The first router with an opinion wins, in declaration order.
        with override_settings(DATABASE_ROUTERS=[MigrateNothingRouter(), MigrateEverythingRouter()]):
            self._test_create_model("test_mltdb_crmo4", should_run=False)
        with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter(), MigrateNothingRouter()]):
            self._test_create_model("test_mltdb_crmo4", should_run=True)

    def _test_run_sql(self, app_label, should_run, hints=None):
        # The model table itself is always created (permissive router during
        # setup); only the RunSQL operation under test is gated by the caller's
        # router/hints.
        with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()]):
            project_state = self.set_up_test_model(app_label)
        sql = """
        INSERT INTO {0}_pony (pink, weight) VALUES (1, 3.55);
        INSERT INTO {0}_pony (pink, weight) VALUES (3, 5.0);
        """.format(app_label)
        operation = migrations.RunSQL(sql, hints=hints or {})
        # Test the state alteration does nothing
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(new_state, project_state)
        # Test the database alteration
        self.assertEqual(project_state.apps.get_model(app_label, "Pony").objects.count(), 0)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        Pony = project_state.apps.get_model(app_label, "Pony")
        # Two rows were inserted only if the router allowed the RunSQL to run.
        if should_run:
            self.assertEqual(Pony.objects.count(), 2)
        else:
            self.assertEqual(Pony.objects.count(), 0)

    # Multi-statement SQL needs sqlparse for splitting on some backends.
    @unittest.skipIf(sqlparse is None and connection.features.requires_sqlparse_for_splitting, "Missing sqlparse")
    @override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()])
    def test_run_sql(self):
        self._test_run_sql("test_mltdb_runsql", should_run=False)

    @unittest.skipIf(sqlparse is None and connection.features.requires_sqlparse_for_splitting, "Missing sqlparse")
    @override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()])
    def test_run_sql2(self):
        # Without the 'foo' hint the router vetoes; with it, the SQL runs.
        self._test_run_sql("test_mltdb_runsql2", should_run=False)
        self._test_run_sql("test_mltdb_runsql2", should_run=True, hints={'foo': True})

    def _test_run_python(self, app_label, should_run, hints=None):
        # Same pattern as _test_run_sql, but gating a RunPython operation.
        with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()]):
            project_state = self.set_up_test_model(app_label)
        # Create the operation
        def inner_method(models, schema_editor):
            Pony = models.get_model(app_label, "Pony")
            Pony.objects.create(pink=1, weight=3.55)
            Pony.objects.create(weight=5)
        operation = migrations.RunPython(inner_method, hints=hints or {})
        # Test the state alteration does nothing
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(new_state, project_state)
        # Test the database alteration
        self.assertEqual(project_state.apps.get_model(app_label, "Pony").objects.count(), 0)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        Pony = project_state.apps.get_model(app_label, "Pony")
        if should_run:
            self.assertEqual(Pony.objects.count(), 2)
        else:
            self.assertEqual(Pony.objects.count(), 0)

    @override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()])
    def test_run_python(self):
        self._test_run_python("test_mltdb_runpython", should_run=False)

    @override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()])
    def test_run_python2(self):
        # Without the 'foo' hint the router vetoes; with it, the code runs.
        self._test_run_python("test_mltdb_runpython2", should_run=False)
        self._test_run_python("test_mltdb_runpython2", should_run=True, hints={'foo': True})
|
analurandis/Tur | refs/heads/master | backend/venv/Lib/site-packages/Cheetah/Template.py | 14 | '''
Provides the core API for Cheetah.
See the docstring in the Template class and the Users' Guide for more information
'''
################################################################################
## DEPENDENCIES
import sys # used in the error handling code
import re # used to define the internal delims regex
import logging
import string
import os.path
import time # used in the cache refresh code
from random import randrange
import imp
import inspect
import StringIO
import traceback
import pprint
import cgi # Used by .webInput() if the template is a CGI script.
import types
# threading is optional on some minimal Python builds; fall back to a
# do-nothing Lock so the compile-lock code below can call
# acquire()/release() unconditionally.
try:
    from threading import Lock
except ImportError:
    class Lock:
        """No-op stand-in for threading.Lock when threading is unavailable."""
        def acquire(self):
            pass
        def release(self):
            pass
# Select the file type and the method-construction helper for the running
# Python version.
# BUG FIX: the original test was `isinstance(sys.version_info[:], tuple)`,
# which is ALWAYS true (slicing version_info yields a tuple on every Python),
# so the Python-3 branch was unreachable and Python 3 crashed on the missing
# types.FileType. Compare the major version instead.
filetype = None
if sys.version_info[0] == 2:
    # Python 2.xx: real file objects have a dedicated type, and unbound
    # methods need the three-argument MethodType(func, None, cls) form.
    filetype = types.FileType

    def createMethod(func, cls):
        """Return *func* wrapped as an (unbound) method of *cls*."""
        return types.MethodType(func, None, cls)
else:
    # Python 3: any io stream is acceptable, and MethodType takes two args.
    import io
    filetype = io.IOBase

    def createMethod(func, cls):
        """Return *func* bound as a method of *cls*."""
        return types.MethodType(func, cls)
from Cheetah.Version import convertVersionStringToTuple, MinCompatibleVersionTuple
from Cheetah.Version import MinCompatibleVersion
# Base classes for Template
from Cheetah.Servlet import Servlet
# More intra-package imports ...
from Cheetah.Parser import ParseError, SourceReader
from Cheetah.Compiler import Compiler, DEFAULT_COMPILER_SETTINGS
from Cheetah import ErrorCatchers # for placeholder tags
from Cheetah import Filters # the output filters
from Cheetah.convertTmplPathToModuleName import convertTmplPathToModuleName
from Cheetah.Utils.Misc import checkKeywords # Used in Template.__init__
from Cheetah.Utils.Indenter import Indenter # Used in Template.__init__ and for
# placeholders
from Cheetah.NameMapper import NotFound, valueFromSearchList
from Cheetah.CacheStore import MemoryCacheStore, MemcachedCacheStore
from Cheetah.CacheRegion import CacheRegion
from Cheetah.Utils.WebInputMixin import _Converter, _lookup, NonNumericInputError
from Cheetah.Unspecified import Unspecified
# Decide whether to use the file modification time in file's cache key
__checkFileMtime = True


def checkFileMtime(value):
    """Set the module-level __checkFileMtime flag to *value*."""
    global __checkFileMtime
    __checkFileMtime = value
class Error(Exception):
    """Base exception for errors raised by this module."""
class PreprocessError(Error):
    """Raised when a template preprocessor fails or is misconfigured."""
def hashList(l):
    """Return a stable hash for a list, recursing into nested lists/dicts.

    Unhashable elements (lists, dicts) are replaced by their recursive hash
    before the whole sequence is hashed as a tuple.
    """
    def _canon(item):
        if isinstance(item, dict):
            return hashDict(item)
        if isinstance(item, list):
            return hashList(item)
        return item

    return hash(tuple(_canon(item) for item in l))
def hashDict(d):
    """Return a stable hash for a dict, recursing into nested dicts/lists.

    Items are sorted by key so equal dicts hash equally regardless of
    insertion order; unhashable values are replaced by their recursive hash.
    """
    def _canon(value):
        if isinstance(value, dict):
            return hashDict(value)
        if isinstance(value, list):
            return hashList(value)
        return value

    return hash(tuple((k, _canon(v)) for k, v in sorted(d.items())))
################################################################################
## MODULE GLOBALS AND CONSTANTS
def _genUniqueModuleName(baseModuleName):
"""The calling code is responsible for concurrency locking.
"""
if baseModuleName not in sys.modules:
finalName = baseModuleName
else:
finalName = ('cheetah_%s_%s_%s'%(baseModuleName,
str(time.time()).replace('.', '_'),
str(randrange(10000, 99999))))
return finalName
# Cache of a cgi.FieldStorage() instance, maintained by .webInput().
# This is only relevant to templates used as CGI scripts.
_formUsedByWebInput = None
def updateLinecache(filename, src):
    """Register *src* under *filename* in linecache so tracebacks that
    reference the (possibly virtual) file can display its source lines.
    """
    import linecache
    # linecache stores (size, mtime, lines, fullname) tuples.
    entry = (len(src), time.time(), src.splitlines(), filename)
    linecache.cache[filename] = entry
class CompileCacheItem(object):
    """Plain attribute container for one entry in the compile cache."""
class TemplatePreprocessor(object):
    '''
    This is used with the preprocessors argument to Template.compile().
    See the docstring for Template.compile

    ** Preprocessors are an advanced topic **
    '''
    def __init__(self, settings):
        # settings: an object carrying templateAPIClass, templateInitArgs,
        # outputTransformer and any Template.compile keyword values.
        self._settings = settings

    def preprocess(self, source, file):
        """Create an intermediate template and return the source code
        it outputs
        """
        settings = self._settings
        if not source:  # @@TR: this needs improving
            # NOTE: `unicode` / im_func / func_code below are Python-2-only
            # constructs; this code path predates Python 3 support.
            if isinstance(file, (str, unicode)):  # it's a filename.
                f = open(file)
                source = f.read()
                f.close()
            elif hasattr(file, 'read'):
                source = file.read()
            file = None

        templateAPIClass = settings.templateAPIClass
        # Collect the keyword names Template.compile accepts (minus the ones
        # we pass explicitly) so matching attributes on `settings` can be
        # forwarded to the intermediate compilation.
        possibleKwArgs = [
            arg for arg in
            inspect.getargs(templateAPIClass.compile.im_func.func_code)[0]
            if arg not in ('klass', 'source', 'file',)]

        compileKwArgs = {}
        for arg in possibleKwArgs:
            if hasattr(settings, arg):
                compileKwArgs[arg] = getattr(settings, arg)

        # Compile and instantiate the intermediate template, then let the
        # configured transformer (default: str()) render the final source.
        tmplClass = templateAPIClass.compile(source=source, file=file, **compileKwArgs)
        tmplInstance = tmplClass(**settings.templateInitArgs)
        outputSource = settings.outputTransformer(tmplInstance)
        outputFile = None
        return outputSource, outputFile
class Template(Servlet):
'''
This class provides a) methods used by templates at runtime and b)
methods for compiling Cheetah source code into template classes.
This documentation assumes you already know Python and the basics of object
oriented programming. If you don't know Python, see the sections of the
Cheetah Users' Guide for non-programmers. It also assumes you have read
about Cheetah's syntax in the Users' Guide.
The following explains how to use Cheetah from within Python programs or via
the interpreter. If you statically compile your templates on the command
line using the 'cheetah' script, this is not relevant to you. Statically
compiled Cheetah template modules/classes (e.g. myTemplate.py:
MyTemplateClasss) are just like any other Python module or class. Also note,
most Python web frameworks (Webware, Aquarium, mod_python, Turbogears,
CherryPy, Quixote, etc.) provide plugins that handle Cheetah compilation for
you.
There are several possible usage patterns:
1) tclass = Template.compile(src)
t1 = tclass() # or tclass(namespaces=[namespace,...])
t2 = tclass() # or tclass(namespaces=[namespace2,...])
outputStr = str(t1) # or outputStr = t1.aMethodYouDefined()
Template.compile provides a rich and very flexible API via its
optional arguments so there are many possible variations of this
pattern. One example is:
tclass = Template.compile('hello $name from $caller', baseclass=dict)
print tclass(name='world', caller='me')
See the Template.compile() docstring for more details.
2) tmplInstance = Template(src)
# or Template(src, namespaces=[namespace,...])
outputStr = str(tmplInstance) # or outputStr = tmplInstance.aMethodYouDefined(...args...)
Notes on the usage patterns:
usage pattern 1)
This is the most flexible, but it is slightly more verbose unless you
write a wrapper function to hide the plumbing. Under the hood, all
other usage patterns are based on this approach. Templates compiled
this way can #extend (subclass) any Python baseclass: old-style or
new-style (based on object or a builtin type).
usage pattern 2)
This was Cheetah's original usage pattern. It returns an instance,
but you can still access the generated class via
tmplInstance.__class__. If you want to use several different
namespace 'searchLists' with a single template source definition,
you're better off with Template.compile (1).
Limitations (use pattern 1 instead):
- Templates compiled this way can only #extend subclasses of the
new-style 'object' baseclass. Cheetah.Template is a subclass of
'object'. You also can not #extend dict, list, or other builtin
types.
- If your template baseclass' __init__ constructor expects args there
is currently no way to pass them in.
If you need to subclass a dynamically compiled Cheetah class, do something like this:
from Cheetah.Template import Template
T1 = Template.compile('$meth1 #def meth1: this is meth1 in T1')
T2 = Template.compile('#implements meth1\nthis is meth1 redefined in T2', baseclass=T1)
print T1, T1()
print T2, T2()
Note about class and instance attribute names:
Attributes used by Cheetah have a special prefix to avoid confusion with
the attributes of the templates themselves or those of template
baseclasses.
Class attributes which are used in class methods look like this:
klass._CHEETAH_useCompilationCache (_CHEETAH_xxx)
Instance attributes look like this:
klass._CHEETAH__globalSetVars (_CHEETAH__xxx with 2 underscores)
'''
# this is used by ._addCheetahPlumbingCodeToClass()
_CHEETAH_requiredCheetahMethods = (
'_initCheetahInstance',
'searchList',
'errorCatcher',
'getVar',
'varExists',
'getFileContents',
'i18n',
'runAsMainProgram',
'respond',
'shutdown',
'webInput',
'serverSidePath',
'generatedClassCode',
'generatedModuleCode',
'_getCacheStore',
'_getCacheStoreIdPrefix',
'_createCacheRegion',
'getCacheRegion',
'getCacheRegions',
'refreshCache',
'_handleCheetahInclude',
'_getTemplateAPIClassForIncludeDirectiveCompilation',
)
_CHEETAH_requiredCheetahClassMethods = ('subclass',)
_CHEETAH_requiredCheetahClassAttributes = ('cacheRegionClass', 'cacheStore',
'cacheStoreIdPrefix', 'cacheStoreClass')
## the following are used by .compile(). Most are documented in its docstring.
_CHEETAH_cacheModuleFilesForTracebacks = False
_CHEETAH_cacheDirForModuleFiles = None # change to a dirname
_CHEETAH_compileCache = dict() # cache store for compiled code and classes
# To do something other than simple in-memory caching you can create an
# alternative cache store. It just needs to support the basics of Python's
# mapping/dict protocol. E.g.:
# class AdvCachingTemplate(Template):
# _CHEETAH_compileCache = MemoryOrFileCache()
_CHEETAH_compileLock = Lock() # used to prevent race conditions
_CHEETAH_defaultMainMethodName = None
_CHEETAH_compilerSettings = None
_CHEETAH_compilerClass = Compiler
_CHEETAH_compilerInstance = None
_CHEETAH_cacheCompilationResults = True
_CHEETAH_useCompilationCache = True
_CHEETAH_keepRefToGeneratedCode = True
_CHEETAH_defaultBaseclassForTemplates = None
_CHEETAH_defaultClassNameForTemplates = None
# defaults to DEFAULT_COMPILER_SETTINGS['mainMethodName']:
_CHEETAH_defaultMainMethodNameForTemplates = None
_CHEETAH_defaultModuleNameForTemplates = 'DynamicallyCompiledCheetahTemplate'
_CHEETAH_defaultModuleGlobalsForTemplates = None
_CHEETAH_preprocessors = None
_CHEETAH_defaultPreprocessorClass = TemplatePreprocessor
## The following attributes are used by instance methods:
_CHEETAH_generatedModuleCode = None
NonNumericInputError = NonNumericInputError
_CHEETAH_cacheRegionClass = CacheRegion
_CHEETAH_cacheStoreClass = MemoryCacheStore
#_CHEETAH_cacheStoreClass = MemcachedCacheStore
_CHEETAH_cacheStore = None
_CHEETAH_cacheStoreIdPrefix = None
@classmethod
def _getCompilerClass(klass, source=None, file=None):
return klass._CHEETAH_compilerClass
@classmethod
def _getCompilerSettings(klass, source=None, file=None):
return klass._CHEETAH_compilerSettings
@classmethod
def compile(klass, source=None, file=None,
returnAClass=True,
compilerSettings=Unspecified,
compilerClass=Unspecified,
moduleName=None,
className=Unspecified,
mainMethodName=Unspecified,
baseclass=Unspecified,
moduleGlobals=Unspecified,
cacheCompilationResults=Unspecified,
useCache=Unspecified,
preprocessors=Unspecified,
cacheModuleFilesForTracebacks=Unspecified,
cacheDirForModuleFiles=Unspecified,
commandlineopts=None,
keepRefToGeneratedCode=Unspecified,
):
"""
The core API for compiling Cheetah source code into template classes.
This class method compiles Cheetah source code and returns a python
class. You then create template instances using that class. All
Cheetah's other compilation API's use this method under the hood.
Internally, this method a) parses the Cheetah source code and generates
Python code defining a module with a single class in it, b) dynamically
creates a module object with a unique name, c) execs the generated code
in that module's namespace then inserts the module into sys.modules, and
d) returns a reference to the generated class. If you want to get the
generated python source code instead, pass the argument
returnAClass=False.
It caches generated code and classes. See the descriptions of the
arguments'cacheCompilationResults' and 'useCache' for details. This
doesn't mean that templates will automatically recompile themselves when
the source file changes. Rather, if you call Template.compile(src) or
Template.compile(file=path) repeatedly it will attempt to return a
cached class definition instead of recompiling.
Hooks are provided template source preprocessing. See the notes on the
'preprocessors' arg.
If you are an advanced user and need to customize the way Cheetah parses
source code or outputs Python code, you should check out the
compilerSettings argument.
Arguments:
You must provide either a 'source' or 'file' arg, but not both:
- source (string or None)
- file (string path, file-like object, or None)
The rest of the arguments are strictly optional. All but the first
have defaults in attributes of the Template class which can be
overridden in subclasses of this class. Working with most of these is
an advanced topic.
- returnAClass=True
If false, return the generated module code rather than a class.
- compilerSettings (a dict)
Default: Template._CHEETAH_compilerSettings=None
a dictionary of settings to override those defined in
DEFAULT_COMPILER_SETTINGS. These can also be overridden in your
template source code with the #compiler or #compiler-settings
directives.
- compilerClass (a class)
Default: Template._CHEETAH_compilerClass=Cheetah.Compiler.Compiler
a subclass of Cheetah.Compiler.Compiler. Mucking with this is a
very advanced topic.
- moduleName (a string)
Default:
Template._CHEETAH_defaultModuleNameForTemplates
='DynamicallyCompiledCheetahTemplate'
What to name the generated Python module. If the provided value is
None and a file arg was given, the moduleName is created from the
file path. In all cases if the moduleName provided is already in
sys.modules it is passed through a filter that generates a unique
variant of the name.
- className (a string)
Default: Template._CHEETAH_defaultClassNameForTemplates=None
What to name the generated Python class. If the provided value is
None, the moduleName is use as the class name.
- mainMethodName (a string)
Default:
Template._CHEETAH_defaultMainMethodNameForTemplates
=None (and thus DEFAULT_COMPILER_SETTINGS['mainMethodName'])
What to name the main output generating method in the compiled
template class.
- baseclass (a string or a class)
Default: Template._CHEETAH_defaultBaseclassForTemplates=None
Specifies the baseclass for the template without manually
including an #extends directive in the source. The #extends
directive trumps this arg.
If the provided value is a string you must make sure that a class
reference by that name is available to your template, either by
using an #import directive or by providing it in the arg
'moduleGlobals'.
If the provided value is a class, Cheetah will handle all the
details for you.
- moduleGlobals (a dict)
Default: Template._CHEETAH_defaultModuleGlobalsForTemplates=None
A dict of vars that will be added to the global namespace of the
module the generated code is executed in, prior to the execution
of that code. This should be Python values, not code strings!
- cacheCompilationResults (True/False)
Default: Template._CHEETAH_cacheCompilationResults=True
Tells Cheetah to cache the generated code and classes so that they
can be reused if Template.compile() is called multiple times with
the same source and options.
- useCache (True/False)
Default: Template._CHEETAH_useCompilationCache=True
Should the compilation cache be used? If True and a previous
compilation created a cached template class with the same source
code, compiler settings and other options, the cached template
class will be returned.
- cacheModuleFilesForTracebacks (True/False)
Default: Template._CHEETAH_cacheModuleFilesForTracebacks=False
In earlier versions of Cheetah tracebacks from exceptions that
were raised inside dynamically compiled Cheetah templates were
opaque because Python didn't have access to a python source file
to use in the traceback:
File "xxxx.py", line 192, in getTextiledContent
content = str(template(searchList=searchList))
File "cheetah_yyyy.py", line 202, in __str__
File "cheetah_yyyy.py", line 187, in respond
File "cheetah_yyyy.py", line 139, in writeBody
ZeroDivisionError: integer division or modulo by zero
It is now possible to keep those files in a cache dir and allow
Python to include the actual source lines in tracebacks and makes
them much easier to understand:
File "xxxx.py", line 192, in getTextiledContent
content = str(template(searchList=searchList))
File "/tmp/CheetahCacheDir/cheetah_yyyy.py", line 202, in __str__
def __str__(self): return self.respond()
File "/tmp/CheetahCacheDir/cheetah_yyyy.py", line 187, in respond
self.writeBody(trans=trans)
File "/tmp/CheetahCacheDir/cheetah_yyyy.py", line 139, in writeBody
__v = 0/0 # $(0/0)
ZeroDivisionError: integer division or modulo by zero
- cacheDirForModuleFiles (a string representing a dir path)
Default: Template._CHEETAH_cacheDirForModuleFiles=None
See notes on cacheModuleFilesForTracebacks.
- preprocessors
Default: Template._CHEETAH_preprocessors=None
** THIS IS A VERY ADVANCED TOPIC **
These are used to transform the source code prior to compilation.
They provide a way to use Cheetah as a code generator for Cheetah
code. In other words, you use one Cheetah template to output the
source code for another Cheetah template.
The major expected use cases are:
a) 'compile-time caching' aka 'partial template binding',
wherein an intermediate Cheetah template is used to output
the source for the final Cheetah template. The intermediate
template is a mix of a modified Cheetah syntax (the
'preprocess syntax') and standard Cheetah syntax. The
preprocessor syntax is executed at compile time and outputs
Cheetah code which is then compiled in turn. This approach
allows one to completely soft-code all the elements in the
template which are subject to change yet have it compile to
extremely efficient Python code with everything but the
elements that must be variable at runtime (per browser
request, etc.) compiled as static strings. Examples of this
usage pattern will be added to the Cheetah Users' Guide.
The'preprocess syntax' is just Cheetah's standard one with
alternatives for the $ and # tokens:
e.g. '@' and '%' for code like this
@aPreprocessVar $aRuntimeVar
%if aCompileTimeCondition then yyy else zzz
%% preprocessor comment
#if aRunTimeCondition then aaa else bbb
## normal comment
$aRuntimeVar
b) adding #import and #extends directives dynamically based on
the source
If preprocessors are provided, Cheetah pipes the source code
through each one in the order provided. Each preprocessor should
accept the args (source, file) and should return a tuple (source,
file).
The argument value should be a list, but a single non-list value
is acceptable and will automatically be converted into a list.
Each item in the list will be passed through
Template._normalizePreprocessor(). The items should either match
one of the following forms:
- an object with a .preprocess(source, file) method
- a callable with the following signature:
source, file = f(source, file)
or one of the forms below:
- a single string denoting the 2 'tokens' for the preprocess
syntax. The tokens should be in the order (placeholderToken,
directiveToken) and should separated with a space:
e.g. '@ %'
klass = Template.compile(src, preprocessors='@ %')
# or
klass = Template.compile(src, preprocessors=['@ %'])
- a dict with the following keys or an object with the
following attributes (all are optional, but nothing will
happen if you don't provide at least one):
- tokens: same as the single string described above. You can
also provide a tuple of 2 strings.
- searchList: the searchList used for preprocess $placeholders
- compilerSettings: used in the compilation of the intermediate
template
- templateAPIClass: an optional subclass of `Template`
- outputTransformer: a simple hook for passing in a callable
which can do further transformations of the preprocessor
output, or do something else like debug logging. The
default is str().
+ any keyword arguments to Template.compile which you want to
provide for the compilation of the intermediate template.
klass = Template.compile(src,
preprocessors=[ dict(tokens='@ %', searchList=[...]) ] )
"""
errmsg = "arg '%s' must be %s"
if not isinstance(source, (types.NoneType, basestring)):
raise TypeError(errmsg % ('source', 'string or None'))
if not isinstance(file, (types.NoneType, basestring, filetype)):
raise TypeError(errmsg %
('file', 'string, file-like object, or None'))
if baseclass is Unspecified:
baseclass = klass._CHEETAH_defaultBaseclassForTemplates
if isinstance(baseclass, Template):
baseclass = baseclass.__class__
if not isinstance(baseclass, (types.NoneType, basestring, type)):
raise TypeError(errmsg % ('baseclass', 'string, class or None'))
if cacheCompilationResults is Unspecified:
cacheCompilationResults = klass._CHEETAH_cacheCompilationResults
if not isinstance(cacheCompilationResults, (int, bool)):
raise TypeError(errmsg % ('cacheCompilationResults', 'boolean'))
if useCache is Unspecified:
useCache = klass._CHEETAH_useCompilationCache
if not isinstance(useCache, (int, bool)):
raise TypeError(errmsg % ('useCache', 'boolean'))
if compilerSettings is Unspecified:
compilerSettings = klass._getCompilerSettings(source, file) or {}
if not isinstance(compilerSettings, dict):
raise TypeError(errmsg % ('compilerSettings', 'dictionary'))
if compilerClass is Unspecified:
compilerClass = klass._getCompilerClass(source, file)
if preprocessors is Unspecified:
preprocessors = klass._CHEETAH_preprocessors
if keepRefToGeneratedCode is Unspecified:
keepRefToGeneratedCode = klass._CHEETAH_keepRefToGeneratedCode
if not isinstance(keepRefToGeneratedCode, (int, bool)):
raise TypeError(errmsg % ('keepReftoGeneratedCode', 'boolean'))
if not isinstance(moduleName, (types.NoneType, basestring)):
raise TypeError(errmsg % ('moduleName', 'string or None'))
__orig_file__ = None
if not moduleName:
if file and isinstance(file, basestring):
moduleName = convertTmplPathToModuleName(file)
__orig_file__ = file
else:
moduleName = klass._CHEETAH_defaultModuleNameForTemplates
if className is Unspecified:
className = klass._CHEETAH_defaultClassNameForTemplates
if not isinstance(className, (types.NoneType, basestring)):
raise TypeError(errmsg % ('className', 'string or None'))
className = re.sub(r'^_+','', className or moduleName)
if mainMethodName is Unspecified:
mainMethodName = klass._CHEETAH_defaultMainMethodNameForTemplates
if not isinstance(mainMethodName, (types.NoneType, basestring)):
raise TypeError(errmsg % ('mainMethodName', 'string or None'))
if moduleGlobals is Unspecified:
moduleGlobals = klass._CHEETAH_defaultModuleGlobalsForTemplates
if cacheModuleFilesForTracebacks is Unspecified:
cacheModuleFilesForTracebacks = klass._CHEETAH_cacheModuleFilesForTracebacks
if not isinstance(cacheModuleFilesForTracebacks, (int, bool)):
raise TypeError(errmsg %
('cacheModuleFilesForTracebacks', 'boolean'))
if cacheDirForModuleFiles is Unspecified:
cacheDirForModuleFiles = klass._CHEETAH_cacheDirForModuleFiles
if not isinstance(cacheDirForModuleFiles, (types.NoneType, basestring)):
raise TypeError(errmsg %
('cacheDirForModuleFiles', 'string or None'))
##################################################
## handle any preprocessors
if preprocessors:
origSrc = source
source, file = klass._preprocessSource(source, file, preprocessors)
##################################################
## compilation, using cache if requested/possible
baseclassValue = None
baseclassName = None
if baseclass:
if isinstance(baseclass, basestring):
baseclassName = baseclass
elif isinstance(baseclass, type):
# @@TR: should soft-code this
baseclassName = 'CHEETAH_dynamicallyAssignedBaseClass_'+baseclass.__name__
baseclassValue = baseclass
cacheHash = None
cacheItem = None
if source or isinstance(file, basestring):
compilerSettingsHash = None
if compilerSettings:
compilerSettingsHash = hashDict(compilerSettings)
moduleGlobalsHash = None
if moduleGlobals:
moduleGlobalsHash = hashDict(moduleGlobals)
fileHash = None
if file:
fileHash = str(hash(file))
if globals()['__checkFileMtime']:
fileHash += str(os.path.getmtime(file))
try:
# @@TR: find some way to create a cacheHash that is consistent
# between process restarts. It would allow for caching the
# compiled module on disk and thereby reduce the startup time
# for applications that use a lot of dynamically compiled
# templates.
cacheHash = ''.join([str(v) for v in
[hash(source),
fileHash,
className,
moduleName,
mainMethodName,
hash(compilerClass),
hash(baseclass),
compilerSettingsHash,
moduleGlobalsHash,
hash(cacheDirForModuleFiles),
]])
except:
#@@TR: should add some logging to this
pass
outputEncoding = 'ascii'
compiler = None
if useCache and cacheHash and cacheHash in klass._CHEETAH_compileCache:
cacheItem = klass._CHEETAH_compileCache[cacheHash]
generatedModuleCode = cacheItem.code
else:
compiler = compilerClass(source, file,
moduleName=moduleName,
mainClassName=className,
baseclassName=baseclassName,
mainMethodName=mainMethodName,
settings=(compilerSettings or {}))
if commandlineopts:
compiler.setShBang(commandlineopts.shbang)
compiler.compile()
generatedModuleCode = compiler.getModuleCode()
outputEncoding = compiler.getModuleEncoding()
if not returnAClass:
# This is a bit of a hackish solution to make sure we're setting the proper
# encoding on generated code that is destined to be written to a file
if not outputEncoding == 'ascii':
generatedModuleCode = generatedModuleCode.split('\n')
generatedModuleCode.insert(1, '# -*- coding: %s -*-' % outputEncoding)
generatedModuleCode = '\n'.join(generatedModuleCode)
return generatedModuleCode.encode(outputEncoding)
else:
if cacheItem:
cacheItem.lastCheckoutTime = time.time()
return cacheItem.klass
try:
klass._CHEETAH_compileLock.acquire()
uniqueModuleName = _genUniqueModuleName(moduleName)
__file__ = uniqueModuleName+'.py' # relative file path with no dir part
if cacheModuleFilesForTracebacks:
if not os.path.exists(cacheDirForModuleFiles):
raise Exception('%s does not exist'%cacheDirForModuleFiles)
__file__ = os.path.join(cacheDirForModuleFiles, __file__)
# @@TR: might want to assert that it doesn't already exist
open(__file__, 'w').write(generatedModuleCode)
# @@TR: should probably restrict the perms, etc.
mod = types.ModuleType(str(uniqueModuleName))
if moduleGlobals:
for k, v in moduleGlobals.items():
setattr(mod, k, v)
mod.__file__ = __file__
if __orig_file__ and os.path.exists(__orig_file__):
# this is used in the WebKit filemonitoring code
mod.__orig_file__ = __orig_file__
if baseclass and baseclassValue:
setattr(mod, baseclassName, baseclassValue)
##
try:
co = compile(generatedModuleCode, __file__, 'exec')
exec(co, mod.__dict__)
except SyntaxError, e:
try:
parseError = genParserErrorFromPythonException(
source, file, generatedModuleCode, exception=e)
except:
updateLinecache(__file__, generatedModuleCode)
e.generatedModuleCode = generatedModuleCode
raise e
else:
raise parseError
except Exception, e:
updateLinecache(__file__, generatedModuleCode)
e.generatedModuleCode = generatedModuleCode
raise
##
sys.modules[uniqueModuleName] = mod
finally:
klass._CHEETAH_compileLock.release()
templateClass = getattr(mod, className)
if (cacheCompilationResults
and cacheHash
and cacheHash not in klass._CHEETAH_compileCache):
cacheItem = CompileCacheItem()
cacheItem.cacheTime = cacheItem.lastCheckoutTime = time.time()
cacheItem.code = generatedModuleCode
cacheItem.klass = templateClass
templateClass._CHEETAH_isInCompilationCache = True
klass._CHEETAH_compileCache[cacheHash] = cacheItem
else:
templateClass._CHEETAH_isInCompilationCache = False
if keepRefToGeneratedCode or cacheCompilationResults:
templateClass._CHEETAH_generatedModuleCode = generatedModuleCode
# If we have a compiler object, let's set it to the compiler class
# to help the directive analyzer code
if compiler:
templateClass._CHEETAH_compilerInstance = compiler
return templateClass
@classmethod
def subclass(klass, *args, **kws):
    """Compile and return a new template class that subclasses this one.

    Accepts exactly the same args as the .compile() classmethod; the only
    difference is that the 'baseclass' keyword is forced to the class this
    method is called on.

    T1 = Template.compile(' foo - $meth1 - bar\n#def meth1: this is T1.meth1')
    T2 = T1.subclass('#implements meth1\n this is T2.meth1')
    """
    kws['baseclass'] = klass
    # NOTE(review): 'klass' is a class object here, so isinstance(klass,
    # Template) is normally False and this branch looks dead; issubclass()
    # may have been intended.  Confirm before changing -- fixing it would
    # alter which .compile() implementation subclasses dispatch to.
    if isinstance(klass, Template):
        templateAPIClass = klass
    else:
        templateAPIClass = Template
    return templateAPIClass.compile(*args, **kws)
@classmethod
def _preprocessSource(klass, source, file, preprocessors):
    """Pipe (source, file) through every preprocessor handed to .compile().

    A single preprocessor may be passed instead of a list; it is wrapped
    in a one-item list automatically.  Returns the transformed
    (source, file) pair that Template.compile() then feeds to the real
    compiler.
    """
    if not isinstance(preprocessors, (list, tuple)):
        preprocessors = [preprocessors]
    for rawPreprocessor in preprocessors:
        normalized = klass._normalizePreprocessorArg(rawPreprocessor)
        source, file = normalized.preprocess(source, file)
    return source, file
@classmethod
def _normalizePreprocessorArg(klass, arg):
    """Convert one item of .compile()'s ``preprocessors`` argument into a
    real source preprocessor object.

    Accepted shortcut forms (checked in this order): an object with a
    .preprocess() method, any callable, a token string / token sequence,
    a settings dict, or a ready-made settings object.  See the
    Template.compile docstring for details of each form.
    """
    if hasattr(arg, 'preprocess'):
        # already a full preprocessor -- use as-is
        return arg
    if hasattr(arg, '__call__'):
        # adapt a bare callable to the .preprocess() interface
        class CallableAdapter:
            def preprocess(self, source, file):
                return arg(source, file)
        return CallableAdapter()
    # otherwise build a settings object from the shortcut form
    class DefaultSettings(object):
        placeholderToken = None
        directiveToken = None
    settings = DefaultSettings()
    if isinstance(arg, (str, list, tuple)):
        settings.tokens = arg
    elif isinstance(arg, dict):
        for key, val in arg.items():
            setattr(settings, key, val)
    else:
        settings = arg
    settings = klass._normalizePreprocessorSettings(settings)
    return klass._CHEETAH_defaultPreprocessorClass(settings)
@classmethod
def _normalizePreprocessorSettings(klass, settings):
    """Validate and fill in defaults on a preprocessor ``settings`` object.

    Mutates and returns ``settings``: splits the token spec into
    placeholder/directive tokens, builds templateInitArgs['searchList'],
    chooses an outputTransformer and templateAPIClass, and derives the
    compilerSettings token overrides.  Raises TypeError if neither
    'tokens' nor 'compilerSettings' was supplied.
    """
    # the intermediate template must keep its generated code around so the
    # preprocessed output can be retrieved
    settings.keepRefToGeneratedCode = True
    def normalizeSearchList(searchList):
        # accept a single namespace as shorthand for a one-item list
        if not isinstance(searchList, (list, tuple)):
            searchList = [searchList]
        return searchList
    def normalizeTokens(tokens):
        if isinstance(tokens, str):
            return tokens.split() # space delimited string e.g.'@ %'
        elif isinstance(tokens, (list, tuple)):
            return tokens
        else:
            raise PreprocessError('invalid tokens argument: %r'%tokens)
    if hasattr(settings, 'tokens'):
        (settings.placeholderToken,
         settings.directiveToken) = normalizeTokens(settings.tokens)
    # at least one of tokens / compilerSettings must have been provided
    if (not getattr(settings, 'compilerSettings', None)
        and not getattr(settings, 'placeholderToken', None) ):
        raise TypeError(
            'Preprocessor requires either a "tokens" or a "compilerSettings" arg.'
            ' Neither was provided.')
    if not hasattr(settings, 'templateInitArgs'):
        settings.templateInitArgs = {}
    if 'searchList' not in settings.templateInitArgs:
        # 'namespaces' is accepted as an alias for 'searchList'
        if not hasattr(settings, 'searchList') and hasattr(settings, 'namespaces'):
            settings.searchList = settings.namespaces
        elif not hasattr(settings, 'searchList'):
            settings.searchList = []
        settings.templateInitArgs['searchList'] = settings.searchList
    settings.templateInitArgs['searchList'] = (
        normalizeSearchList(settings.templateInitArgs['searchList']))
    if not hasattr(settings, 'outputTransformer'):
        settings.outputTransformer = unicode
    if not hasattr(settings, 'templateAPIClass'):
        # a fresh subclass so per-preprocessor state can't leak onto klass
        class PreprocessTemplateAPIClass(klass): pass
        settings.templateAPIClass = PreprocessTemplateAPIClass
    if not hasattr(settings, 'compilerSettings'):
        settings.compilerSettings = {}
    klass._updateSettingsWithPreprocessTokens(
        compilerSettings=settings.compilerSettings,
        placeholderToken=settings.placeholderToken,
        directiveToken=settings.directiveToken
    )
    return settings
@classmethod
def _updateSettingsWithPreprocessTokens(
        klass, compilerSettings, placeholderToken, directiveToken):
    """Derive default compiler-token settings from the two preprocess
    tokens, without overriding anything the caller already set
    explicitly in ``compilerSettings`` (mutated in place).
    """
    if placeholderToken:
        compilerSettings.setdefault('cheetahVarStartToken', placeholderToken)
    if directiveToken:
        # every directive-related token defaults to a variation on the
        # single directive token
        compilerSettings.setdefault('directiveStartToken', directiveToken)
        compilerSettings.setdefault('directiveEndToken', directiveToken)
        compilerSettings.setdefault('commentStartToken', directiveToken*2)
        compilerSettings.setdefault(
            'multiLineCommentStartToken', directiveToken+'*')
        compilerSettings.setdefault(
            'multiLineCommentEndToken', '*'+directiveToken)
        compilerSettings.setdefault('EOLSlurpToken', directiveToken)
@classmethod
def _addCheetahPlumbingCodeToClass(klass, concreteTemplateClass):
    """If concreteTemplateClass is not a subclass of Cheetah.Template, add
    the required cheetah methods and attributes to it.

    This is called on each new template class after it has been compiled.
    If concreteTemplateClass is not a subclass of Cheetah.Template but
    already has a method with the same name as one of the required cheetah
    methods, this will skip that method.
    """
    # copy over any missing required instance methods, re-bound to the
    # concrete class (Py2 unbound-method plumbing via createMethod)
    for methodname in klass._CHEETAH_requiredCheetahMethods:
        if not hasattr(concreteTemplateClass, methodname):
            method = getattr(Template, methodname)
            newMethod = createMethod(method.im_func, concreteTemplateClass)
            setattr(concreteTemplateClass, methodname, newMethod)
    # ... and the required classmethods
    for classMethName in klass._CHEETAH_requiredCheetahClassMethods:
        if not hasattr(concreteTemplateClass, classMethName):
            meth = getattr(klass, classMethName)
            setattr(concreteTemplateClass, classMethName, classmethod(meth.im_func))
    # ... and the required class attributes (all carry the _CHEETAH_ prefix)
    for attrname in klass._CHEETAH_requiredCheetahClassAttributes:
        attrname = '_CHEETAH_'+attrname
        if not hasattr(concreteTemplateClass, attrname):
            attrVal = getattr(klass, attrname)
            setattr(concreteTemplateClass, attrname, attrVal)
    # synthesize __str__/__unicode__ only if the class doesn't define its own
    if (not hasattr(concreteTemplateClass, '__str__')
        or concreteTemplateClass.__str__ is object.__str__):
        mainMethNameAttr = '_mainCheetahMethod_for_'+concreteTemplateClass.__name__
        mainMethName = getattr(concreteTemplateClass, mainMethNameAttr, None)
        if mainMethName:
            # preferred: delegate to the compiled template's main method
            def __str__(self):
                rc = getattr(self, mainMethName)()
                if isinstance(rc, unicode):
                    return rc.encode('utf-8')
                return rc
            def __unicode__(self):
                return getattr(self, mainMethName)()
        elif (hasattr(concreteTemplateClass, 'respond')
              and concreteTemplateClass.respond!=Servlet.respond):
            # next best: the class has a non-default respond() implementation
            def __str__(self):
                rc = self.respond()
                if isinstance(rc, unicode):
                    return rc.encode('utf-8')
                return rc
            def __unicode__(self):
                return self.respond()
        else:
            # fallback: decide dynamically at call time
            def __str__(self):
                rc = None
                if hasattr(self, mainMethNameAttr):
                    rc = getattr(self, mainMethNameAttr)()
                elif hasattr(self, 'respond'):
                    rc = self.respond()
                else:
                    rc = super(self.__class__, self).__str__()
                if isinstance(rc, unicode):
                    return rc.encode('utf-8')
                return rc
            def __unicode__(self):
                if hasattr(self, mainMethNameAttr):
                    return getattr(self, mainMethNameAttr)()
                elif hasattr(self, 'respond'):
                    return self.respond()
                else:
                    return super(self.__class__, self).__unicode__()
        # bind the chosen implementations as methods of the concrete class
        __str__ = createMethod(__str__, concreteTemplateClass)
        __unicode__ = createMethod(__unicode__, concreteTemplateClass)
        setattr(concreteTemplateClass, '__str__', __str__)
        setattr(concreteTemplateClass, '__unicode__', __unicode__)
def __init__(self, source=None,
             namespaces=None, searchList=None,
             # use either or. They are aliases for the same thing.
             file=None,
             filter='RawOrEncodedUnicode', # which filter from Cheetah.Filters
             filtersLib=Filters,
             errorCatcher=None,
             compilerSettings=Unspecified, # control the behaviour of the compiler
             _globalSetVars=None, # used internally for #include'd templates
             _preBuiltSearchList=None # used internally for #include'd templates
             ):
    """a) compiles a new template OR b) instantiates an existing template.

    Read this docstring carefully as there are two distinct usage patterns.
    You should also read this class' main docstring.

    a) to compile a new template:
         t = Template(source=aSourceString)
         # or
         t = Template(file='some/path')
         # or
         t = Template(file=someFileObject)
         # or
         namespaces = [{'foo':'bar'}]
         t = Template(source=aSourceString, namespaces=namespaces)
         # or
         t = Template(file='some/path', namespaces=namespaces)
         print t

    b) to create an instance of an existing, precompiled template class:
       ## i) first you need a reference to a compiled template class:
         tclass = Template.compile(source=src) # or just Template.compile(src)
         # or
         tclass = Template.compile(file='some/path')
         # or
         tclass = Template.compile(file=someFileObject)
         # or
         # if you used the command line compiler or have Cheetah's ImportHooks
         # installed your template class is also available via Python's
         # standard import mechanism:
         from ACompileTemplate import AcompiledTemplate as tclass

       ## ii) then you create an instance
         t = tclass(namespaces=namespaces)
         # or
         t = tclass(namespaces=namespaces, filter='RawOrEncodedUnicode')
         print t

    Arguments:
      for usage pattern a)
        If you are compiling a new template, you must provide either a
        'source' or 'file' arg, but not both:
          - source (string or None)
          - file (string path, file-like object, or None)

        Optional args (see below for more):
          - compilerSettings
            Default: Template._CHEETAH_compilerSettings=None
            a dictionary of settings to override those defined in
            DEFAULT_COMPILER_SETTINGS.  See
            Cheetah.Template.DEFAULT_COMPILER_SETTINGS and the Users' Guide
            for details.

        You can pass the source arg in as a positional arg with this usage
        pattern.  Use keywords for all other args.

      for usage pattern b)
        Do not use positional args with this usage pattern, unless your
        template subclasses something other than Cheetah.Template and you
        want to pass positional args to that baseclass.  E.g.:
            dictTemplate = Template.compile('hello $name from $caller', baseclass=dict)
            tmplvars = dict(name='world', caller='me')
            print dictTemplate(tmplvars)
        This usage requires all Cheetah args to be passed in as keyword args.

      optional args for both usage patterns:
          - namespaces (aka 'searchList')
            Default: None
            an optional list of namespaces (dictionaries, objects, modules,
            etc.) which Cheetah will search through to find the variables
            referenced in $placeholders.  If you provide a single namespace
            instead of a list, Cheetah will automatically convert it into a
            list.  NOTE: Cheetah does NOT force you to use the namespaces
            search list and related features.  It's on by default, but you
            can turn it off using the compiler settings useSearchList=False
            or useNameMapper=False.

          - filter
            Default: 'RawOrEncodedUnicode' (the keyword default above)
            Which filter should be used for output filtering.  This should
            either be a string which is the name of a filter in the
            'filtersLib' or a subclass of Cheetah.Filters.Filter.  See the
            Users' Guide for more details.

          - filtersLib
            Default: Cheetah.Filters
            A module containing subclasses of Cheetah.Filters.Filter.  See
            the Users' Guide for more details.

          - errorCatcher
            Default: None
            This is a debugging tool.  See the Users' Guide for more
            details.  Do not use this or the #errorCatcher directive with
            live production systems.

    Do NOT mess with the args _globalSetVars or _preBuiltSearchList!
    """
    errmsg = "arg '%s' must be %s"
    errmsgextra = errmsg + "\n%s"
    if not isinstance(source, (types.NoneType, basestring)):
        raise TypeError(errmsg % ('source', 'string or None'))
    # BUGFIX: this check previously re-tested 'source' (copy-paste slip)
    # even though the error message talks about 'file'; it now validates
    # 'file', matching the equivalent check in Template.compile().
    if not isinstance(file, (types.NoneType, basestring, filetype)):
        raise TypeError(errmsg %
                        ('file', 'string, file open for reading, or None'))
    if not isinstance(filter, (basestring, types.TypeType)) and not \
            (isinstance(filter, type) and issubclass(filter, Filters.Filter)):
        raise TypeError(errmsgextra %
                        ('filter', 'string or class',
                         '(if class, must be subclass of Cheetah.Filters.Filter)'))
    if not isinstance(filtersLib, (basestring, types.ModuleType)):
        raise TypeError(errmsgextra %
                        ('filtersLib', 'string or module',
                         '(if module, must contain subclasses of Cheetah.Filters.Filter)'))
    if errorCatcher is not None:
        err = True
        if isinstance(errorCatcher, (basestring, types.TypeType)):
            err = False
        if isinstance(errorCatcher, type) and \
                issubclass(errorCatcher, ErrorCatchers.ErrorCatcher):
            err = False
        if err:
            raise TypeError(errmsgextra %
                            ('errorCatcher', 'string, class or None',
                             '(if class, must be subclass of Cheetah.ErrorCatchers.ErrorCatcher)'))
    if compilerSettings is not Unspecified:
        if not isinstance(compilerSettings, types.DictType):
            raise TypeError(errmsg %
                            ('compilerSettings', 'dictionary'))
    if source is not None and file is not None:
        raise TypeError("you must supply either a source string or the" +
                        " 'file' keyword argument, but not both")

    ##################################################
    ## Do superclass initialization.
    super(Template, self).__init__()

    ##################################################
    ## Do required version check
    if not hasattr(self, '_CHEETAH_versionTuple'):
        try:
            mod = sys.modules[self.__class__.__module__]
            compiledVersion = mod.__CHEETAH_version__
            compiledVersionTuple = convertVersionStringToTuple(compiledVersion)
            if compiledVersionTuple < MinCompatibleVersionTuple:
                raise AssertionError(
                    'This template was compiled with Cheetah version'
                    ' %s. Templates compiled before version %s must be recompiled.'%(
                    compiledVersion, MinCompatibleVersion))
        except AssertionError:
            raise
        except:
            # modules without version info are assumed compatible
            pass

    ##################################################
    ## Setup instance state attributes used during the life of template
    ## post-compile.  Warn about searchList keys that shadow Template
    ## members, unless prioritizeSearchListOverSelf disarms the conflict.
    if searchList:
        for namespace in searchList:
            if isinstance(namespace, dict):
                intersection = self.Reserved_SearchList & set(namespace.keys())
                warn = False
                if intersection:
                    warn = True
                if isinstance(compilerSettings, dict) and compilerSettings.get('prioritizeSearchListOverSelf'):
                    warn = False
                if warn:
                    logging.info(''' The following keys are members of the Template class and will result in NameMapper collisions! ''')
                    logging.info(''' > %s ''' % ', '.join(list(intersection)))
                    logging.info(''' Please change the key's name or use the compiler setting "prioritizeSearchListOverSelf=True" to prevent the NameMapper from using ''')
                    logging.info(''' the Template member in place of your searchList variable ''')

    self._initCheetahInstance(
        searchList=searchList, namespaces=namespaces,
        filter=filter, filtersLib=filtersLib,
        errorCatcher=errorCatcher,
        _globalSetVars=_globalSetVars,
        compilerSettings=compilerSettings,
        _preBuiltSearchList=_preBuiltSearchList)

    ##################################################
    ## Now, compile if we're meant to
    if (source is not None) or (file is not None):
        self._compile(source, file, compilerSettings=compilerSettings)
def generatedModuleCode(self):
    """Return the Python module source the compiler produced for this
    template, or None if this instance was never compiled from source.
    """
    return self._CHEETAH_generatedModuleCode
def generatedClassCode(self):
    """Slice just the main class definition out of the generated module
    code (the span from the first ``class`` statement to the
    '## END CLASS DEFINITION' marker the compiler emits).
    """
    moduleCode = self._CHEETAH_generatedModuleCode
    start = moduleCode.find('\nclass ')
    end = moduleCode.find('\n## END CLASS DEFINITION')
    return moduleCode[start:end]
def searchList(self):
    """Return the live list of namespaces that $placeholder lookups
    search through (not a copy).
    """
    return self._CHEETAH__searchList
def errorCatcher(self):
    """Return the errorCatcher currently in effect for this template."""
    return self._CHEETAH__errorCatcher
## cache methods ##
def _getCacheStore(self):
    """Return the cache store, lazily creating it on first use.

    A configured store (_CHEETAH_cacheStore) takes precedence; otherwise
    a fresh instance of the configured store class is created.
    """
    store = self._CHEETAH__cacheStore
    if not store:
        configured = self._CHEETAH_cacheStore
        if configured is not None:
            store = configured
        else:
            # @@TR: might want to provide a way to provide init args
            store = self._CHEETAH_cacheStoreClass()
        self._CHEETAH__cacheStore = store
    return store
def _getCacheStoreIdPrefix(self):
    """Return the prefix used to namespace this template's keys in the
    cache store; falls back to the instance's id() when none was
    configured.
    """
    prefix = self._CHEETAH_cacheStoreIdPrefix
    return prefix if prefix is not None else str(id(self))
def _createCacheRegion(self, regionID):
    # Factory for a single cache region, wired to this template's
    # cache store and cache-key prefix.
    return self._CHEETAH_cacheRegionClass(
        regionID=regionID,
        templateCacheIdPrefix=self._getCacheStoreIdPrefix(),
        cacheStore=self._getCacheStore())
def getCacheRegion(self, regionID, cacheInfo=None, create=True):
    """Return the cache region registered under ``regionID``.

    If no such region exists yet and ``create`` is true, a new region is
    created and memoized; otherwise None is returned for a missing
    region.  ``cacheInfo`` is accepted for interface compatibility but
    unused here.
    """
    region = self._CHEETAH__cacheRegions.get(regionID)
    if region or not create:
        return region
    region = self._createCacheRegion(regionID)
    self._CHEETAH__cacheRegions[regionID] = region
    return region
def getCacheRegions(self):
    """Return a shallow copy of the mapping of 'cache regions'
    initialized in this template, keyed by region id.

    Each #cache directive block or $*cachedPlaceholder is a separate
    region.  A copy is returned so callers can't mutate the internal
    registry.
    """
    return dict(self._CHEETAH__cacheRegions)
def refreshCache(self, cacheRegionId=None, cacheItemId=None):
    """Flush cached output.

    With no args every region is cleared; with only ``cacheRegionId``
    that whole region is cleared; with both, just the one cache item
    inside that region.  Unknown region ids are silently ignored.
    """
    if not cacheRegionId:
        # wholesale refresh: clear every region we know about
        for cacheRegion in self.getCacheRegions().itervalues():
            cacheRegion.clear()
        return
    cregion = self._CHEETAH__cacheRegions.get(cacheRegionId)
    if not cregion:
        return
    if not cacheItemId:
        # clear the desired region and all its cacheItems
        cregion.clear()
        return
    # clear one specific cache of a specific region
    cacheItem = cregion.getCacheItem(cacheItemId)
    if cacheItem:
        cacheItem.clear()
## end cache methods ##
def shutdown(self):
    """Break reference cycles before discarding a servlet.

    Best-effort teardown: failures in Servlet.shutdown() are deliberately
    swallowed because this runs while the object is being discarded.
    """
    try:
        Servlet.shutdown(self)
    except:
        # intentional broad except: teardown must never raise
        pass
    self._CHEETAH__searchList = None
    # drop every instance attribute to release anything else we hold
    self.__dict__ = {}
## utility functions ##
def getVar(self, varName, default=Unspecified, autoCall=True):
    """Look ``varName`` up in the searchList.

    A leading '$' on the name is tolerated (stripped).  If the lookup
    fails, ``default`` is returned when one was supplied; otherwise
    NameMapper.NotFound propagates to the caller.
    """
    strippedName = varName.replace('$', '')
    try:
        return valueFromSearchList(self.searchList(), strippedName, autoCall)
    except NotFound:
        if default is Unspecified:
            raise
        return default
def varExists(self, varName, autoCall=True):
    """Return True if ``varName`` resolves in the searchList, False
    otherwise.  A leading '$' on the name is tolerated.
    """
    try:
        valueFromSearchList(self.searchList(), varName.replace('$', ''), autoCall)
    except NotFound:
        return False
    return True
hasVar = varExists
def i18n(self, message,
         plural=None,
         n=None,
         id=None,
         domain=None,
         source=None,
         target=None,
         comment=None
         ):
    """Localization hook -- currently a stub that returns ``message``
    unchanged.

    plural  = the plural form of the message
    n       = a sized argument to distinguish between single and plural forms
    id      = msgid in the translation catalog
    domain  = translation domain
    source  = source lang
    target  = a specific target lang
    comment = a comment to the translation team

    See the following for some ideas:
    http://www.zope.org/DevHome/Wikis/DevSite/Projects/ComponentArchitecture/ZPTInternationalizationSupport

    There is no need to replicate the i18n:name attribute from plone / PTL,
    as cheetah placeholders serve the same purpose.
    """
    return message
def getFileContents(self, path):
    """A hook for getting the contents of a file.

    The default implementation reads local files with the Python open()
    function.  This method could be reimplemented to allow reading of
    remote files via various protocols, as PHP allows with its
    'URL fopen wrapper'.

    Fixed to close the file handle deterministically even if .read()
    raises (the original leaked the handle on error).
    """
    fp = open(path, 'r')
    try:
        return fp.read()
    finally:
        fp.close()
def runAsMainProgram(self):
    """Allows the Template to function as a standalone command-line program
    for static page generation.

    Type 'python yourtemplate.py --help' to see what it's capable of.
    """
    from TemplateCmdLineIface import CmdLineIface
    CmdLineIface(templateObj=self).run()
##################################################
## internal methods -- not to be called by end-users
def _initCheetahInstance(self,
                         searchList=None,
                         namespaces=None,
                         filter='RawOrEncodedUnicode', # which filter from Cheetah.Filters
                         filtersLib=Filters,
                         errorCatcher=None,
                         _globalSetVars=None,
                         compilerSettings=None,
                         _preBuiltSearchList=None):
    """Sets up the instance attributes that cheetah templates use at
    run-time.

    This is automatically called by the __init__ method of compiled
    templates.  It is idempotent: a second call on an already-initialized
    instance returns immediately.

    Note that the names of instance attributes used by Cheetah are prefixed
    with '_CHEETAH__' (2 underscores), where class attributes are prefixed
    with '_CHEETAH_' (1 underscore).
    """
    # idempotence guard -- compiled subclasses may call this more than once
    if getattr(self, '_CHEETAH__instanceInitialized', False):
        return
    if namespaces is not None:
        assert searchList is None, (
            'Provide "namespaces" or "searchList", not both!')
        searchList = namespaces
    # a single namespace is shorthand for a one-item searchList
    if searchList is not None and not isinstance(searchList, (list, tuple)):
        searchList = [searchList]
    self._CHEETAH__globalSetVars = {}
    if _globalSetVars is not None:
        # this is intended to be used internally by Nested Templates in #include's
        self._CHEETAH__globalSetVars = _globalSetVars
    if _preBuiltSearchList is not None:
        # happens with nested Template obj creation from #include's
        self._CHEETAH__searchList = list(_preBuiltSearchList)
        self._CHEETAH__searchList.append(self)
    else:
        # create our own searchList; the template instance itself is always
        # a member so its methods/attributes are reachable from placeholders
        self._CHEETAH__searchList = [self._CHEETAH__globalSetVars, self]
        if searchList is not None:
            if isinstance(compilerSettings, dict) and compilerSettings.get('prioritizeSearchListOverSelf'):
                # user namespaces are searched BEFORE the template instance
                self._CHEETAH__searchList = searchList + self._CHEETAH__searchList
            else:
                self._CHEETAH__searchList.extend(list(searchList))
    self._CHEETAH__cheetahIncludes = {}
    self._CHEETAH__cacheRegions = {}
    self._CHEETAH__indenter = Indenter()
    # @@TR: consider allowing simple callables as the filter argument
    self._CHEETAH__filtersLib = filtersLib
    self._CHEETAH__filters = {}
    # 'filter' may be a name in filtersLib or a Filter subclass
    if isinstance(filter, basestring):
        filterName = filter
        klass = getattr(self._CHEETAH__filtersLib, filterName)
    else:
        klass = filter
        filterName = klass.__name__
    self._CHEETAH__currentFilter = self._CHEETAH__filters[filterName] = klass(self).filter
    self._CHEETAH__initialFilter = self._CHEETAH__currentFilter
    self._CHEETAH__errorCatchers = {}
    if errorCatcher:
        # 'errorCatcher' may be a name in ErrorCatchers or a class
        if isinstance(errorCatcher, basestring):
            errorCatcherClass = getattr(ErrorCatchers, errorCatcher)
        elif isinstance(errorCatcher, type):
            errorCatcherClass = errorCatcher
        self._CHEETAH__errorCatcher = ec = errorCatcherClass(self)
        self._CHEETAH__errorCatchers[errorCatcher.__class__.__name__] = ec
    else:
        self._CHEETAH__errorCatcher = None
    self._CHEETAH__initErrorCatcher = self._CHEETAH__errorCatcher
    if not hasattr(self, 'transaction'):
        self.transaction = None
    self._CHEETAH__instanceInitialized = True
    self._CHEETAH__isBuffering = False
    self._CHEETAH__isControlledByWebKit = False
    # instance-level cache store starts empty; a class-level override
    # (_CHEETAH_cacheStore) is honored immediately when present
    self._CHEETAH__cacheStore = None
    if self._CHEETAH_cacheStore is not None:
        self._CHEETAH__cacheStore = self._CHEETAH_cacheStore
def _compile(self, source=None, file=None, compilerSettings=Unspecified,
             moduleName=None, mainMethodName=None):
    """Compile the template. This method is automatically called by
    Template.__init__ when it is provided with 'file' or 'source' args.

    USERS SHOULD *NEVER* CALL THIS METHOD THEMSELVES.  Use Template.compile
    instead.

    Side effects: records file metadata (_fileMtime, _fileDirName,
    _fileBaseName, _filePath) and REPLACES self.__class__ with the freshly
    compiled template class.
    """
    if compilerSettings is Unspecified:
        compilerSettings = self._getCompilerSettings(source, file) or {}
    mainMethodName = mainMethodName or self._CHEETAH_defaultMainMethodName
    self._fileMtime = None
    self._fileDirName = None
    self._fileBaseName = None
    if file and isinstance(file, basestring):
        file = self.serverSidePath(file)
        self._fileMtime = os.path.getmtime(file)
        self._fileDirName, self._fileBaseName = os.path.split(file)
    self._filePath = file
    templateClass = self.compile(source, file,
                                 moduleName=moduleName,
                                 mainMethodName=mainMethodName,
                                 compilerSettings=compilerSettings,
                                 keepRefToGeneratedCode=True)
    if not self.__class__ == Template:
        # Only propagate attributes if we're in a subclass of
        # Template
        for k, v in self.__class__.__dict__.iteritems():
            if not v or k.startswith('__'):
                continue
            ## Propagate the class attributes to the instance
            ## since we're about to obliterate self.__class__
            ## (see: cheetah.Tests.Template.SubclassSearchListTest)
            setattr(self, k, v)
    self.__class__ = templateClass
    # must initialize it so instance attributes are accessible
    templateClass.__init__(self,
                           #_globalSetVars=self._CHEETAH__globalSetVars,
                           #_preBuiltSearchList=self._CHEETAH__searchList
                           )
    if not hasattr(self, 'transaction'):
        self.transaction = None
    def _handleCheetahInclude(self, srcArg, trans=None, includeFrom='file', raw=False):
        """Called at runtime to handle #include directives.

        srcArg is either a path/file-like object (includeFrom='file') or a
        source string; compiled (or raw) includes are cached per srcArg in
        self._CHEETAH__cheetahIncludes so repeated includes are cheap.
        """
        _includeID = srcArg
        if _includeID not in self._CHEETAH__cheetahIncludes:
            # First time this include is seen: compile it (or, for raw
            # includes, just load its text) and cache the result.
            if not raw:
                if includeFrom == 'file':
                    source = None
                    if isinstance(srcArg, basestring):
                        if hasattr(self, 'serverSidePath'):
                            file = path = self.serverSidePath(srcArg)
                        else:
                            file = path = os.path.normpath(srcArg)
                    else:
                        file = srcArg ## a file-like object
                else:
                    source = srcArg
                    file = None
                # @@TR: might want to provide some syntax for specifying the
                # Template class to be used for compilation so compilerSettings
                # can be changed.
                compiler = self._getTemplateAPIClassForIncludeDirectiveCompilation(source, file)
                nestedTemplateClass = compiler.compile(source=source, file=file)
                nestedTemplate = nestedTemplateClass(_preBuiltSearchList=self.searchList(),
                                                     _globalSetVars=self._CHEETAH__globalSetVars)
                # Set the inner template filters to the initial filter of the
                # outer template:
                # this is the only really safe way to use
                # filter='WebSafe'.
                nestedTemplate._CHEETAH__initialFilter = self._CHEETAH__initialFilter
                nestedTemplate._CHEETAH__currentFilter = self._CHEETAH__initialFilter
                self._CHEETAH__cheetahIncludes[_includeID] = nestedTemplate
            else:
                # Raw include: cache the literal text rather than a template.
                if includeFrom == 'file':
                    path = self.serverSidePath(srcArg)
                    self._CHEETAH__cheetahIncludes[_includeID] = self.getFileContents(path)
                else:
                    self._CHEETAH__cheetahIncludes[_includeID] = srcArg
        ##
        if not raw:
            # Render the cached nested template into the current transaction.
            self._CHEETAH__cheetahIncludes[_includeID].respond(trans)
        else:
            # Raw include: write the cached text verbatim.
            trans.response().write(self._CHEETAH__cheetahIncludes[_includeID])
def _getTemplateAPIClassForIncludeDirectiveCompilation(self, source, file):
"""Returns the subclass of Template which should be used to compile
#include directives.
This abstraction allows different compiler settings to be used in the
included template than were used in the parent.
"""
if issubclass(self.__class__, Template):
return self.__class__
else:
return Template
## functions for using templates as CGI scripts
def webInput(self, names, namesMulti=(), default='', src='f',
defaultInt=0, defaultFloat=0.00, badInt=0, badFloat=0.00, debug=False):
"""Method for importing web transaction variables in bulk.
This works for GET/POST fields both in Webware servlets and in CGI
scripts, and for cookies and session variables in Webware servlets. If
you try to read a cookie or session variable in a CGI script, you'll get
a RuntimeError. 'In a CGI script' here means 'not running as a Webware
servlet'. If the CGI environment is not properly set up, Cheetah will
act like there's no input.
The public method provided is:
def webInput(self, names, namesMulti=(), default='', src='f',
defaultInt=0, defaultFloat=0.00, badInt=0, badFloat=0.00, debug=False):
This method places the specified GET/POST fields, cookies or session
variables into a dictionary, which is both returned and put at the
beginning of the searchList. It handles:
* single vs multiple values
* conversion to integer or float for specified names
* default values/exceptions for missing or bad values
* printing a snapshot of all values retrieved for debugging
All the 'default*' and 'bad*' arguments have 'use or raise' behavior,
meaning that if they're a subclass of Exception, they're raised. If
they're anything else, that value is substituted for the missing/bad
value.
The simplest usage is:
#silent $webInput(['choice'])
$choice
dic = self.webInput(['choice'])
write(dic['choice'])
Both these examples retrieves the GET/POST field 'choice' and print it.
If you leave off the'#silent', all the values would be printed too. But
a better way to preview the values is
#silent $webInput(['name'], $debug=1)
because this pretty-prints all the values inside HTML <PRE> tags.
** KLUDGE: 'debug' is supposed to insert into the template output, but it
wasn't working so I changed it to a'print' statement. So the debugging
output will appear wherever standard output is pointed, whether at the
terminal, in a Webware log file, or whatever. ***
Since we didn't specify any coversions, the value is a string. It's a
'single' value because we specified it in 'names' rather than
'namesMulti'. Single values work like this:
* If one value is found, take it.
* If several values are found, choose one arbitrarily and ignore the rest.
* If no values are found, use or raise the appropriate 'default*' value.
Multi values work like this:
* If one value is found, put it in a list.
* If several values are found, leave them in a list.
* If no values are found, use the empty list ([]). The 'default*'
arguments are *not* consulted in this case.
Example: assume 'days' came from a set of checkboxes or a multiple combo
box on a form, and the user chose'Monday', 'Tuesday' and 'Thursday'.
#silent $webInput([], ['days'])
The days you chose are: #slurp
#for $day in $days
$day #slurp
#end for
dic = self.webInput([], ['days'])
write('The days you chose are: ')
for day in dic['days']:
write(day + ' ')
Both these examples print: 'The days you chose are: Monday Tuesday Thursday'.
By default, missing strings are replaced by '' and missing/bad numbers
by zero. (A'bad number' means the converter raised an exception for
it, usually because of non-numeric characters in the value.) This
mimics Perl/PHP behavior, and simplifies coding for many applications
where missing/bad values *should* be blank/zero. In those relatively
few cases where you must distinguish between empty-string/zero on the
one hand and missing/bad on the other, change the appropriate
'default*' and 'bad*' arguments to something like:
* None
* another constant value
* $NonNumericInputError/self.NonNumericInputError
* $ValueError/ValueError
(NonNumericInputError is defined in this class and is useful for
distinguishing between bad input vs a TypeError/ValueError thrown for
some other rason.)
Here's an example using multiple values to schedule newspaper
deliveries. 'checkboxes' comes from a form with checkboxes for all the
days of the week. The days the user previously chose are preselected.
The user checks/unchecks boxes as desired and presses Submit. The value
of 'checkboxes' is a list of checkboxes that were checked when Submit
was pressed. Our task now is to turn on the days the user checked, turn
off the days he unchecked, and leave on or off the days he didn't
change.
dic = self.webInput([], ['dayCheckboxes'])
wantedDays = dic['dayCheckboxes'] # The days the user checked.
for day, on in self.getAllValues():
if not on and wantedDays.has_key(day):
self.TurnOn(day)
# ... Set a flag or insert a database record ...
elif on and not wantedDays.has_key(day):
self.TurnOff(day)
# ... Unset a flag or delete a database record ...
'source' allows you to look up the variables from a number of different
sources:
'f' fields (CGI GET/POST parameters)
'c' cookies
's' session variables
'v' 'values', meaning fields or cookies
In many forms, you're dealing only with strings, which is why the
'default' argument is third and the numeric arguments are banished to
the end. But sometimes you want automatic number conversion, so that
you can do numeric comparisions in your templates without having to
write a bunch of conversion/exception handling code. Example:
#silent $webInput(['name', 'height:int'])
$name is $height cm tall.
#if $height >= 300
Wow, you're tall!
#else
Pshaw, you're short.
#end if
dic = self.webInput(['name', 'height:int'])
name = dic[name]
height = dic[height]
write('%s is %s cm tall.' % (name, height))
if height > 300:
write('Wow, you're tall!')
else:
write('Pshaw, you're short.')
To convert a value to a number, suffix ':int' or ':float' to the name.
The method will search first for a 'height:int' variable and then for a
'height' variable. (It will be called 'height' in the final
dictionary.) If a numeric conversion fails, use or raise 'badInt' or
'badFloat'. Missing values work the same way as for strings, except the
default is 'defaultInt' or 'defaultFloat' instead of 'default'.
If a name represents an uploaded file, the entire file will be read into
memory. For more sophistocated file-upload handling, leave that name
out of the list and do your own handling, or wait for
Cheetah.Utils.UploadFileMixin.
This only in a subclass that also inherits from Webware's Servlet or
HTTPServlet. Otherwise you'll get an AttributeError on 'self.request'.
EXCEPTIONS: ValueError if 'source' is not one of the stated characters.
TypeError if a conversion suffix is not ':int' or ':float'.
FUTURE EXPANSION: a future version of this method may allow source
cascading; e.g., 'vs' would look first in 'values' and then in session
variables.
Meta-Data
================================================================================
Author: Mike Orr <iron@mso.oz.net>
License: This software is released for unlimited distribution under the
terms of the MIT license. See the LICENSE file.
Version: $Revision: 1.186 $
Start Date: 2002/03/17
Last Revision Date: $Date: 2008/03/10 04:48:11 $
"""
src = src.lower()
isCgi = not self._CHEETAH__isControlledByWebKit
if isCgi and src in ('f', 'v'):
global _formUsedByWebInput
if _formUsedByWebInput is None:
_formUsedByWebInput = cgi.FieldStorage()
source, func = 'field', _formUsedByWebInput.getvalue
elif isCgi and src == 'c':
raise RuntimeError("can't get cookies from a CGI script")
elif isCgi and src == 's':
raise RuntimeError("can't get session variables from a CGI script")
elif isCgi and src == 'v':
source, func = 'value', self.request().value
elif isCgi and src == 's':
source, func = 'session', self.request().session().value
elif src == 'f':
source, func = 'field', self.request().field
elif src == 'c':
source, func = 'cookie', self.request().cookie
elif src == 'v':
source, func = 'value', self.request().value
elif src == 's':
source, func = 'session', self.request().session().value
else:
raise TypeError("arg 'src' invalid")
sources = source + 's'
converters = {
'': _Converter('string', None, default, default ),
'int': _Converter('int', int, defaultInt, badInt ),
'float': _Converter('float', float, defaultFloat, badFloat), }
#pprint.pprint(locals()); return {}
dic = {} # Destination.
for name in names:
k, v = _lookup(name, func, False, converters)
dic[k] = v
for name in namesMulti:
k, v = _lookup(name, func, True, converters)
dic[k] = v
# At this point, 'dic' contains all the keys/values we want to keep.
# We could split the method into a superclass
# method for Webware/WebwareExperimental and a subclass for Cheetah.
# The superclass would merely 'return dic'. The subclass would
# 'dic = super(ThisClass, self).webInput(names, namesMulti, ...)'
# and then the code below.
if debug:
print("<PRE>\n" + pprint.pformat(dic) + "\n</PRE>\n\n")
self.searchList().insert(0, dic)
return dic
T = Template # Short and sweet for debugging at the >>> prompt.
# Snapshot of Template's own attribute names; presumably used elsewhere to
# detect searchList entries that would shadow Template attributes -- confirm
# at the usage site.
Template.Reserved_SearchList = set(dir(Template))
def genParserErrorFromPythonException(source, file, generatedPyCode, exception):
    """Wrap a Python exception from compiling/importing generated code in a
    Cheetah ParseError that points (as closely as possible) at the offending
    template source, including a small excerpt of the generated Python.
    """
    filename = isinstance(file, (str, unicode)) and file or None
    sio = StringIO.StringIO()
    traceback.print_exc(1, sio)
    formatedExc = sio.getvalue()
    # Locate the failing line in the generated Python (1-based).
    if hasattr(exception, 'lineno'):
        pyLineno = exception.lineno
    else:
        pyLineno = int(re.search(r'[ \t]*File.*line (\d+)', formatedExc).group(1))
    lines = generatedPyCode.splitlines()
    # Collect the failing line plus up to two lines before it.
    prevLines = []  # (lineno, content)
    for i in range(1, 4):
        if pyLineno - i <= 0:
            break
        prevLines.append((pyLineno + 1 - i, lines[pyLineno - i]))
    # Collect up to three lines after the failing line.
    nextLines = []  # (lineno, content)
    for i in range(1, 4):
        if pyLineno + i > len(lines):
            break
        # BUGFIX: 'lines' is 0-based while pyLineno is 1-based.  The original
        # indexed lines[pyLineno+i], pairing each line number with the
        # content of the line AFTER it (compare prevLines above, which
        # correctly uses lines[pyLineno-i] for lineno pyLineno+1-i).
        nextLines.append((pyLineno + i, lines[pyLineno + i - 1]))
    nextLines.reverse()
    report = 'Line|Python Code\n'
    report += '----|-------------------------------------------------------------\n'
    while prevLines:
        lineInfo = prevLines.pop()
        report += "%(row)-4d|%(line)s\n" % {'row': lineInfo[0], 'line': lineInfo[1]}
    if hasattr(exception, 'offset'):
        # Caret under the offending column of the failing line.
        report += ' ' * (3 + (exception.offset or 0)) + '^\n'
    while nextLines:
        lineInfo = nextLines.pop()
        report += "%(row)-4d|%(line)s\n" % {'row': lineInfo[0], 'line': lineInfo[1]}
    message = [
        "Error in the Python code which Cheetah generated for this template:",
        '='*80,
        '',
        str(exception),
        '',
        report,
        '='*80,
    ]
    # Try to map the Python error back to a Cheetah (line, col) position,
    # first from the traceback text, then by scanning the generated code
    # near the failing line for an embedded position marker.
    cheetahPosMatch = re.search(r'line (\d+), col (\d+)', formatedExc)
    if cheetahPosMatch:
        lineno = int(cheetahPosMatch.group(1))
        col = int(cheetahPosMatch.group(2))
        message.append('\nHere is the corresponding Cheetah code:\n')
    else:
        lineno = None
        col = None
        cheetahPosMatch = re.search(r'line (\d+), col (\d+)',
                                    '\n'.join(lines[max(pyLineno - 2, 0):]))
        if cheetahPosMatch:
            lineno = int(cheetahPosMatch.group(1))
            col = int(cheetahPosMatch.group(2))
            message.append('\nHere is the corresponding Cheetah code.')
            message.append('** I had to guess the line & column numbers,'
                           ' so they are probably incorrect:\n')
    message = '\n'.join(message)
    reader = SourceReader(source, filename=filename)
    return ParseError(reader, message, lineno=lineno, col=col)
# vim: shiftwidth=4 tabstop=4 expandtab
|
jcsp/manila | refs/heads/master | manila/api/views/limits.py | 4 | # Copyright 2010-2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo_utils import timeutils
import six
class ViewBuilder(object):
    """OpenStack API base limits view builder."""

    def build(self, rate_limits, absolute_limits):
        """Assemble the top-level 'limits' response body.

        rate_limits is a list of per-verb limit dicts; absolute_limits is a
        dict of quota name -> value.
        """
        rate_limits = self._build_rate_limits(rate_limits)
        absolute_limits = self._build_absolute_limits(absolute_limits)
        output = {
            "limits": {
                "rate": rate_limits,
                "absolute": absolute_limits,
            },
        }
        return output

    def _build_absolute_limits(self, absolute_limits):
        """Builder for absolute limits

        absolute_limits should be given as a dict of limits.
        For example: {"ram": 512, "gigabytes": 1024}.
        Unknown names and None values are silently dropped.
        """
        # Maps internal quota names to the API-facing limit name(s).
        limit_names = {
            "gigabytes": ["maxTotalShareGigabytes"],
            "snapshot_gigabytes": ["maxTotalSnapshotGigabytes"],
            "shares": ["maxTotalShares"],
            "snapshots": ["maxTotalShareSnapshots"],
            "share_networks": ["maxTotalShareNetworks"],
        }
        limits = {}
        for name, value in six.iteritems(absolute_limits):
            if name in limit_names and value is not None:
                # FIX(readability): the inner loop previously rebound 'name',
                # shadowing the outer loop variable; use a distinct name.
                for display_name in limit_names[name]:
                    limits[display_name] = value
        return limits

    def _build_rate_limits(self, rate_limits):
        """Group per-verb rate limits by their (URI, regex) pair."""
        limits = []
        for rate_limit in rate_limits:
            _rate_limit_key = None
            _rate_limit = self._build_rate_limit(rate_limit)
            # check for existing key
            for limit in limits:
                if (limit["uri"] == rate_limit["URI"] and
                        limit["regex"] == rate_limit["regex"]):
                    _rate_limit_key = limit
                    break
            # ensure we have a key if we didn't find one
            if not _rate_limit_key:
                _rate_limit_key = {
                    "uri": rate_limit["URI"],
                    "regex": rate_limit["regex"],
                    "limit": [],
                }
                limits.append(_rate_limit_key)
            _rate_limit_key["limit"].append(_rate_limit)
        return limits

    def _build_rate_limit(self, rate_limit):
        """Render a single rate-limit entry.

        rate_limit["resetTime"] is a UTC epoch timestamp; it is exposed as
        an ISO-8601 'next-available' string.
        """
        _get_utc = datetime.datetime.utcfromtimestamp
        next_avail = _get_utc(rate_limit["resetTime"])
        return {
            "verb": rate_limit["verb"],
            "value": rate_limit["value"],
            "remaining": int(rate_limit["remaining"]),
            "unit": rate_limit["unit"],
            "next-available": timeutils.isotime(at=next_avail),
        }
|
RandallDW/Aruba_plugin | refs/heads/Aruba_plugin | plugins/org.python.pydev/tests/pysrc/testlib/unittest/relative/toimport.py | 11 | class Test1:
def test1():pass
class Test2:
def test2():pass
def Met1():pass |
zhlinh/leetcode | refs/heads/master | 0349.Intersection of Two Arrays/test.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from solution import Solution
nums1 = [1, 2, 2, 3, 1]
nums2 = [2, 3]
sol = Solution()
res = sol.intersection(nums1, nums2)
print(res)
|
dekisha/dartlamp | refs/heads/master | libraries/codemirror/component-tools/update.py | 28 | #!/usr/bin/env python
# Build every CodeMirror version published on npm (>= first_version) that
# does not yet have a local git tag, then push the new tags.
from distutils.version import StrictVersion as V
import os
import re
from subprocess import check_output, check_call
import sys

# the first version we started maintaining as a component
first_version = V('5.20')
# NOTE(review): blacklist_tags and tag_sh are not referenced anywhere in this
# script -- possibly leftovers; confirm before removing.
blacklist_tags = ['v3.02', '5.13.4']
tools = os.path.dirname(os.path.abspath(__file__))
repo_root = os.path.dirname(tools)
tag_sh = os.path.join(tools, 'cm-tags.sh')
build_sh = os.path.join(tools, 'build.sh')
# Ask npm for every published codemirror version.
lines = check_output(['npm', 'show', 'codemirror@*', 'version']).decode('utf8').splitlines()
# lines of the form "codemirror@version 'version'"
npm_versions = [ line.split()[0].split('@')[1] for line in lines ]
# Versions already tagged in the local repository.
tags = set(check_output(['git', 'tag'], cwd=repo_root).decode('utf8').split())
built = []
for v in sorted(npm_versions, key=V):
    if v in tags:
        print("Already have", v)
        continue
    if V(v) < first_version:
        print("Skipping old version", v)
        continue
    print('building', v)
    check_call([build_sh, v], cwd=repo_root)
    built.append(v)
if built:
    # Publish the tags created by build.sh.
    check_call(['git', 'push', '--tags'], cwd=repo_root)
|
mmnelemane/nova | refs/heads/master | nova/tests/unit/api/openstack/compute/test_cloudpipe.py | 26 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid as uuid_lib
from oslo_config import cfg
from oslo_utils import timeutils
from webob import exc
from nova.api.openstack.compute import cloudpipe as cloudpipe_v21
from nova.api.openstack.compute.legacy_v2.contrib import cloudpipe \
as cloudpipe_v2
from nova.compute import utils as compute_utils
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_network
from nova.tests.unit import matchers
from nova import utils
CONF = cfg.CONF
CONF.import_opt('vpn_image_id', 'nova.cloudpipe.pipelib')
# Random project/instance identifiers shared by every test in this module.
# ('uuid' shadows the stdlib module name, hence the 'uuid_lib' import alias.)
project_id = str(uuid_lib.uuid4().hex)
uuid = str(uuid_lib.uuid4())
def fake_vpn_instance():
    # Minimal Instance object that looks like a running cloudpipe VPN
    # instance: its image_ref matches CONF.vpn_image_id, which is how the
    # cloudpipe code identifies VPN instances.
    return objects.Instance(
        id=7, image_ref=CONF.vpn_image_id, vm_state='active',
        created_at=timeutils.parse_strtime('1981-10-20T00:00:00.000000'),
        uuid=uuid, project_id=project_id)
def compute_api_get_all_empty(context, search_opts=None, want_objects=True):
    """Stub for compute_api.get_all that reports no instances.

    The arguments exist only to match the real API's signature.
    """
    return list()
def compute_api_get_all(context, search_opts=None, want_objects=True):
    """Stub for compute_api.get_all that always finds one fake VPN instance."""
    instance = fake_vpn_instance()
    return [instance]
def utils_vpn_ping(addr, port, timoeout=0.05, session_id=None):
    """Stub for utils.vpn_ping that always reports the VPN as reachable.

    (The 'timoeout' misspelling is preserved: the signature must stay
    identical to the function it replaces.)
    """
    return True
class CloudpipeTestV21(test.NoDBTestCase):
    """Tests for the v2.1 os-cloudpipe API controller.

    All compute/network API calls are stubbed out, so no database or RPC
    is exercised.
    """
    cloudpipe = cloudpipe_v21
    url = '/v2/fake/os-cloudpipe'

    def setUp(self):
        super(CloudpipeTestV21, self).setUp()
        self.controller = self.cloudpipe.CloudpipeController()
        # Default to "no instances exist"; individual tests override this.
        self.stubs.Set(self.controller.compute_api, "get_all",
                       compute_api_get_all_empty)
        self.stubs.Set(utils, 'vpn_ping', utils_vpn_ping)
        self.req = fakes.HTTPRequest.blank('')

    def test_cloudpipe_list_no_network(self):
        # An instance with no network info is still listed, but without
        # ip/port/state details.
        def fake_get_nw_info_for_instance(instance):
            return {}
        self.stubs.Set(compute_utils, "get_nw_info_for_instance",
                       fake_get_nw_info_for_instance)
        self.stubs.Set(self.controller.compute_api, "get_all",
                       compute_api_get_all)
        res_dict = self.controller.index(self.req)
        response = {'cloudpipes': [{'project_id': project_id,
                                    'instance_id': uuid,
                                    'created_at': '1981-10-20T00:00:00Z'}]}
        self.assertEqual(res_dict, response)

    def test_cloudpipe_list(self):
        def network_api_get(context, network_id):
            self.assertEqual(context.project_id, project_id)
            return {'vpn_public_address': '127.0.0.1',
                    'vpn_public_port': 22}

        def fake_get_nw_info_for_instance(instance):
            return fake_network.fake_get_instance_nw_info(self.stubs)
        self.stubs.Set(compute_utils, "get_nw_info_for_instance",
                       fake_get_nw_info_for_instance)
        self.stubs.Set(self.controller.network_api, "get",
                       network_api_get)
        self.stubs.Set(self.controller.compute_api, "get_all",
                       compute_api_get_all)
        res_dict = self.controller.index(self.req)
        response = {'cloudpipes': [{'project_id': project_id,
                                    'internal_ip': '192.168.1.100',
                                    'public_ip': '127.0.0.1',
                                    'public_port': 22,
                                    'state': 'running',
                                    'instance_id': uuid,
                                    'created_at': '1981-10-20T00:00:00Z'}]}
        self.assertThat(res_dict, matchers.DictMatches(response))

    def test_cloudpipe_create(self):
        def launch_vpn_instance(context):
            return ([fake_vpn_instance()], 'fake-reservation')
        self.stubs.Set(self.controller.cloudpipe, 'launch_vpn_instance',
                       launch_vpn_instance)
        body = {'cloudpipe': {'project_id': project_id}}
        res_dict = self.controller.create(self.req, body=body)
        response = {'instance_id': uuid}
        self.assertEqual(res_dict, response)

    def test_cloudpipe_create_no_networks(self):
        # Running out of networks while launching maps to HTTP 400.
        def launch_vpn_instance(context):
            raise exception.NoMoreNetworks
        self.stubs.Set(self.controller.cloudpipe, 'launch_vpn_instance',
                       launch_vpn_instance)
        body = {'cloudpipe': {'project_id': project_id}}
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(exc.HTTPBadRequest,
                          self.controller.create, req, body=body)

    def test_cloudpipe_create_already_running(self):
        # If a VPN instance already exists, create() must not launch another;
        # the stub fails the test if it is ever invoked.
        def launch_vpn_instance(*args, **kwargs):
            self.fail("Method should not have been called")
        self.stubs.Set(self.controller.cloudpipe, 'launch_vpn_instance',
                       launch_vpn_instance)
        self.stubs.Set(self.controller.compute_api, "get_all",
                       compute_api_get_all)
        body = {'cloudpipe': {'project_id': project_id}}
        req = fakes.HTTPRequest.blank(self.url)
        res_dict = self.controller.create(req, body=body)
        response = {'instance_id': uuid}
        self.assertEqual(res_dict, response)

    def test_cloudpipe_create_with_bad_project_id_failed(self):
        # v2.1 input validation rejects malformed project ids.
        body = {'cloudpipe': {'project_id': 'bad.project.id'}}
        req = fakes.HTTPRequest.blank(self.url)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, req, body=body)
class CloudpipeTestV2(CloudpipeTestV21):
    # Re-run the v2.1 test suite against the legacy v2 extension.
    cloudpipe = cloudpipe_v2

    def test_cloudpipe_create_with_bad_project_id_failed(self):
        # The legacy v2 API does not validate the project_id format, so the
        # v2.1 validation test is intentionally neutralized here.
        pass
class CloudpipePolicyEnforcementV21(test.NoDBTestCase):
    """Verify every os-cloudpipe action is gated by its policy rule."""

    def setUp(self):
        super(CloudpipePolicyEnforcementV21, self).setUp()
        self.controller = cloudpipe_v21.CloudpipeController()
        self.req = fakes.HTTPRequest.blank('')

    def _common_policy_check(self, func, *arg, **kwarg):
        # Install a rule the fake (non-admin) request context cannot satisfy,
        # then assert the controller call is rejected with a message that
        # names the rule.
        rule_name = "os_compute_api:os-cloudpipe"
        rule = {rule_name: "project:non_fake"}
        self.policy.set_rules(rule)
        exc = self.assertRaises(
            exception.PolicyNotAuthorized, func, *arg, **kwarg)
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % rule_name,
            exc.format_message())

    def test_list_policy_failed(self):
        self._common_policy_check(self.controller.index, self.req)

    def test_create_policy_failed(self):
        body = {'cloudpipe': {'project_id': uuid}}
        self._common_policy_check(self.controller.create, self.req, body=body)

    def test_update_policy_failed(self):
        body = {"configure_project": {'vpn_ip': '192.168.1.1',
                                      'vpn_port': 2000}}
        self._common_policy_check(
            self.controller.update, self.req, uuid, body=body)
|
from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter

from mkt.abuse.views import AppAbuseViewSet, UserAbuseViewSet

# DRF router exposing the user- and app-abuse report endpoints.
abuse = SimpleRouter()
abuse.register('user', UserAbuseViewSet, base_name='user-abuse')
abuse.register('app', AppAbuseViewSet, base_name='app-abuse')

# Mounted by the project urlconf under the API prefix.
api_patterns = patterns(
    '',
    url('^abuse/', include(abuse.urls)),
)
|
johnsliao/bostonservicemap | refs/heads/master | draw_dots_at_requests.py | 1 | import sys
import pandas
from PIL import Image
from config import MIN_LAT, MIN_LON, MAX_LAT, MAX_LON, CANVAS_X, CANVAS_Y
def ll_to_pixel(lat, lon, bounds=None, canvas=None):
    """Map a (lat, lon) coordinate to (x, y) canvas pixel coordinates.

    bounds : optional (min_lat, min_lon, max_lat, max_lon); defaults to the
             module-level config constants (backward compatible).
    canvas : optional (width, height); defaults to (CANVAS_X, CANVAS_Y).

    Returns float coordinates (callers must round/int for pixel indexing).
    NOTE(review): x is derived from latitude and y from longitude, matching
    the original implementation -- confirm this axis convention is intended.
    """
    min_lat, min_lon, max_lat, max_lon = (
        bounds if bounds is not None else (MIN_LAT, MIN_LON, MAX_LAT, MAX_LON))
    canvas_x, canvas_y = canvas if canvas is not None else (CANVAS_X, CANVAS_Y)
    x_frac = (max_lat - lat) / (max_lat - min_lat)
    y_frac = (max_lon - lon) / (max_lon - min_lon)
    return x_frac * canvas_x, y_frac * canvas_y
def load_ll(fname):
    """Read (lat, lon) pairs from a CSV with LATITUDE/LONGITUDE columns.

    fname may be anything pandas.read_csv accepts (path or file-like).
    Returns a list of [lat, lon] float pairs in row order.
    """
    data = pandas.read_csv(fname)
    # Zip the two columns directly instead of indexing row-by-row with
    # range(len(...)) -- same result, far fewer per-row lookups.
    return [[float(lat), float(lon)]
            for lat, lon in zip(data['LATITUDE'], data['LONGITUDE'])]
def main(fname):
    # Render a dot at each service-request location from the CSV onto a
    # transparent RGBA canvas and save it as a PNG.  (Python 2 code.)
    points = load_ll(fname)
    I = Image.new('RGBA', (CANVAS_X, CANVAS_Y))
    IM = I.load()
    for lat, lon in points:
        x, y = ll_to_pixel(lat, lon)
        print x, y
        # Stamp the centre pixel plus the diagonal neighbours at offsets 1
        # and 2 (an X-shaped dot), clipped to the canvas.
        # NOTE(review): x and y are floats here; PIL pixel access normally
        # requires integer coordinates -- confirm this runs without error.
        for x1, y1 in [(x,y),
                       (x+1,y+1),
                       (x-1,y-1),
                       (x-1,y+1),
                       (x+1, y-1),
                       (x+2,y+2),
                       (x-2,y-2),
                       (x-2,y+2),
                       (x+2, y-2)]:
            if 0 <= x1 < CANVAS_X and 0 <= y1 < CANVAS_Y:
                print 'Adding to canvas'
                IM[x1,y1] = (0,0,0)
    # NOTE(review): output path is hard-coded.
    I.save("./bostonservicemap/sample_data/sample.png", "PNG")
if __name__ == '__main__':
    # Expect exactly one argument: the service-requests CSV path.
    if len(sys.argv) != 2:
        print '''Usage
        draw_dots_at_requests.py <service_requests.csv>'''
    else:
        fname = sys.argv[1]
        main(fname)
SerialShadow/SickRage | refs/heads/master | autoProcessTV/lib/requests/packages/chardet/jpcntx.py | 1776 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .compat import wrap_ord
NUM_OF_CATEGORY = 6  # number of frequency categories in jp2CharContext
DONT_KNOW = -1  # sentinel confidence returned before enough data is seen
ENOUGH_REL_THRESHOLD = 100  # sequences needed before got_enough_data() is True
MAX_REL_THRESHOLD = 1000  # stop analysing once this many sequences are seen
MINIMUM_DATA_THRESHOLD = 4  # below this, get_confidence() returns DONT_KNOW
# This is the hiragana 2-char sequence table; the number in each cell represents the frequency category of that sequence.
jp2CharContext = (
(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)
class JapaneseContextAnalysis:
    """Score hiragana 2-char sequences to judge how Japanese-like a byte
    stream is.

    Subclasses provide get_order() to map encoding-specific bytes to a
    hiragana index into jp2CharContext.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        """Return the analyser to its pristine state."""
        self._mTotalRel = 0  # total sequences received
        # category counters; each integer counts sequences in its category
        self._mRelSample = [0] * NUM_OF_CATEGORY
        # if the last byte in the current buffer is not the last byte of a
        # character, we need to know how many bytes to skip in the next buffer
        self._mNeedToSkipCharNum = 0
        self._mLastCharOrder = -1  # the order of the previous char
        # set True once a conclusion has been reached; further input ignored
        self._mDone = False

    def feed(self, aBuf, aLen):
        """Consume aLen bytes of aBuf, updating the sequence statistics."""
        if self._mDone:
            return

        # The buffer is byte oriented and a character may span more than one
        # buffer.  When the previous buffer ended mid-character we recorded
        # how many bytes to skip; consume (and clear) that debt now.  We
        # could instead buffer the partial character and analyse it once
        # complete, but one character makes little statistical difference,
        # so simply skipping it keeps the logic simple and fast.
        i = self._mNeedToSkipCharNum
        if i > aLen:
            # The cut-off character extends past even this (tiny) buffer.
            self._mNeedToSkipCharNum = i - aLen
            return
        # Bug fix: the debt was never cleared, so a single partial buffer
        # caused every subsequent feed() to wrongly skip its first bytes.
        self._mNeedToSkipCharNum = 0
        while i < aLen:
            order, charLen = self.get_order(aBuf[i:i + 2])
            i += charLen
            if i > aLen:
                # character is cut off at the end of this buffer
                self._mNeedToSkipCharNum = i - aLen
                self._mLastCharOrder = -1
            else:
                if (order != -1) and (self._mLastCharOrder != -1):
                    self._mTotalRel += 1
                    if self._mTotalRel > MAX_REL_THRESHOLD:
                        self._mDone = True
                        break
                    self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
                self._mLastCharOrder = order

    def got_enough_data(self):
        """True once enough sequences were seen to trust the confidence."""
        return self._mTotalRel > ENOUGH_REL_THRESHOLD

    def get_confidence(self):
        """Return the fraction of non-category-0 sequences, or DONT_KNOW."""
        # This is just one way to calculate confidence. It works well for me.
        if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
            # float() guards against Python 2 integer division, which would
            # truncate this ratio to 0 almost always.
            return float(self._mTotalRel - self._mRelSample[0]) / self._mTotalRel
        else:
            return DONT_KNOW

    def get_order(self, aBuf):
        # Base class knows no encoding: every byte is irrelevant, length 1.
        return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
    """Shift_JIS flavour of the context analysis.

    Upgrades the reported charset name to CP932 when lead bytes that are
    only valid in Microsoft's extension are observed.
    """

    def __init__(self):
        # Bug fix: the base __init__ (which calls reset()) was skipped, so a
        # freshly constructed instance had none of the _mDone/_mTotalRel/...
        # attributes and feed() raised AttributeError until reset() was
        # called externally by the prober.
        JapaneseContextAnalysis.__init__(self)
        self.charset_name = "SHIFT_JIS"

    def get_charset_name(self):
        """Return the charset detected so far (SHIFT_JIS or CP932)."""
        return self.charset_name

    def get_order(self, aBuf):
        """Return (hiragana order or -1, byte length of the current char)."""
        if not aBuf:
            return -1, 1
        # find out the current character's byte length
        first_char = wrap_ord(aBuf[0])
        if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
            charLen = 2
            if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
                # lead bytes used only by the CP932 (Windows-31J) extension
                self.charset_name = "CP932"
        else:
            charLen = 1

        # return its order if it is hiragana
        if len(aBuf) > 1:
            second_char = wrap_ord(aBuf[1])
            # NOTE(review): lead byte 202 (0xCA) inherited from the original
            # Mozilla table code — confirm against the charset tables before
            # changing; Shift_JIS hiragana conventionally uses lead 0x82.
            if (first_char == 202) and (0x9F <= second_char <= 0xF1):
                return second_char - 0x9F, charLen
        return -1, charLen
class EUCJPContextAnalysis(JapaneseContextAnalysis):
    """EUC-JP flavour of the context analysis: maps raw bytes to hiragana
    orders for the shared 2-char frequency model."""

    def get_order(self, aBuf):
        """Return (hiragana order or -1, byte length of the current char)."""
        if not aBuf:
            return -1, 1
        lead = wrap_ord(aBuf[0])
        # Determine how many bytes the current character occupies.
        if lead == 0x8F:
            char_len = 3
        elif lead == 0x8E or 0xA1 <= lead <= 0xFE:
            char_len = 2
        else:
            char_len = 1
        # Hiragana in EUC-JP: lead byte 0xA4, trail byte 0xA1-0xF3.
        if lead == 0xA4 and len(aBuf) > 1:
            trail = wrap_ord(aBuf[1])
            if 0xA1 <= trail <= 0xF3:
                return trail - 0xA1, char_len
        return -1, char_len
# flake8: noqa
|
jiangzhixiao/odoo | refs/heads/8.0 | addons/website_event_sale/__openerp__.py | 307 | # -*- coding: utf-8 -*-
{
    'name': "Online Event's Tickets",
    'category': 'Hidden',  # technical bridge module, hidden from the Apps menu
    'summary': "Sell Your Event's Tickets",
    'website': 'https://www.odoo.com/page/events',
    'version': '1.0',
    'description': """
Online Event's Tickets
======================
""",
    'author': 'OpenERP SA',
    # Bridges website events with event ticket sales and the web shop.
    'depends': ['website_event', 'event_sale', 'website_sale'],
    'data': [
        'views/website_event_sale.xml',
        'security/ir.model.access.csv',
        'security/website_event_sale.xml',
    ],
    'qweb': ['static/src/xml/*.xml'],
    'installable': True,
    # installs automatically as soon as all dependencies are present
    'auto_install': True
}
|
gregorynicholas/raven-python | refs/heads/master | raven/contrib/django/raven_compat/__init__.py | 41 | """
raven.contrib.django.raven_compat
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from raven.contrib.django import * # NOQA
|
adrienbrault/home-assistant | refs/heads/dev | homeassistant/components/ozw/fan.py | 6 | """Support for Z-Wave fans."""
import math
from homeassistant.components.fan import (
DOMAIN as FAN_DOMAIN,
SUPPORT_SET_SPEED,
FanEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util.percentage import (
int_states_in_range,
percentage_to_ranged_value,
ranged_value_to_percentage,
)
from .const import DATA_UNSUBSCRIBE, DOMAIN
from .entity import ZWaveDeviceEntity
# Feature bitmask advertised to Home Assistant for these fans.
SUPPORTED_FEATURES = SUPPORT_SET_SPEED

SPEED_RANGE = (1, 99)  # off is not included
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up Z-Wave Fan from Config Entry."""

    @callback
    def _create_fan(values):
        """Instantiate and register a fan entity for a discovered value set."""
        async_add_entities([ZwaveFan(values)])

    # Listen for newly discovered fan values and remember the unsubscribe
    # handle so the entry can tear the listener down on unload.
    unsubscribe = async_dispatcher_connect(
        hass, f"{DOMAIN}_new_{FAN_DOMAIN}", _create_fan
    )
    hass.data[DOMAIN][config_entry.entry_id][DATA_UNSUBSCRIBE].append(unsubscribe)
class ZwaveFan(ZWaveDeviceEntity, FanEntity):
    """Representation of a Z-Wave fan."""

    async def async_set_percentage(self, percentage):
        """Set the speed percentage of the fan."""
        if percentage is None:
            # 255 tells the device to restore its previous speed.
            target = 255
        else:
            target = (
                0
                if percentage == 0
                else math.ceil(percentage_to_ranged_value(SPEED_RANGE, percentage))
            )
        self.values.primary.send_value(target)

    async def async_turn_on(
        self, speed=None, percentage=None, preset_mode=None, **kwargs
    ):
        """Turn the device on."""
        await self.async_set_percentage(percentage)

    async def async_turn_off(self, **kwargs):
        """Turn the device off."""
        self.values.primary.send_value(0)

    @property
    def is_on(self):
        """Return true if device is on (speed above 0)."""
        return self.values.primary.value > 0

    @property
    def percentage(self):
        """Return the current speed as a percentage.

        The raw Z-Wave value is a byte: 0 means off, 1-99 is the normal
        speed range, and 255 means "previous value".
        """
        return ranged_value_to_percentage(SPEED_RANGE, self.values.primary.value)

    @property
    def speed_count(self) -> int:
        """Return the number of speeds the fan supports."""
        return int_states_in_range(SPEED_RANGE)

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORTED_FEATURES
|
PanDAWMS/panda-server | refs/heads/master | pandaserver/taskbuffer/SupErrors.py | 1 | # Supplemental error codes
# Error values must be less than 1000
class SupErrors (object):
    """Registry of supplemental error codes; all values must stay below 1000."""

    # NOTE(review): 'SUCCEEDE' looks like a typo for 'SUCCEEDED', but the key
    # is a runtime string that callers may look up by name -- confirm no
    # consumer depends on the current spelling before renaming it.
    error_codes = {
        'SUCCEEDE': 0,
        'INVALID_BATCH_ID': 1,
        'WORKER_ALREADY_DONE': 2,
    }
|
zsiciarz/django | refs/heads/master | tests/migrations/test_migrations_initial_false/0001_not_initial.py | 21 | from django.db import migrations, models
class Migration(migrations.Migration):
    # Deliberately flagged non-initial; per the file path this migration
    # appears to exercise the explicit `initial = False` loader behaviour.
    initial = False

    operations = [
        # Author table; the (name, slug) uniqueness is added by the
        # AlterUniqueTogether operation at the end.
        migrations.CreateModel(
            "Author",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=255)),
                ("slug", models.SlugField(null=True)),
                ("age", models.IntegerField(default=0)),
                ("silly_field", models.BooleanField(default=False)),
            ],
        ),
        # Minimal second model.
        migrations.CreateModel(
            "Tribble",
            [
                ("id", models.AutoField(primary_key=True)),
                ("fluffy", models.BooleanField(default=True)),
            ],
        ),
        migrations.AlterUniqueTogether(
            name='author',
            unique_together=set([('name', 'slug')]),
        ),
    ]
|
mozilla/inventory | refs/heads/master | core/network/migrations/0001_initial.py | 3 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the `network` app: creates the
    `network` and `network_key_value` tables and their unique constraints."""

    def forwards(self, orm):
        """Create tables, emit creation signals, then add unique constraints."""
        # Adding model 'Network'
        db.create_table('network', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('vlan', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vlan.Vlan'], null=True, on_delete=models.SET_NULL, blank=True)),
            ('site', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['site.Site'], null=True, on_delete=models.SET_NULL, blank=True)),
            ('ip_type', self.gf('django.db.models.fields.CharField')(max_length=1)),
            ('ip_upper', self.gf('django.db.models.fields.BigIntegerField')(blank=True)),
            ('ip_lower', self.gf('django.db.models.fields.BigIntegerField')(blank=True)),
            ('network_str', self.gf('django.db.models.fields.CharField')(max_length=49)),
            ('prefixlen', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('dhcpd_raw_include', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
        ))
        db.send_create_signal('network', ['Network'])

        # Adding unique constraint on 'Network', fields ['ip_upper', 'ip_lower', 'prefixlen']
        db.create_unique('network', ['ip_upper', 'ip_lower', 'prefixlen'])

        # Adding model 'NetworkKeyValue'
        db.create_table('network_key_value', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('key', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('value', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('is_option', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('is_statement', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('has_validator', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('network', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['network.Network'])),
        ))
        db.send_create_signal('network', ['NetworkKeyValue'])

        # Adding unique constraint on 'NetworkKeyValue', fields ['key', 'value', 'network']
        db.create_unique('network_key_value', ['key', 'value', 'network_id'])

    def backwards(self, orm):
        """Undo forwards(): drop constraints first, then the tables."""
        # Removing unique constraint on 'NetworkKeyValue', fields ['key', 'value', 'network']
        db.delete_unique('network_key_value', ['key', 'value', 'network_id'])

        # Removing unique constraint on 'Network', fields ['ip_upper', 'ip_lower', 'prefixlen']
        db.delete_unique('network', ['ip_upper', 'ip_lower', 'prefixlen'])

        # Deleting model 'Network'
        db.delete_table('network')

        # Deleting model 'NetworkKeyValue'
        db.delete_table('network_key_value')

    # Frozen ORM snapshot South uses to build the `orm` object passed to
    # forwards()/backwards(); auto-generated, do not edit by hand.
    models = {
        'network.network': {
            'Meta': {'unique_together': "(('ip_upper', 'ip_lower', 'prefixlen'),)", 'object_name': 'Network', 'db_table': "'network'"},
            'dhcpd_raw_include': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip_lower': ('django.db.models.fields.BigIntegerField', [], {'blank': 'True'}),
            'ip_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'ip_upper': ('django.db.models.fields.BigIntegerField', [], {'blank': 'True'}),
            'network_str': ('django.db.models.fields.CharField', [], {'max_length': '49'}),
            'prefixlen': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['site.Site']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'vlan': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vlan.Vlan']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
        },
        'network.networkkeyvalue': {
            'Meta': {'unique_together': "(('key', 'value', 'network'),)", 'object_name': 'NetworkKeyValue', 'db_table': "'network_key_value'"},
            'has_validator': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_option': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_statement': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'network': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['network.Network']"}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'site.site': {
            'Meta': {'unique_together': "(('name', 'parent'),)", 'object_name': 'Site', 'db_table': "'site'"},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['site.Site']", 'null': 'True', 'blank': 'True'})
        },
        'vlan.vlan': {
            'Meta': {'unique_together': "(('name', 'number'),)", 'object_name': 'Vlan', 'db_table': "'vlan'"},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'number': ('django.db.models.fields.PositiveIntegerField', [], {})
        }
    }

    complete_apps = ['network']
Endika/rma | refs/heads/8.0 | crm_rma_lot_mass_return/tests/test_crm_rma_lot_mass_return.py | 3 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright 2015 Vauxoo
# Author: Osval Reyes,
# Yanina Aular
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import date
from openerp.tests.common import TransactionCase
class TestCrmRmaLotMassReturn(TransactionCase):
"""
Test cases for CRM RMA Lot Mass Return Module
"""
def setUp(self):
super(TestCrmRmaLotMassReturn, self).setUp()
self.metasearch_wizard = self.env['returned.lines.from.serial.wizard']
self.partner_id = self.env['res.partner'].browse(
self.ref('base.res_partner_2'))
self.invoice_id, self.lot_ids = self.create_sale_invoice()
self.claim_id = self.env['crm.claim'].\
create({
'name': 'Test',
'claim_type': self.ref('crm_claim_type.'
'crm_claim_type_customer'),
'partner_id': self.invoice_id.partner_id.id,
'pick': True
})
def create_sale_order(self, order_policy='manual'):
sale_order_id = self.env['sale.order'].create({
'partner_id': self.partner_id.id,
'note': 'Sale Order Test',
'order_policy': order_policy,
'payment_term': self.ref('account.account_payment_term'),
'order_line': [(0, 0, {
'name': 'Test',
'product_id': self.ref('product.product_product_8'),
'product_uom_qty': 2
})]
})
sale_order_id.action_button_confirm()
return sale_order_id
def test_01_render_metasearch_view(self):
res = self.claim_id.render_metasearch_view()
self.assertEqual(res['res_model'], self.metasearch_wizard._name)
def test_02_load_products(self):
wizard_id = self.metasearch_wizard.with_context({
'active_model': self.claim_id._name,
'active_id': self.claim_id.id,
'active_ids': [self.claim_id.id]
}).create({})
# Get ids for invoice lines
lines_list_id = wizard_id.onchange_load_products(
self.invoice_id.number +
'*5*description here' + '\n' + self.lot_ids[0].name,
[(6, 0, [])])
lines_list_id = lines_list_id['domain']['lines_list_id'][0][2]
option_ids = wizard_id.onchange_load_products(
self.invoice_id.number, [(6, 0, [])])['value']['option_ids'][0][2]
wizard_id.option_ids = option_ids
wizard_id.lines_list_id = [(6, 0, lines_list_id)]
# the invoice lines are two
self.assertEqual(len(lines_list_id), 2)
# Validate it has exactly as much records as the taken invoice has
self.assertEqual(len(lines_list_id),
int(self.invoice_id.invoice_line.quantity))
wizard_id._set_message()
wizard_id.add_claim_lines()
# Claim record it must have same line count as the invoice
qty = 0
for inv_line in self.invoice_id.invoice_line:
qty += inv_line.quantity
self.assertEqual(len(self.claim_id.claim_line_ids),
int(qty))
def sale_validate_invoice(self, sale):
sale_advance_obj = self.env['sale.advance.payment.inv']
context = {
'active_model': 'sale.order',
'active_ids': [sale.id],
'active_id': sale.id,
}
wizard_invoice_id = sale_advance_obj.with_context(context).create({
'advance_payment_method': 'all',
})
wizard_invoice_id.with_context(context).create_invoices()
invoice_id = sale.invoice_ids[0]
invoice_id.signal_workflow('invoice_open')
# check if invoice is open
self.assertEqual(invoice_id.state, 'open')
pay_account_id = self.env['account.account'].\
browse(self.ref("account.cash"))
journal_id = self.env['account.journal'].\
browse(self.ref("account.bank_journal"))
date_start = date.today().replace(day=1, month=1).strftime('%Y-%m-%d')
period_id = self.env['account.fiscalyear'].search(
[('date_start', '=', date_start)]).period_ids[8]
invoice_id.pay_and_reconcile(
invoice_id.amount_total, pay_account_id.id,
period_id.id, journal_id.id, pay_account_id.id,
period_id.id, journal_id.id,
name="Payment for Invoice")
# in order to proceed is necessary to get the sale order invoiced
# and the invoice paid as well
self.assertTrue(sale.invoiced)
self.assertEqual(invoice_id.state, 'paid')
return invoice_id
    def create_sale_invoice(self):
        """Create a sale order, transfer its pickings with fresh lots,
        then validate and pay the resulting invoice.

        A new stock.production.lot is created for every move and written
        onto the transfer wizard line, so later assertions can check the
        lots end-to-end.

        :return: tuple ``(invoice, lot_ids)`` where ``lot_ids`` is the
            list of stock.production.lot records created (one per move).
        """
        sale_order_id = self.create_sale_order('manual')
        lot_ids = []
        for picking_id in sale_order_id.picking_ids:
            picking_id.force_assign()
            # create wizard
            wizard_id = self.env['stock.transfer_details'].create({
                'picking_id': picking_id.id,
            })
            # make the transfers
            for move_id in picking_id.move_lines:
                wizard_item_id = self.env['stock.transfer_details_items'].\
                    create({
                        'transfer_id': wizard_id.id,
                        'product_id': move_id.product_id.id,
                        'quantity': move_id.product_qty,
                        'sourceloc_id': move_id.location_id.id,
                        'destinationloc_id':
                        self.ref('stock.stock_location_stock'),
                        'lot_id': False,
                        'product_uom_id': move_id.product_uom.id,
                    })
                lot_id = self.env['stock.production.lot'].create({
                    'product_id': move_id.product_id.id,
                    'name': 'Test Lot %s%s' % (move_id.id,
                                               move_id.product_id.id)
                })
                # keep lot_id for later check
                lot_ids.append(lot_id)
                wizard_item_id.write({
                    'lot_id': lot_id.id
                })
            wizard_id.do_detailed_transfer()
        # Before continuing, the invoice must be open to get a number value
        # and this is needed by the wizard
        invoice_id = self.sale_validate_invoice(sale_order_id)
        return invoice_id, lot_ids
|
yhfudev/docsis3ns3 | refs/heads/master | src/nix-vector-routing/bindings/callbacks_list.py | 331 | callback_classes = [
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::Socket>', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
|
pirate/bookmark-archiver | refs/heads/master | archivebox/cli/__init__.py | 1 | __package__ = 'archivebox.cli'
__command__ = 'archivebox'
import os
import sys
import argparse
from typing import Optional, Dict, List, IO, Union
from pathlib import Path
from ..config import OUTPUT_DIR, check_data_folder, check_migrations
from importlib import import_module
CLI_DIR = Path(__file__).resolve().parent

# these common commands will appear sorted before any others for ease-of-use
meta_cmds = ('help', 'version')                               # don't require a valid data folder at all
main_cmds = ('init', 'config', 'setup')                       # don't require an existing db present
archive_cmds = ('add', 'remove', 'update', 'list', 'status')  # require an existing db present
fake_db = ("oneshot",)                                        # use fake in-memory db

display_first = (*meta_cmds, *main_cmds, *archive_cmds)

# every imported command module must have these properties in order to be valid
required_attrs = ('__package__', '__command__', 'main')


# basic checks to make sure imported files are valid subcommands
# (plain defs instead of assigned lambdas, per PEP 8 / E731)
def is_cli_module(fname):
    """Return True if *fname* looks like an archivebox_<subcommand>.py module."""
    return fname.startswith('archivebox_') and fname.endswith('.py')


def is_valid_cli_module(module, subcommand):
    """Return True if an imported module exposes the required CLI attributes
    and its declared __command__ matches *subcommand*."""
    return (
        all(hasattr(module, attr) for attr in required_attrs)
        and module.__command__.split(' ')[-1] == subcommand
    )
def list_subcommands() -> Dict[str, str]:
    """Find and import all valid archivebox_<subcommand>.py files in CLI_DIR.

    Side effect: each subcommand's ``main`` callable is also bound into this
    module's globals under the subcommand name.

    Returns a dict mapping subcommand name -> its main() docstring, ordered
    with the common commands (``display_first``) before everything else.
    """
    COMMANDS = []
    for filename in os.listdir(CLI_DIR):
        if is_cli_module(filename):
            subcommand = filename.replace('archivebox_', '').replace('.py', '')
            module = import_module('.archivebox_{}'.format(subcommand), __package__)
            assert is_valid_cli_module(module, subcommand)
            COMMANDS.append((subcommand, module.main.__doc__))
            globals()[subcommand] = module.main

    # sort order: fixed slot for the common commands, then by name length
    # (the +100 offset keeps all "others" after every display_first entry)
    display_order = lambda cmd: (
        display_first.index(cmd[0])
        if cmd[0] in display_first else
        100 + len(cmd[0])
    )
    return dict(sorted(COMMANDS, key=display_order))
def run_subcommand(subcommand: str,
                   subcommand_args: List[str]=None,
                   stdin: Optional[IO]=None,
                   pwd: Union[Path, str, None]=None) -> None:
    """Run a given ArchiveBox subcommand with the given list of args.

    Meta commands (help/version) run without any setup; every other command
    boots Django first, and archive commands additionally require a valid
    data folder and applied migrations before being dispatched.
    """
    subcommand_args = subcommand_args or []

    if subcommand not in meta_cmds:
        # deferred import: config loading is expensive, and help/version
        # must work even without a data folder
        from ..config import setup_django

        cmd_requires_db = subcommand in archive_cmds
        init_pending = '--init' in subcommand_args or '--quick-init' in subcommand_args

        if cmd_requires_db:
            check_data_folder(pwd)

        # skip the db check when the same invocation is about to init it
        setup_django(in_memory_db=subcommand in fake_db, check_db=cmd_requires_db and not init_pending)

        if cmd_requires_db:
            check_migrations()

    module = import_module('.archivebox_{}'.format(subcommand), __package__)
    module.main(args=subcommand_args, stdin=stdin, pwd=pwd)   # type: ignore
# import all subcommands once at module load time; this also binds each
# subcommand's main() into this module's namespace (see list_subcommands)
SUBCOMMANDS = list_subcommands()


class NotProvided:
    """Sentinel type distinguishing "argument omitted" from an explicit None."""
    pass
def main(args: Optional[List[str]]=NotProvided, stdin: Optional[IO]=NotProvided, pwd: Optional[str]=None) -> None:
    """CLI entry point: parse the global flags, pick a subcommand, dispatch it."""
    # NotProvided lets callers pass args=None / stdin=None explicitly, while a
    # bare call falls back to the real process argv and stdin
    args = sys.argv[1:] if args is NotProvided else args
    stdin = sys.stdin if stdin is NotProvided else stdin

    subcommands = list_subcommands()
    parser = argparse.ArgumentParser(
        prog=__command__,
        description='ArchiveBox: The self-hosted internet archive',
        add_help=False,
    )
    # --help/--version are mutually exclusive with naming a subcommand
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        '--help', '-h',
        action='store_true',
        help=subcommands['help'],
    )
    group.add_argument(
        '--version',
        action='store_true',
        help=subcommands['version'],
    )
    group.add_argument(
        "subcommand",
        type=str,
        help="The name of the subcommand to run",
        nargs='?',
        choices=subcommands.keys(),
        default=None,
    )
    parser.add_argument(
        "subcommand_args",
        help="Arguments for the subcommand",
        nargs=argparse.REMAINDER,
    )
    command = parser.parse_args(args or ())

    # translate the global flags into their subcommand equivalents;
    # no subcommand at all means "show help"
    if command.version:
        command.subcommand = 'version'
    elif command.help or command.subcommand is None:
        command.subcommand = 'help'

    # log every real command invocation (not help/version/status)
    if command.subcommand not in ('help', 'version', 'status'):
        from ..logging_util import log_cli_command

        log_cli_command(
            subcommand=command.subcommand,
            subcommand_args=command.subcommand_args,
            stdin=stdin,
            pwd=pwd or OUTPUT_DIR
        )

    run_subcommand(
        subcommand=command.subcommand,
        subcommand_args=command.subcommand_args,
        stdin=stdin,
        pwd=pwd or OUTPUT_DIR,
    )
# public API: the helpers plus every dynamically imported subcommand main()
__all__ = (
    'SUBCOMMANDS',
    'list_subcommands',
    'run_subcommand',
    *SUBCOMMANDS.keys(),
)
|
jideobs/twilioAngular | refs/heads/master | venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/mbcharsetprober.py | 2923 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .charsetprober import CharSetProber
class MultiByteCharSetProber(CharSetProber):
    """Base prober for multi-byte encodings.

    Concrete subclasses supply a coding state machine (``_mCodingSM``) and a
    character distribution analyzer (``_mDistributionAnalyzer``); this class
    drives both over the input bytes and combines their verdicts into a
    detection state.
    """

    def __init__(self):
        CharSetProber.__init__(self)
        self._mDistributionAnalyzer = None
        self._mCodingSM = None
        # last two bytes seen, carried across feed() calls so a character
        # split over two buffers can still be analyzed
        self._mLastChar = [0, 0]

    def reset(self):
        """Reset the prober and both helper state machines."""
        CharSetProber.reset(self)
        if self._mCodingSM:
            self._mCodingSM.reset()
        if self._mDistributionAnalyzer:
            self._mDistributionAnalyzer.reset()
        self._mLastChar = [0, 0]

    def get_charset_name(self):
        # abstract: concrete probers return their encoding name
        pass

    def feed(self, aBuf):
        """Feed a chunk of bytes; return the resulting detection state."""
        aLen = len(aBuf)
        for i in range(0, aLen):
            codingState = self._mCodingSM.next_state(aBuf[i])
            if codingState == constants.eError:
                if constants._debug:
                    sys.stderr.write(self.get_charset_name()
                                     + ' prober hit error at byte ' + str(i)
                                     + '\n')
                self._mState = constants.eNotMe
                break
            elif codingState == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == constants.eStart:
                # a complete character was recognised; hand it (including the
                # byte carried over from the previous buffer, if any) to the
                # distribution analyzer
                charLen = self._mCodingSM.get_current_charlen()
                if i == 0:
                    self._mLastChar[1] = aBuf[0]
                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
                else:
                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
                                                     charLen)

        # remember the final byte for the next feed() call
        self._mLastChar[0] = aBuf[aLen - 1]

        if self.get_state() == constants.eDetecting:
            if (self._mDistributionAnalyzer.got_enough_data() and
                    (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
                # enough evidence at high confidence: short-circuit to found
                self._mState = constants.eFoundIt

        return self.get_state()

    def get_confidence(self):
        return self._mDistributionAnalyzer.get_confidence()
|
avlach/univbris-ocf | refs/heads/ofelia.opticaldevelopment | expedient/src/python/expedient/common/permissions/models.py | 4 | '''
Created on May 28, 2010
@author: jnaous
'''
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.generic import GenericForeignKey
from django.contrib.auth.models import User
from expedient.common.permissions.exceptions import PermissionCannotBeDelegated
from expedient.common.permissions.managers import ExpedientPermissionManager,\
ObjectPermissionManager, PermitteeManager, PermissionOwnershipManager
import logging
logger = logging.getLogger("permissions.models")
class ExpedientPermission(models.Model):
    """
    Groups all L{ObjectPermission} instances sharing one permission name;
    L{ObjectPermission} links permittees to a particular object.

    A permission may optionally name a view (full path in C{view}) to
    redirect the browser to when the permission is missing, e.g. to
    request it.  The view signature is::

        missing_perm_view(
            request, permission, permittee, target_obj_or_class,
            redirect_to=None)

      - C{permission}: the missing L{ExpedientPermission}.
      - C{permittee}: the object that needs to exercise the permission.
      - C{target_obj_or_class}: object or class whose permission is missing.
      - C{redirect_to}: the URL from which the request was made.

    Limitation: only objects whose id is a C{PositiveIntegerField} can be
    targeted.

    @cvar objects: a L{ExpedientPermissionManager} instance.
    @ivar name: the permission's unique name.
    @ivar description: information about the permission.
    @ivar view: full path to the permission's "missing permission" view.
    @ivar object_permissions: per-object permissions with this name
        (reverse FK from L{ObjectPermission}).
    """

    objects = ExpedientPermissionManager()

    name = models.CharField(max_length=100, unique=True)
    description = models.TextField(blank=True, default="")
    # optional: full python path of the view handling a missing permission
    view = models.CharField("Permission View", max_length=300,
                            blank=True, null=True)

    def __unicode__(self):
        return "perm name: %s, desc: %s, view: %s" % (
            self.name, self.description, self.view)
class ObjectPermission(models.Model):
    """
    Links a permission to its target object using the C{contenttypes}
    framework and to the set of permittees holding the permission.

    @cvar objects: L{ObjectPermissionManager} for the class.
    @ivar permission: the L{ExpedientPermission} this row instantiates.
    @ivar object_type: C{ContentType} of the target.
    @ivar object_id: id of the target (positive int).
    @ivar target: the target object itself (generic FK).
    @ivar permittees: m2m to L{Permittee} through L{PermissionOwnership}.
    """

    objects = ObjectPermissionManager()

    permission = models.ForeignKey(ExpedientPermission)
    object_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    target = GenericForeignKey("object_type", "object_id")
    permittees = models.ManyToManyField(
        "Permittee", through="PermissionOwnership")

    def __unicode__(self):
        return u"%s object permission for %s" % (self.permission.name,
                                                 self.target)

    class Meta:
        # exactly one ObjectPermission row per (permission, target) pair
        unique_together = (
            ("permission", "object_type", "object_id"),
        )

    def give_to(self, receiver, giver=None, can_delegate=False):
        """
        Give permission ownership to an object, after checking that the
        C{giver} can actually hand the permission to C{receiver}.

        @param receiver: the permittee receiving the permission. If not a
            L{Permittee} instance, one will be created (if not found).
        @keyword giver: the permission owner giving the permission. If not a
            L{Permittee} instance, one will be created (if not found).
            Defaults to C{None} (no delegation check performed).
        @keyword can_delegate: can the receiver in turn give the permission
            out? Default is False.
        @return: the new (or existing) C{PermissionOwnership} instance.
        """
        # Is someone delegating the permission?
        if giver:
            giver = Permittee.objects.get_as_permittee(giver)
            # check that the giver can give ownership
            can_give = Permittee.objects.filter_for_obj_permission(
                # self, can_delegate=True).filter(
                # No need to look for someone that CAN actually delegate...
                # Permissions already take care of this.
                self, can_delegate=False).filter(
                id=giver.id).count() > 0
            if not can_give:
                raise PermissionCannotBeDelegated(
                    giver, self.permission.name)

        receiver = Permittee.objects.get_as_permittee(receiver)

        # All is good, get or create the permission ownership.
        po, created = PermissionOwnership.objects.get_or_create(
            obj_permission=self,
            permittee=receiver,
            defaults=dict(can_delegate=can_delegate),
        )
        # Don't change the can_delegate option if the permission was already
        # created unless it's to enable it. We don't want people
        # taking each other's delegation capabilities (e.g. from owner)
        if po.can_delegate != can_delegate and (created or \
                (not created and can_delegate)):
            po.can_delegate = can_delegate
            po.save()

        return po
class Permittee(models.Model):
    """
    Generic wrapper identifying a permission owner via the C{contenttypes}
    framework -- permittees are not necessarily
    C{django.contrib.auth.models.User} instances.

    @cvar objects: L{PermitteeManager} for the class.
    @ivar object_type: C{ContentType} of the wrapped permittee object.
    @ivar object_id: id of the permittee (positive int).
    @ivar object: the wrapped object itself (generic FK).
    """

    objects = PermitteeManager("object_type", "object_id")

    object_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    object = GenericForeignKey("object_type", "object_id")

    def __unicode__(self):
        return u"%s" % self.object

    class Meta:
        # one Permittee wrapper per underlying object
        unique_together=(("object_type", "object_id"),)
class PermissionOwnership(models.Model):
    """
    Through-model recording what a permittee may do with an object
    permission (the m2m table between L{ObjectPermission} and L{Permittee}).

    @cvar objects: L{PermissionOwnershipManager} for the class.
    @ivar obj_permission: the owned object permission.
    @ivar permittee: the owner.
    @ivar can_delegate: can the permittee give this permission to someone
        else?
    """

    objects = PermissionOwnershipManager()

    obj_permission = models.ForeignKey(ObjectPermission)
    permittee = models.ForeignKey(Permittee)
    can_delegate = models.BooleanField()

    def __unicode__(self):
        return u"%s - %s: Delegatable is %s" % (self.obj_permission,
                                                self.permittee,
                                                self.can_delegate)

    class Meta:
        # a permittee owns a given object permission at most once
        unique_together=(("obj_permission", "permittee"),)
class PermissionRequest(models.Model):
    """
    A request from a C{auth.models.User}, on behalf of a C{Permittee}, to
    obtain some permission for a particular target.

    @ivar requesting_user: the user requesting the permission for
        C{permittee}.
    @ivar permittee: the object that will receive the permission if granted.
    @ivar permission_owner: the owner who should grant the permission.
    @ivar requested_permission: the L{ObjectPermission} being requested.
    @ivar message: a free-form message to the permission owner.
    """

    requesting_user = models.ForeignKey(
        User, related_name="sent_permission_requests")
    permittee = models.ForeignKey(Permittee)
    permission_owner = models.ForeignKey(
        User, related_name="received_permission_requests")
    requested_permission = models.ForeignKey(ObjectPermission)
    message = models.TextField(default="", blank=True)

    def allow(self, can_delegate=False):
        """Grant the request (delegating as the owner) and delete it."""
        self.requested_permission.give_to(
            self.permittee,
            giver=self.permission_owner,
            can_delegate=can_delegate)
        self.delete()

    def deny(self):
        """Reject the request; it is simply deleted."""
        self.delete()
|
siosio/intellij-community | refs/heads/master | python/testData/inspections/PyStringFormatInspection/NewStylePackedReference.py | 29 | ref = {"fst": 1, "snd": 2}
print "first is {fst}, second is {snd}".format(**ref)
<warning descr="Too few arguments for format string">"{}"</warning>.format() |
liveperson/Auto-Merger | refs/heads/master | src/merger/svn/svnutils.py | 1 | # Copyright (c) 2012 Liveperson. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Liveperson. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
General svn utilities, used heavily as auto merger works with svn.
"""
from merger.conf import mergeconf
from merger.conf.mergeconf import M_SHU, TMPL_COMMIT_LOG_MSG, TMPDIR, NA
from merger.utils.shellutils import ShellUtils
import logging
import os
import re
# Shell command templates for the svn operations used by the auto merger.
COMMIT_TMPL = 'svn commit %s -F %s --username %s --password %s'
CLEANUP_TMPL = 'svn cleanup %s/%s --username %s --password %s'
REVERT_TMPL = 'svn revert -R %s/%s --username %s --password %s'
UPDATE_TMPL = 'svn update --force %s/%s --username %s --password %s'
CO_TMPL = 'svn co %s %s/%s --force --username %s --password %s'
# BUGFIX: the credentials part used to be written *inside* the triple-quoted
# literal (including a stray newline and a literal "+ '" fragment), so every
# generated merge command was malformed.  It is now one well-formed line.
MERGE_TMPL = ('svn merge --non-interactive -r %s:%s %s %s/%s'
              ' --username %s --password %s')
LOG_TMPL = 'svn log %s -r %s:%s'
LOG_URL_TMP = 'svn log -v %s -r %s:%s --username %s --password %s %s %s'
LOOK_CHANGED_TMPL = 'svnlook changed --revision %s %s'
LOOK_AUTH_TMPL = 'svnlook author --revision %s %s'
LOOK_LOG_TMPL = 'svnlook log --revision %s %s'
MESSAGE_ABORT_COMMIT = 'Aborting commit'
MESSAGE_SUCCESSFUL_COMMIT = 'Committed revision'
def get_commit_log_message(repo, rev):
    """
    Fetch the original commit log message for a revision via ``svnlook``.

    The returned text is later embedded in the merge commit so the merge
    records who committed what, and from where.

    Args:
        repo: Repository on which the original commit was performed.
        rev: The revision originally committed, from which the merge is made.

    Returns:
        The original commit's log message.
    """
    lookup_cmd = LOOK_LOG_TMPL % (rev, repo)
    mergeconf.LOGGER.debug('svn look log command: %s ' % lookup_cmd)
    commit_message = M_SHU.runshellcmd(lookup_cmd)
    mergeconf.LOGGER.debug('log result: %s ' % commit_message)
    return commit_message
def get_files_by_log(log):
    """
    Return the changed-file lines from an ``svn log -v`` output.

    Arguments:
        log: The verbose log text produced by ``svn log -v``.

    Returns:
        A list of the raw lines whose action code marks a change
        (A=added, D=deleted, M=modified, R=replaced); lines keep their
        original whitespace.
    """
    # svn prefixes each changed path with a single action letter and a space;
    # startswith() accepts a tuple, replacing the original or-chain.
    return [line for line in log.splitlines()
            if line.strip().startswith(('A ', 'D ', 'M ', 'R '))]
class SVNCmdParams:
    """
    Parameter bag for running the various svn commands.

    Every keyword argument becomes an instance attribute.  The well-known
    parameters below default to None/False when not supplied; unknown
    keywords are kept as attributes too (preserving the original behavior).
    """

    def __init__(self, **kwargs):
        """Create the parameter holder, filling unset options with defaults."""
        prop_defaults = {
            "username": None,
            "password": None,
            "tmpdir": None,
            "logger": None,
            "url": None,
            "startdate": None,
            "enddate": None,
            "isxml": False,
            "stoponcopy": False,
        }
        # defaults first, then caller overrides; the per-attribute manual
        # re-assignment the class used to do was redundant, since update()
        # already populates the instance attributes.
        self.__dict__.update(prop_defaults)
        self.__dict__.update(kwargs)
class SVNUtils:
    """
    General svn utilities, used heavily as the auto merger works with svn.

    All shell interaction goes through ShellUtils / M_SHU; credentials and
    the temporary working directory come from the SVNCmdParams supplied at
    construction time.
    """

    def __init__(self, svn_cmd_params):
        """Initialize from an L{SVNCmdParams} parameter bag."""
        self.logger = svn_cmd_params.logger
        self.tmpdir = svn_cmd_params.tmpdir
        self.username = svn_cmd_params.username
        self.password = svn_cmd_params.password
        self.shellutils = ShellUtils(svn_cmd_params.logger)

    def log(self, url, startdate, enddate, isxml=False, stoponcopy=False):
        """
        Run the svn log command over a date range.

        Args:
            url: Repository url to log.
            startdate: Start date string (wrapped in {} for svn -r).
            enddate: End date string (wrapped in {} for svn -r).
            isxml: Append --xml for machine-readable output.
            stoponcopy: Append --stop-on-copy.

        Returns:
            Raw stdout of the svn log command.
        """
        logcommand = LOG_URL_TMP % (url, '{' + startdate + '}', '{' + enddate + '}', self.username, self.password,
                                    ' --xml' if isxml else '', ' --stop-on-copy' if stoponcopy else '')
        logging.info(logcommand)
        return M_SHU.runshellcmd(logcommand)

    def get_log_message(self, url, rev):
        """
        Get the commit log message for one url and revision.

        Arguments:
            url: Url to get the commit log message for.
            rev: Revision to get the commit log message for.

        Returns:
            The message which was committed on revision rev at the url.
        """
        mergeconf.LOGGER.debug("Commit log message for url [%s] with rev [%s]" % (url, rev))
        # BUGFIX: LOG_URL_TMP has seven %s placeholders; the two trailing
        # option slots (--xml / --stop-on-copy) were previously missing,
        # making this line raise TypeError before the command ever ran.
        log_cmd = (LOG_URL_TMP % (url, rev, rev, self.username, self.password, '', ''))
        return M_SHU.runshellcmd(log_cmd)

    def commit(self, fileordir_to_commit, message, rev):
        """
        Commit files into the svn repository with the message specified.

        Arguments:
            fileordir_to_commit: Path to the working dir (or file) to commit.
            message: The commit log message (handed to svn via -F).
            rev: Originating revision; used to name the temp message file.

        Returns:
            The stdout response from the svn server.
        """
        mergeconf.LOGGER.debug("Committing file/dir %s with message %s" %
                               (fileordir_to_commit, message))
        message_file_name = TMPL_COMMIT_LOG_MSG % (self.tmpdir, rev)
        message_file_name_org = message_file_name + '.org'
        mergeconf.LOGGER.debug("Creating message file: %s" %
                               (message_file_name_org))
        message_file = open(message_file_name_org, 'w')
        try:
            message_file.write(message)
            message_file.close()
            svn_commit_merged_cmd = (COMMIT_TMPL %
                                     (fileordir_to_commit, message_file_name,
                                      self.username, self.password))
            # strip CR/LF noise and blank lines from the raw message before
            # handing the cleaned file to svn via -F
            initial_msg_file = open(message_file_name_org, 'rb')
            msg_file = open(message_file_name, 'wb')
            for line in initial_msg_file:
                line = line.replace('\r', '')
                line = line.replace('\n', '')
                if line != '':
                    msg_file.write(line + '\n')
            initial_msg_file.close()
            msg_file.close()
            result = self.shellutils.runshellcmd(svn_commit_merged_cmd)
            mergeconf.LOGGER.debug(svn_commit_merged_cmd)
        finally:
            # NOTE(review): only the cleaned message file is removed; the
            # .org scratch file is left behind, and if an exception fires
            # before `result` is assigned, the lines below hit an unbound
            # local.  Left as-is to preserve existing behavior.
            os.remove(message_file_name)
        mergeconf.LOGGER.debug('returning result: ' + result)
        return result

    def update_local_workbranch(self, branch_dir):
        """
        Do everything possible (cleanup, revert, update) to ensure the
        local working branch is synced with svn before merging into it.

        Args:
            branch_dir: The local working-folder name of the branch.

        Returns:
            Nothing; the local working branch is updated in place.
        """
        M_SHU.runshellcmd(CLEANUP_TMPL % (TMPDIR, branch_dir, self.username, self.password))
        M_SHU.runshellcmd(REVERT_TMPL % (TMPDIR, branch_dir, self.username, self.password))
        M_SHU.runshellcmd(UPDATE_TMPL % (TMPDIR, branch_dir, self.username, self.password))

    def checkout(self, url, tolocation):
        """
        Check out a branch url to the local disk under TMPDIR.

        Args:
            url: The branch to check out.
            tolocation: Location (relative to TMPDIR) to check out to.

        Returns:
            Nothing; the branch is checked out to local disk.
        """
        checkout_cmd = CO_TMPL % (url, TMPDIR, tolocation + '/', self.username, self.password)
        # branch to merge to does not exist on disk: check it out from svn
        M_SHU.runshellcmd(checkout_cmd)

    def merge_to_branch(self, revstart, revend=None, merge_from_url=None,
                        merge_to_branch=None):
        """
        Merge revision(s) from a url into the local working branch.

        Args:
            revstart: Revision to merge from.
            revend: If given, the range revstart..revend is merged;
                otherwise only revstart is merged.
            merge_from_url: The url to merge from.
            merge_to_branch: The branch name to merge into (local copy).

        Returns:
            The end revision that was merged, as a string.
        """
        mergeconf.LOGGER.debug('\nMerging...')
        # svn merges a single revision as the range (rev-1):rev
        prev_rev = int(revstart) - 1
        prev_rev_as_str = str(prev_rev)
        if revend is None:
            rev_as_str = str(revstart)
        else:
            rev_as_str = str(revend)
        svn_merge_cmd = MERGE_TMPL % (prev_rev_as_str, rev_as_str, merge_from_url, TMPDIR,
                                      get_branch_dir(merge_to_branch),
                                      self.username, self.password)
        mergeconf.LOGGER.debug('merge cmd: : ' + svn_merge_cmd)
        merge_result = M_SHU.runshellcmd(svn_merge_cmd)
        mergeconf.LOGGER.debug('merge result:' + merge_result)
        mergeconf.LOGGER.debug('merge cmd: : ' + svn_merge_cmd)
        return rev_as_str
def get_branch_dir(branch_name):
    """
    Map a branch name to its local working-folder name.

    Branches such as ``projecta/1.0`` and ``projectb/1.0`` would collide if
    only the final path component were used, so every path separator is
    turned into an underscore (``projecta/1.0`` -> ``projecta_1.0``).

    Args:
        branch_name: The branch name to compute the working folder for.

    Returns:
        Working branch dir on disk.
    """
    return "_".join(branch_name.split('/'))
def get_branch_col(svn_look_line, branches_map):
    """
    Return the branch collection (per-project merge sequence) matching an
    svnlook line.

    Each project maps to the ordered list of branches that merges should
    walk through; the first collection containing a branch mentioned in
    *svn_look_line* is returned.

    Args:
        svn_look_line: Line used to deduce the relevant project.
        branches_map: All projects with their assigned branch sequences.

    Returns:
        The matching branch sequence, or None when nothing matches.
    """
    for branch_seq in branches_map.values():
        if any(branch in svn_look_line for branch in branch_seq):
            return branch_seq
    return None
def get_branch_by_look(svn_look_line, BRANCHES_MAP):
    """
    Return the branch path mentioned in an svnlook line.

    Example: for ``Branches/myapp/ver1/src/main/java/myfile.java`` the
    configured branch ``Branches/myapp/ver1`` is returned.

    Args:
        svn_look_line: Line used to deduce the relevant branch.
        BRANCHES_MAP: All projects with their assigned branch sequences.

    Returns:
        The matching branch path, or None when nothing matches.
    """
    mergeconf.LOGGER.debug('get_branch_by_look: svn_look_line: ' + svn_look_line + ', BRANCHES_MAP: ' + str(BRANCHES_MAP))
    for branch_seq in BRANCHES_MAP.values():
        for branch in branch_seq:
            mergeconf.LOGGER.debug('get_branch_by_look: svn_look_line: ' + svn_look_line + ', branch: ' + branch)
            if branch in svn_look_line:
                return branch
def get_branch_url(name):
    """
    Build the full svn url for a branch name.

    Args:
        name: The branch name (as provided in configuration branches).

    Returns:
        The branch's full svn url under the configured repository root.
    """
    return '/'.join((mergeconf.BASE_REPOSITORY_PATH, name))
def get_next_branch(svn_look_line, branches_map):
    """
    Return the branch that follows the committed one in its merge sequence,
    i.e. the branch to auto-merge into.

    Args:
        svn_look_line: svnlook line for a commit.
        branches_map: Map of ordered branch sequences per project.

    Returns:
        The next branch name, or None when the committed branch is the last
        one in its sequence (or no branch in the sequence matches).
    """
    branch_seq = get_branch_col(svn_look_line, branches_map)
    for pos, candidate in enumerate(branch_seq):
        if candidate in svn_look_line:
            if pos + 1 < len(branch_seq):
                return branch_seq[pos + 1]
    return None
def get_commit_rev_by_resp(commit_resp):
    """
    Parse the revision number out of svn's commit stdout.

    svn prints "Committed revision <rev>." on success; when that marker is
    absent (or commit_resp is None) the NA sentinel is returned.

    Args:
        commit_resp: The stdout response of the svn commit command.

    Returns:
        The committed revision id as a string, or NA when not found.
    """
    if commit_resp is None:
        return NA
    match = re.search(r"Committed revision (.*?)\.", commit_resp,
                      re.DOTALL | re.MULTILINE)
    # the pattern has exactly one group, so the old groups()/len() dance
    # was redundant: group(1) is present whenever a match exists
    return match.group(1) if match else NA
# Module-level singleton used by the merge hooks: credentials and the temp
# working directory come straight from the static mergeconf configuration.
svnparams = SVNCmdParams(username=mergeconf.SVN_USERNAME, password=mergeconf.SVN_PASSWORD, tmpdir=TMPDIR, logger=mergeconf.LOGGER)
SVNUTILS = SVNUtils(svnparams)
|
aruizramon/alec_erpnext | refs/heads/master | erpnext/patches/v4_0/countrywise_coa.py | 120 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
    """
    v4.0 migration: normalize ``Account.account_type`` values (Cash, Stock,
    Fixed Asset, Bank) and backfill ``report_type`` / ``balance_must_be``
    from the legacy ``is_pl_account`` / ``debit_or_credit`` columns.
    """
    frappe.reload_doc("setup", 'doctype', "company")
    frappe.reload_doc("accounts", 'doctype', "account")

    frappe.db.sql("""update tabAccount set account_type='Cash'
        where account_type='Bank or Cash' and account_name in ('Cash', 'Cash In Hand')""")

    frappe.db.sql("""update tabAccount set account_type='Stock'
        where account_name = 'Stock Assets'""")

    # rename the remaining legacy account types to their v4 names
    ac_types = {"Fixed Asset Account": "Fixed Asset", "Bank or Cash": "Bank"}
    for old, new in ac_types.items():
        frappe.db.sql("""update tabAccount set account_type=%s
            where account_type=%s""", (new, old))

    try:
        frappe.db.sql("""update `tabAccount` set report_type =
            if(is_pl_account='Yes', 'Profit and Loss', 'Balance Sheet')""")

        frappe.db.sql("""update `tabAccount` set balance_must_be=debit_or_credit
            where ifnull(allow_negative_balance, 0) = 0""")
    except Exception:
        # best-effort backfill: the legacy columns may already be gone on
        # newer schemas.  Narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        pass
|
pranavk/xhtml2pdf | refs/heads/master | test/linkloading.py | 154 | # -*- coding: utf-8 -*-
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "$Revision: 194 $"
__author__ = "$Author: holtwick $"
__date__ = "$Date: 2008-04-18 18:59:53 +0200 (Fr, 18 Apr 2008) $"
import ho.pisa as pisa
import logging
log = logging.getLogger(__file__)
def dummyLoader(name):
return '\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00F\x00\x00\x00\x89\x04\x03\x00\x00\x00c\xbeS\xd6\x00\x00\x000PLTE\x00\x00\x00\n\x06\x04\x18\x14\x0f-&\x1eLB6w`E\x8f\x80q\xb2\x9c\x82\xbe\xa1{\xc7\xb0\x96\xd1\xbd\xa9\xd9\xd0\xc6\xef\xeb\xe6\xf8\xf3\xef\xff\xfb\xf7\xff\xff\xffZ\x83\x0b|\x00\x00\x0c\xedIDATx^u\x97]l\x1bWv\xc7g\xe2`\x81\xbe\xcd%Gr\xd3\xa7P\x12e\xb7\x01\x8a\xd0")E\x01\x02\x8f\xf8!\x8bI\x17\x10\xc5!))5`\xf1C\xb4\xb25`S\xb2l\xb95\x90H\xa4.\xb9/u$K3\xe3\xa2\x80W\x12\xc59L\xf6a\xb3\x8dcN\xd6@\xb7\x1f\x01\x8a\x85\x16\x9b-\xfa\x81M\xb8@\x83l\xd1\xd8\xbc|)\xd0\x97\x82\xea\xb93\x92\xec"\xce\x11 \t3?\xfe\xcf\xff\x9e{\xce\x01(\x1c>7\x18\xfb\xc2\xfaE\xffk_\xb6\x18\xeb\x1e>\x8f\xe92d\xfe%T\xa8\x98\xfa\x07\x1f $<\x0f\xe1\x91\xabT\xc1\xacT\xf2\xbfd\xec\xbb\x98\xdfM\xeb\x86aYP\xfa\xd3\xd6\xf3\x98C[\xa6\xaaU\xa1a5\xe9\x1b\xad\xef\xd0i}\x91\xccy+\xc8X\xf5E\xf6]:\xff0\xd8\x97\xce7\xb9P\xf1\xd1\xb7\x98\xaec\xe7/\xd3\xa1\xeb\x81{\x96e5\xd7.\xb6\x85\xe7\x99aO\x94\xf1R(\xfeC\xce\xd4F\xbf\xc50\x1b\xfa\xefS\xa9\xb2\x12p\x98({\x8eN\x9b\xb1\xbf\xf5O\xa5\xd7\x0b\xb4\xc9\x0f\x96\xec<G\xa7\xc5\x1e\xbf\xfa\xe2b\x90\x16\xb2\x00\x96E\x93O\x9e\xe7\xe77\x8b\xd2@ \xa3\xa7\x96\xe6\r\xab\xb9\x97\xfc\xf6\xb90WV\x0e\x8d(\xa1\xa5dd*\x06PL\xa2\xe7g\xdfw\xba\xe8\xe6o\x06\xc6\xd5\x80\xc7\xe5s\xbb|\xbd\x91\xd2\xb9 \x13\x9e1\xc2\x13\xb5\xfeN\rn\xa5\xd5a\xc5+\xe7\xb7\xf5\xa2\xcbC\xde>a\x9c\xd2\xb5\xad\x07\xdbS\x0b\xb0\xa5z\xeb\x94\xd2y\x80kD\xee<e\x10h\x7fs]\xf4g\xa7\x01\xb6\x12\x91z\xa9P\x8a\\\xcfg\xfdQ\xf6\x0c\x83\xb1CD?\x05\x80\xf2\xa4;z)\xb8\x11\xf1\x11\xf7\xe5\x8b\x9d\xff\xcf\\\x92H\x846\x80f\x91Ys/\x11\xe2r\x85\xfe\x98u\x9e\xf5\xf3_\x1eB\xd2U\x00\x9a\xf3\xc9\xc92\xb9\xbc\xbc\xec\x93N?:\xce\xd59\xect\xdb\xec_\xbdC\xa4\x1f\x99\xb9\x81\x97\xddj\xb9g\x8c\xf4\xaf\xe8\x8f\xba\xc8\x1cwy\xbb\xd3\xb8\xab.\xfb\x0bU\xd03S\xa2\xac\x96\x03k\xe1\x02\xe4\x19\xbe\x12N\xcc|3<U\xd8O\x02\xd4iQ\x12\\j\x81R\x80\xbd\x14\x16\xed\x88\xc1\xfavw&\x02isj\xa2\xa9\xd1\x12\x91\xc4\xfe$\xa5\xe1\xbc\xf2f\xbbs\xcc 
\xc2\xb2\xc6\xcd\xec\xe8\xfe\xa2\x05\xb4F$A\x0c\x94\n\xee\x9b\xc5\xec_\xb3\xa7\x0c\xfb\xf7q\xad\xb2\xb6b5?h\xea\xe6$\x11\t\xe9\xebs\r\xbdv\xf5\xf6\t\xd3a\xec#5\xb8\x9c\x08\xdf\xb4\xc0J\xc1\x9a$\x11\x7f8\x1c\x01\xb8\xf4\x17\xec\xb0s\xe29\x93\x18\x08\xa5\xcc\xa4eA\xaep\xd7#\xca\xa0\xeb\xd7o\xd5\x8a\xb7\x19;a:.\x1f\x11\xdd7\x1b8R\xcb\x83\xf5\xac<\xbf\x1e.,\xce~<\xff\xe3N\x9b\x1d3m\x0f\xea\x8b\x85{\xd6\xa7\xd6\xc3\xf8e}\xd9\xdc C\xd1\xd9f\xfe\x9d\x16;f\xba\x7f/\x12A\x10\xce\xe2\x88[\xffT\x9a\x99\xc8\x0co\xf5\xf5\x05g\xad\xda\x0fX\xeb\xa4\xceqQ\x10$\xb1\xb7\xd2@\xa86x\x7f8>h._\x9dh4\x8d\xa7:\x8f#X\x13At\xdb3nF\xee\xc8\x19wV^\xf4\x1b\xd6\xdc\xed\x13\xe6w\x01I\x90\x90\xa1F\x05\x99\xdc}B\x88(\x87}\xb7\xac\xda\x99\x13\xe6\xa7\xa1\xf3\x02fs\xa5)\xbd\xd70\r\xceH"\x91\xc2\x15\xc8\x1e\x9f\xbd\xbd\x17\xf7\x8b\x04m\x07\xd2\xb4\x02\xc8 !\xcf\xe1\x83\x0b\xc6\x9d+\\\x87u;\xedl\xdc{^\x12\x05\x89$\x0b\xd40\xef\x12\tu\xd2\x99!\xec\xc4\xab\x17\x8f\x98\xc7/\xc6\x07\xc6$;\xc1YZ\xd1+\n\x11E\x12\xa0\xe0\x1b\x18G\xd3\x0e\xf3\xb57\xeeN\xbc,\x89\xa2@z\xd0\x12]\xc34C\x11d\xbct\x809\x0c\xfbU 
N"\x1eA\x92\xf0l\x03\xd8]\xeb\nq/\xc9\xb4\xe6\x91\x13\xf2\x97\xc8t\x1dF\xea#\xa2\xc0\xebH\x06)\x98\x8b\xc4\xbd\xd73\x12\x17e\xe5\x956g\xb0C~\x15P\x89(\t<\x08\xe9\xbda\xc0]\xcf\x1f\xed\x91\xbcBd\xe5\rv\xc4\xfc:\xac\xe2Qlf\xc8G\x82\x95\xc6\'\xf1\x18(><\xa6\xfb\xc0\xf6\x83\xcc\xe7\t\xd5G\x1c&\x8d\xc3E\x1b\x0fK\x00\x8a"\xc8\xd9\xde\x93\xfb\xfa\\U\xa7\x08\xcf\x85\x96\xd3\xf9\xb1\xf4\x0f\x9b\x9c\x11\xa4q_\xf8\xe0)3\xa5\x9e\x97\x1c;^\xbaU\xa8Z[1x\x9f\xbcX$3_v9\xd3\xedt?W\xe3^\x14r\xa04T\xc0\xfad\x14\xc6r\x83\xf7\xa5\xc4\x91\x1f\xc6\x90!r\x9fs0\xb1\xa76\xdd\xb0\x1e\xc66\xcf\\\x9ay\xf5\x85\xc4\xc1aW\xb0\x97\xd355A\x88,8AjA\x1d\x1b-S\x98Ly\xe4\xe4m\xe7\xec-\xe6WU\x82%\x94\x1cF\xed\xa1Uk/\xa2\xb9\xb3\xe4T\xee\r\xf6[dZ-\x16@F\xc2{w\x92\x05C#\xd4\x1a\x1f\xae\xcbe\x8f\xff\\\xaf\xe3\xa7\xfd\xf5\xd9\xb2:\x89wu\x14\xb2\xe2\xbeqO_\xa9\x0f\xaf\xfb\xfa\x06\xe7\xae\xb4m?\xff\xdc[\x8a\xa8\xca1$\x8a!\xf2Zc\x13\xea\x17\xd6\\I(\xcd\xb4\x84\xeea\x9b}\xe4\xce\x8f\x85\x13\xce\x8d\x89\xc8HR\x10\xb2P\xa7\x19w\x0c\xf6\x93\xbf\xe4L\xeb\x12\x89\x95\\\x11\xc5\xbe1" *\xca\xc6\x80Ik\xbe\xf0\x02\xd4s\x8f\xb8\x9fo|\xbd\x83\xda\x80+\xc7\xdbPD\x10\x8f\xf8\xc2B?\xadlD\x8b\x00\x943]\xf6?\xa9\xfe\x1e\xdc\xd6\x83\x08\t\xbc\x00\xc3\x8aH\xd2\xfd\x85\x8a_\x1b?a~\xb4\xb0\x99\xf1-g\xfc\x86\x11\x1a\x1a:\xd7G\x00\xce\x8b\xbd\xef\x176a\xed\xb5f\xb3\x9e{\x9b\xe7\xda\xbde\xc1^h\x1cj\x97s*\xc69\x80]B2\x05]\xcb.\x00\xd4\xcb\xafs\x9d\xfb\xef\xe0\x90\xefG\r\x8d\xaa\xe10\x9aA\x8eH\xee\x02-\xab^\x00\xd3f\xba\xbb\xc6\xa7V\xb3\xa9Uu]\xcf\x86\xb1\xda\xf6\x8c\xbe\x90,\xe4\x16]Q\xd08s\xd8\xde\xc5=\xd0\x040\xa0\x01e\x1f\x8e\xab\xcd\x90Hr\xdd\xf4yS\xb0\xc5\x99\xc71\x04@\xdf\x1c6\x00\xeeb\x89$\xde\xb5\xc4C\xfa\x01v\x86\xd2\xb0\x8f\x9e\xbb\xffV\x05\x93\x96\t\x99\x9b\x013DPG$R\xdf\xa9bx\x85\x7f\x12\xac\x07\x9c\xf9\xa4\n:\x8d\xe3h\xcfC.\xcb\xcbH\xdc\x03j\x90\xa2]\xdd\xc0\x9de\xfe\x00\x99T\x15\xa0\xe6!\x0159\x9f\xcf\xc7\t"I\x7f\xb9@\xab\x1a\xa5Z\xf5SK{\x13\x99\xf1*\xd4\xe7\xc8 
\x8e\xf0\xe5\x89p\xde#{\xe3\xe9<\xb5\xa3R\xbfgY\x9a\x1f=GQg{\xfe\x06\xc5X\xd0\xebD.\xac\xf3\xff\xcb\xaa\x9a\xac\\\xc0\x9a\x94\\\x8e\x0e\x0f\xcd\xf9\xa4G.P\x8cuU\x8dxw\x0b\r0Koq\x86\x1aO!\x9a\x90\xd3\x1c\xc9*\x84\x8c\x16/7\xabu\xfa\xe7\xc8Di\xc5fL\x8a&\xe9v8\x89\x7fscD\x92\x17&W\x1e\xde\xd3J\xaf\xd8\x0c\xad\xd8\x14\xbe\x03C_T\xf3\xf9\\\xe2eB\xdc\xb1\x84F\xf5\xf0\x1a?{\x84[D\xa4\x01u\x8a\xbf\xf6T\x1e\xb83\xce\x04\xbd\xa6\xaa\xcd\xaf}\x88\xe7:?L\xb5\xfcM\'\x1b`(X*\xf5UQL-\xf5>\x18\xce\x8c$\x99\xc0\x98\x12\xa4tJ\xbd\xac\xeb<\x1bX\xcd\x1d{w\xf2\xae\x1d\xfeI\x94,q\xa6\xa3\x04\n\xebJ\x00\x97.\xcc\xeb\xb4\n\xf0>2|d%\x12\xfbI\xbe\'\x94\xecp\x9d@j]q\x0f\x8d\xd3\x9a?\xa6\x1b\x00\xef\x11I\xe0\xbb\x91\xb8\xa6wj\xd3\xc1 \xcf\xf5sY\xcdM\x11\x12(\x94\x88\\\xb1>K\xbf\xe7\x91\x88\xc8\xb5\xdc\xc9\xd0\xb5\xec\x99\xb78\xf3\xebS\xaa\x8a\x03\x88\x8c\x87\\\xf8\xf4\xfe\xcc5\xb4\x83\x86\x029\xf7\xd4\xe9\x9b\xa1\xa5/\xb9\x9f\xff\x15#jbh(\x92\xc6\x06\t6\xe6.\xfb\xb1\xc4\xfdb\x8fV\xf2\x89\xa2\x1c\xb9\xd2\xe6\xcc\x93\xc9\x80\x8a\x81\xf5\xc5d\xd5D\xed\x0f\xefr\xdd\x0b\xb4<\x89\xae\xc8\x15\xc6\x84\x0e\xeb~\x16Bh\x8a\xa8\xe5\xb0+Y\xd9\xdc\x9b\xb5,S!7hi\nG\x92\x1cp\xe6\xf0\xb7\x1fo\xf7\xf5\xf5\xbdL\x06K\x02\xb9P\x9d\xd8\xbbeY;\xa4\x07\xef,!\x89\xd2\xe9N\xf7\x10\x99v\x13\xee\xa0K\xd2["nZ\x81M\xec\xab;\x9e42\x93\x82$\xbe\xd29\xe4\xcc\x93\x18lp\xd5`\x89\x04\x0bU\x98Z\xb1\x9a\xfex\x9a\x96\xf9\xfa#\xb79\xc3\xba\xc8\x94\xf9|\xde(\x91\xe84@\xb2a}\x9c\x0c\xdb\xa9\x04\xe1\xd4#\x9ba\xc8`k\x89\xb2^"\x91\n\xec\xa7,kiKFF\xc1\x91\xc5m\x88\xcc!{2\x08\xb4\xe4\x11\'\x00sU\xeb\xc5\xd9fx\xa6&\xd3r\x02\'Q|\xb3c3\x87\xed\xbbP_#d\xc6\x98\x93\xd3\xd5\xd5\xc0\xec\xc3\x01(\xcbeu\n\x19r\x91ul\xa6\xb3\x07u\xac\xde\xeeK\x97\x08\xf6Vpv\'\x06\xef\x8e\xe4T\x85\x88\x92\xcc\x1c\xa6\xcb\x90YC\xe6\xb4B\xc2!wa=\x07\xf5w\xc7U,\x0e\x91\xfe\xa4\xd5:a\xcc\xb2O\xde\xed%\x18=t{\x06\xb4w\x83\t\x9f\x84%\xfbY\xf7(\x17\xdbY\x00\xaa\xc8\xbbI>\xea\x11\xdee\x9a\x12T\xb0b\xe2\xf7\x0eP\xc7\xf1|\x9f3$Q\xe4\xdb9J\rd\xce\xe5}\x9c\xf9\xb36;\xd6\xb9?\x83
\x8c\x18\xbe\x86\x0c\x19__\x01s\xcd\xbd\xf8\x02\xf6*\x16\x87\xb5\x8f\xfc\xd8:b\xe2\x9a$H\xaedy\x01\xccLOv@\xb2\xdb\x82u\x1d\xa6\xbd\xb3b3s(\xe3N\xa1\x9fm_$\x11\x97D^c\xac\xa0\xe3g\x0f\x00\xeb<4\x87\x1f\x95SK\xbcX\xc3XA\xe9-4s\xc4t\x9f\xf8\x01\xd6\xf0H\xd8\xc7DNfM:\xd7sF\x9d\x12\xe5\x1f?\xcb\x8c\xa2K\x91\xb8\xe6DI\x94\xd3\xa3Z\x9ex\x83\x81\xb1\x84\xf7g\xfcP\xc7L\x8c\xdf\xa9\xf0\xa2\xffUQ\x08\xa4\xce\xe6|$\x91\x95U5\xf8\x08\x99\xae\xc3`\x8f\x99\x94*\x828\x91\x11p\x80\x06}\xe2)\xf5\xd2@^M\x7f\x88\x9e\x9f\xea\xd4)\x9d#\xe2BV\x10\x02\xd9~\\\x18\xd7\xc7\x92TM\xbf\xdd:a\x0e\xbf\x18EfU +\x8b\xc8d\xb0\xbe\xc1\xa4/J\xf37^G\xe4X\xe7q\xcc\x04Z&\xc2K\x0eC\\Y\x1a\xb8`,\x9a\xb7Z\xad\xa7\xb9Fu\x13u\xa4\x97\xb26#}\xcfK#\xd4\xd85W\xdb\xec\x19\xc6\x00\r\xeb\xfaR\xc9a\xc6F\xea\xab\x9aQ\x87U\xf6\x8cN\x0c\x1a\xday"\xfe\x9e\xc3\x90k#\xf52gJWX\x17\xef\xeb\x98\x01\x9a\xc7\xfa\x95\x88\xcd\xcc\x05\xa3U\xce\xd4\xdf\xc0+\xed:3\xf8x\x14\x99u\t\xbd\x12\x11\x19W1\xd0c\xd8\x8c\xcaX\x8b9\xf3\xf5\x1f1\xa8\xd3UIt\xe1p\xb8\xb3~Z\xf1\x91\r\xcd\xa85\xcc\xdc\x01k\x1f33\x00\xda\xaa\xe4\x0e/\x12\x89\xa4\xb1V\x8b\xbe\xa2\x06\xc5\x15(\xf1\x9b?\xb4\x99\xaf\x00\x80\xc6\xdd)\xc8\x12B\xfc\xcd\n\xad\x14s\xbay\x15\'|\x98\xb1\x13\x1d\x03h$U\x1b?\'\x86C\xa4\x01\x94\xee\x8e\xe8p\x15\x1b8\x8c\xd7\xeax\xfe\xeaF\xb5^\xd1k\xe7z\xb13\xae\xfb\x1aVS\xd39\x13\x03\x9ayttv\x16\xa2\x06\x98EQ\xec\x15"xo\xb8\xa1\x00Ftc\xaf\x17\x05\xdf\xec:\xf3\xce\xa2\x94\xc2&\x1f?\x92\xa6\xd5\xcd3M\x1d`\xa62\xbf\x13Df\x03\r\xd9~\xc2i\n\x97H8\xac\x88i\xdd0\x07,]\xdfZ\xd9^\xd9\xcf\x1b\x94\x96n\x1f1\xf7\xbdUXR)}\xcf\xfe\xa27`\x81V6\xf6rZn\x85\xd2\xf2\xf7\x8f\xcf%\xc3\x05\n\xf8@\xec\x1f1`\xee\x9df}j\xc5\xdc\x18Voit\xf5\xfb-\xc7\xf3\xcf\'\x8a\x7f\x00\x1a\xa5\xeb\xc4C&\xe0\xfdY\x0b&\x0bK\x99A\xafQ\xa7k\x07-\x9e\xab\xc3\xc6\xb6\x94\xd3\x00uZ\x96T%X\xd9\x8b!\x93t\'\x06\xaf\x83I\xd7o\xb7\x9c\\\x91\xc5p\xbfa\xeat]I\xff\xc8O\xf7\x83M\xc8\x10w\xc0\xbb\xb4b\xd2\xf2\xa8\xc3\xfc\xe7|\x94\xc6\xa7ML\x86_m\xb3\x14\x96\x8cz9G\xc8\xd9\xaca\x96\xe6C\x1fr\xa6\xf5@+\x1
8\xa5A\xd3\x04\x9a\xed\xd9\xc8j\xb0\x1f\xa6\xd4X"\xeei0\xd6\n\xea\x01g\xday\x8dB=~\x06\x1d\x95zV\xb7\xab`\xea\x1aB\xba\xc9\x1d\x06\xdf\xb6\xeb\xf3\x9b\n4\xf9N\xd8\xc6c(Y\xb3\x02{\xf3\x0f\n\x15@\xc3\x18\xfeN\xd7f(>\xc0\x9e\xbf3\x0e\x1a\xda\xd2\xa1\xe6\xc9O\xa0\xa8\x81H\xeeb\xdb\xd6\xf9G.\x0c\xb0zU\x9e\x81\xcd\xdf7\x00\x96<\xde( \xab\xd1l\xe0\xc0\xe9\xc3\x8f\x90G\xa9\xf8\xc6\xbc\x1fv\xe5J\xb5\xba\xd9#\'\x81K\xaf\xc5>hu\xed>\xfc)\xe5a\x8cm\xc2F\xcc\x1cZ\xde\xdc\x9f\x0ef\xd1\xf8:-\xfd\xd5\x01;\xea\xc3S\xd4\x8e\xdd\xe5\x19\x80\x86\x8fd\xca\x13\xd1\x1e\xa3\x9e\x0fEX\x1b\x7f\x1c\x1dU-\xd8\xd9F5t\x95 \xa1\xa5\x89\xa8:\xddTg\xf9N\xc5\xc9\xb1\x99\xc7J\xc4\x16\x9a\xd6\xd0\x95\x99 J4\xb5\x7f\xab\x85D\x8b\xffr\xf6<{\xb8\x1d\x0e\xf9\xa9\x13\xb0GnZ\xd6/Z\xfc%\xb3\x99\xae\xcd0f\xe1c\x1e\x9f\r\r\x05\xad\x16{&\x10\xc0\xf8?Z\n\xf1+\xfb\x81\xd5F\x00\x00\x00\x00IEND\xaeB`\x82'
class myLinkLoader:
    """
    Wrapper used as a pisa ``link_callback``: it materializes linked
    resources (css/images) into temporary files and removes those files
    once the loader object is garbage collected.
    """

    def __init__(self, **kw):
        """
        Keyword arguments are stored in ``self.kw`` so ``getFileName``
        could use extra context (e.g. database/port) when resolving paths.
        """
        self.kw = kw
        self.tmpFileList = []  # temp files created so far; removed in __del__

    def __del__(self):
        # BUG FIX: `os` was never imported at module level, so the original
        # `os.remove` raised a NameError during garbage collection.  Import
        # locally to keep the fix self-contained.
        import os
        for path in self.tmpFileList:
            os.remove(path)
        self.tmpFileList = []

    def getFileName(self, path, relative=None):
        """
        Return the name of a temporary file holding the content for *path*,
        or None if the content could not be produced.
        """
        import os
        import tempfile

        log.info("myLinkLoader.getFileName: %r %r %r", path, relative, self.kw)
        try:
            # BUG FIX: `suffix` was unbound for paths without a recognized
            # extension, producing an UnboundLocalError that the broad
            # except silently turned into a None result.
            suffix = ""
            if "." in path:
                new_suffix = "." + path.split(".")[-1].lower()
                if new_suffix in (".css", ".gif", ".jpg", ".png"):
                    suffix = new_suffix
            # mkstemp instead of the race-prone mktemp: the file is created
            # atomically and we write through the returned descriptor.
            fd, tmpPath = tempfile.mkstemp(prefix="pisa-", suffix=suffix)
            tmpFile = os.fdopen(fd, "wb")
            try:
                # Here you may add your own stuff
                tmpFile.write(dummyLoader(path))
            finally:
                tmpFile.close()
            self.tmpFileList.append(tmpPath)
            return tmpPath
        except Exception:
            log.exception("myLinkLoader.getFileName")
        return None
def helloWorld():
    """Render a small demo HTML snippet to ``<this_module>.pdf`` using the
    custom link loader, then open the result in a viewer."""
    filename = __file__ + ".pdf"
    # Bound-method reference: the loader instance stays alive via `lc`, so
    # its __del__ (temp-file cleanup) only runs after the PDF is built.
    lc = myLinkLoader(database="some_name", port=666).getFileName
    pdf = pisa.CreatePDF(
        u"""
        <p>
        Hello <strong>World</strong>
        <p>
        <img src="apath/some.png">
        """,
        # `open` instead of the Python-2-only `file` builtin.
        open(filename, "wb"),
        link_callback=lc,
    )
    if not pdf.err:
        pisa.startViewer(filename)
if __name__ == "__main__":
    # Enable pisa's console logging, then build and show the demo PDF.
    pisa.showLogging()
    helloWorld()
    # print repr(open("img/denker.png", "rb").read())
|
sorgerlab/belpy | refs/heads/master | indra/tests/test_model_checker.py | 2 | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import os
import pickle
import random
import numpy as np
import pygraphviz as pgv
from indra.statements import *
from collections import Counter
from pysb import *
from pysb.core import SelfExporter
from pysb.tools import render_reactions
from indra.databases import hgnc_client
from indra.explanation.model_checker import ModelChecker, _mp_embeds_into, \
_cp_embeds_into, _match_lhs, \
stmt_from_rule, PathResult, \
remove_im_params
from indra.assemblers.pysb.assembler import PysbAssembler, \
set_base_initial_condition
from pysb.tools import species_graph
from pysb.bng import generate_equations
from pysb import kappa
from pysb.testing import with_model
@with_model
def test_mp_embedding():
    """Check _mp_embeds_into: a MonomerPattern embeds into another iff it is
    at least as specific (here, fixing site `other` vs. leaving it open)."""
    # Create a PySB model
    Monomer('A', ['b', 'other'], {'other':['u','p']})
    mp1 = A(other='u')
    mp2 = A()  # no site specified: the most general pattern
    mp3 = A(other='p')
    assert _mp_embeds_into(mp1, mp2)
    assert not _mp_embeds_into(mp2, mp1)
    assert _mp_embeds_into(mp3, mp2)
    assert not _mp_embeds_into(mp2, mp3)
    # Patterns with conflicting site states embed in neither direction.
    assert not _mp_embeds_into(mp3, mp1)
    assert not _mp_embeds_into(mp1, mp3)
@with_model
def test_cp_embedding():
    """Check _cp_embeds_into on ComplexPatterns: embedding holds when the
    first pattern is at least as specific as the second (more sites fixed,
    more binding constraints)."""
    Monomer('A', ['b', 'other'], {'other': ['u','p']})
    Monomer('B', ['b'])
    cp1 = A(b=1, other='p') % B(b=1)
    cp2 = A()
    cp3 = A(b=1, other='u') % B(b=1)
    cp4 = A(other='p')
    cp5 = A(b=1) % B(b=1)
    # FIXME Some tests not performed because ComplexPatterns for second term
    # FIXME are not yet supported
    assert _cp_embeds_into(cp1, cp2)
    #assert not _cp_embeds_into(cp1, cp3)
    assert _cp_embeds_into(cp1, cp4)
    #assert not _cp_embeds_into(cp1, cp5)
    #assert not _cp_embeds_into(cp2, cp1)
    #assert not _cp_embeds_into(cp2, cp3)
    assert not _cp_embeds_into(cp2, cp4)
    #assert not _cp_embeds_into(cp2, cp5)
    #assert not _cp_embeds_into(cp3, cp1)
    assert _cp_embeds_into(cp3, cp2)
    assert not _cp_embeds_into(cp3, cp4)
    #assert _cp_embeds_into(cp3, cp5)
    #assert not _cp_embeds_into(cp4, cp1)
    assert _cp_embeds_into(cp4, cp2)
    #assert not _cp_embeds_into(cp4, cp3)
    #assert not _cp_embeds_into(cp4, cp5)
    #assert not _cp_embeds_into(cp5, cp1)
    assert _cp_embeds_into(cp5, cp2)
    #assert not _cp_embeds_into(cp5, cp3)
    assert not _cp_embeds_into(cp5, cp4)
@with_model
def test__match_lhs():
    """Check _match_lhs: a pattern matches a rule only when it embeds into
    a reactant on the rule's left-hand side."""
    Monomer('A', ['other'], {'other': ['u', 'p']})
    Monomer('B', ['T185'], {'T185': ['u', 'p']})
    rule = Rule('A_phos_B', A() + B(T185='u') >> A() + B(T185='p'),
                Parameter('k', 1))
    # The generic A() pattern embeds into the rule's A() reactant.
    matching_rules = _match_lhs(A(), model.rules)
    assert len(matching_rules) == 1
    assert matching_rules[0] == rule
    # A(other='u') is *more* specific than the LHS A(), so no match.
    matching_rules = _match_lhs(A(other='u'), model.rules)
    assert len(matching_rules) == 0
"""
@with_model
def test_match_rhs():
Monomer('A', ['other'], {'other':['u', 'p']})
Monomer('B', ['T185'], {'T185':['u', 'p']})
rule = Rule('A_phos_B', A() + B(T185='u') >> A() + B(T185='p'),
Parameter('k', 1))
matching_rules = _match_rhs(B(T185='p'), model.rules)
assert len(matching_rules) == 1
assert matching_rules[0] == rule
matching_rules = _match_rhs(B(T185='u'), model.rules)
assert len(matching_rules) == 0
matching_rules = _match_rhs(B(), model.rules)
assert len(matching_rules) == 1
assert matching_rules[0] == rule
"""
@with_model
def test_one_step_phosphorylation():
    """A direct one-rule phosphorylation model should yield a single
    two-node path (rule -> phospho-site observable) for the statement."""
    # Create the statement
    a = Agent('A', db_refs={'HGNC': '1'})
    b = Agent('B', db_refs={'HGNC': '2'})
    st = Phosphorylation(a, b, 'T', '185')
    # Now create the PySB model
    Monomer('A')
    Monomer('B', ['T185'], {'T185': ['u', 'p']})
    Rule('A_phos_B', A() + B(T185='u') >> A() + B(T185='p'),
         Parameter('k', 1))
    Initial(A(), Parameter('A_0', 100))
    Initial(B(T185='u'), Parameter('B_0', 100))
    # Add annotations
    Annotation(A, 'http://identifiers.org/hgnc/HGNC:1')
    Annotation(B, 'http://identifiers.org/hgnc/HGNC:2')
    Annotation('A_phos_B', 'A', 'rule_has_subject')
    Annotation('A_phos_B', 'B', 'rule_has_object')
    # Site annotations map the T185 site/state to residue T, position 185.
    B.site_annotations = [
        Annotation(('T185', 'p'), 'phosphorylation', 'is_modification'),
        Annotation('T185', 'T', 'is_residue'),
        Annotation('T185', '185', 'is_position'),
    ]
    mc = ModelChecker(model, [st])
    results = mc.check_model()
    assert len(results) == 1
    assert isinstance(results[0], tuple)
    assert results[0][0] == st
    pr = results[0][1]
    assert isinstance(pr, PathResult)
    assert pr.paths == [(('A_phos_B', 1), ('B_T185_p_obs', 1))]
@with_model
def test_two_step_phosphorylation():
    """A bind/unbind/catalyze model should still reduce to the single
    catalytic-rule path for the phosphorylation statement."""
    # Create the statement
    a = Agent('A', db_refs={'HGNC': '1'})
    b = Agent('B', db_refs={'HGNC': '2'})
    st = Phosphorylation(a, b, 'T', '185')
    # Now create the PySB model
    Monomer('A', ['b', 'other'], {'other': ['u','p']})
    Monomer('B', ['b', 'T185'], {'T185': ['u', 'p']})
    Rule('A_bind_B', A(b=None) + B(b=None, T185='u') >>
         A(b=1) % B(b=1, T185='u'), Parameter('kf', 1))
    Rule('A_bind_B_rev', A(b=1) % B(b=1, T185='u') >>
         A(b=None) + B(b=None, T185='u'), Parameter('kr', 1))
    Rule('A_phos_B', A(b=1) % B(b=1, T185='u') >>
         A(b=None) + B(b=None, T185='p'),
         Parameter('kcat', 1))
    Initial(A(b=None, other='p'), Parameter('Ap_0', 100))
    Initial(A(b=None, other='u'), Parameter('Au_0', 100))
    Initial(B(b=None, T185='u'), Parameter('B_0', 100))
    # Add annotations
    Annotation(A, 'http://identifiers.org/hgnc/HGNC:1')
    Annotation(B, 'http://identifiers.org/hgnc/HGNC:2')
    Annotation('A_phos_B', 'A', 'rule_has_subject')
    Annotation('A_phos_B', 'B', 'rule_has_object')
    B.site_annotations = [
        Annotation(('T185', 'p'), 'phosphorylation', 'is_modification'),
        Annotation('T185', 'T', 'is_residue'),
        Annotation('T185', '185', 'is_position'),
    ]
    #with open('model_rxn.dot', 'w') as f:
    #    f.write(render_reactions.run(model))
    #with open('species_2step.dot', 'w') as f:
    #    f.write(species_graph.run(model))
    #generate_equations(model)
    # Now check the model
    mc = ModelChecker(model, [st])
    results = mc.check_model()
    assert len(results) == 1
    assert isinstance(results[0], tuple)
    assert results[0][0] == st
    pr = results[0][1]
    # Only the catalytic rule appears in the path; binding steps do not.
    assert pr.paths == [(('A_phos_B', 1), ('B_T185_p_obs', 1))]
def test_pysb_assembler_phospho_policies():
    """Check the same phosphorylation statement against models assembled
    with the one_step, two_step, and interactions_only policies."""
    a = Agent('A', db_refs={'HGNC': '1'})
    b = Agent('B', db_refs={'HGNC': '2'})
    st = Phosphorylation(a, b, 'T', '185')
    pa = PysbAssembler()
    pa.add_statements([st])
    # Try two step
    pa.make_model(policies='two_step')
    mc = ModelChecker(pa.model, [st])
    results = mc.check_model()
    assert len(results) == 1
    assert isinstance(results[0], tuple)
    assert results[0][0] == st
    pr = results[0][1]
    assert pr.paths == [(('A_phosphorylation_B_T185', 1), ('B_T185_p_obs', 1))]
    # Try one step
    pa.make_model(policies='one_step')
    mc = ModelChecker(pa.model, [st])
    results = mc.check_model()
    assert len(results) == 1
    assert isinstance(results[0], tuple)
    assert results[0][0] == st
    pr = results[0][1]
    assert pr.path_found
    assert pr.paths == [(('A_phosphorylation_B_T185', 1), ('B_T185_p_obs', 1))]
    # Try interactions_only: binding alone cannot explain a modification,
    # so no path should be found.
    pa.make_model(policies='interactions_only')
    mc = ModelChecker(pa.model, [st])
    results = mc.check_model()
    assert len(results) == 1
    assert isinstance(results[0], tuple)
    assert results[0][0] == st
    pr = results[0][1]
    assert not pr.path_found
"""
def test_ras_220_network():
ras_220_results_path = os.path.join('../../models/ras_220_genes'
'/ras_220_gn_related2_stmts.pkl')
#ras_220_results_path = 'braf_dusp6_stmts.pkl'
#ras_220_results_path = 'braf_dusp6_small.pkl'
with open(ras_220_results_path, 'rb') as f:
ras220_stmts = pickle.load(f)
ras220_stmts = [s for s in ras220_stmts
if isinstance(s, Modification) or
isinstance(s, ActiveForm)]
print("Done loading")
# Build a PySB model from the Ras 220 statements
pa = PysbAssembler()
pa.add_statements(ras220_stmts)
pa.make_model(policies='one_step')
# Now create an indirect statement to check the model against
egfr = Agent('EGFR')
braf = Agent('BRAF')
dusp6 = Agent('DUSP6')
stmt1 = Phosphorylation(braf, dusp6, 'S', '159')
stmt2 = Phosphorylation(egfr, dusp6, 'S', '159')
# Check model
stmts = [stmt1, stmt2]
mc = ModelChecker(pa.model, stmts)
checks = mc.check_model()
assert len(checks) == 2
assert isinstance(checks[0], tuple)
assert checks[0][0] == stmt1
assert checks[0][1] == True
assert checks[1][0] == stmt2
assert checks[1][1] == False
# Now try again, with a two_step policy
# Skip this, building the influence map takes a very long time
#pa.make_model(policies='two_step')
#mc = ModelChecker(pa.model, [stmt1, stmt2])
#checks = mc.check_model()
#print checks
#assert len(checks) == 2
#assert isinstance(checks[0], tuple)
#assert checks[0][0] == stmt1
#assert checks[0][1] == True
#assert checks[1][0] == stmt2
#assert checks[1][1] == False
# Now with an interactions_only policy
pa.make_model(policies='interactions_only')
mc = ModelChecker(pa.model, [stmt1, stmt2])
checks = mc.check_model()
assert len(checks) == 2
assert isinstance(checks[0], tuple)
assert checks[0][0] == stmt1
assert checks[0][1] == False
assert checks[1][0] == stmt2
assert checks[1][1] == False
"""
"""
def test_path_polarity():
im = pgv.AGraph('im_polarity.dot')
path1 = ['BRAF_phospho_MAPK1_T185_1', 'MAPK1_phospho_DUSP6_S159_1']
path2 = ['BRAF_phospho_MAPK1_T185_1', 'BRAF_phospho_MAPK1_T185_3',
'MAPK1_phospho_DUSP6_S159_1']
assert _positive_path(im, path1)
assert not _positive_path(im, path2)
"""
@with_model
def test_consumption_rule():
    """An indirect positive path: Pervanadate binds (consumes) DUSP, which
    would otherwise dephosphorylate MAPK1 -- two negative edges compose to
    a net increase in phospho-MAPK1."""
    pvd = Agent('Pervanadate', db_refs={'HGNC': '1'})
    erk = Agent('MAPK1', db_refs={'HGNC': '2'})
    stmt = Phosphorylation(pvd, erk, 'T', '185')
    # Now make the model
    Monomer('Pervanadate', ['b'])
    Monomer('DUSP', ['b'])
    Monomer('MAPK1', ['b', 'T185'], {'T185': ['u', 'p']})
    Rule('Pvd_binds_DUSP',
         Pervanadate(b=None) + DUSP(b=None) >>
         Pervanadate(b=1) % DUSP(b=1),
         Parameter('k1', 1))
    Rule('Pvd_binds_DUSP_rev',
         Pervanadate(b=1) % DUSP(b=1) >>
         Pervanadate(b=None) + DUSP(b=None),
         Parameter('k2', 1))
    Rule('DUSP_binds_MAPK1_phosT185',
         DUSP(b=None) + MAPK1(b=None, T185='p') >>
         DUSP(b=1) % MAPK1(b=1, T185='p'),
         Parameter('k3', 1))
    Rule('DUSP_binds_MAPK1_phosT185_rev',
         DUSP(b=1) % MAPK1(b=1, T185='p') >>
         DUSP(b=None) + MAPK1(b=None, T185='p'),
         Parameter('k4', 1))
    Rule('DUSP_dephos_MAPK1_at_T185',
         DUSP(b=1) % MAPK1(b=1, T185='p') >>
         DUSP(b=None) % MAPK1(b=None, T185='u'),
         Parameter('k5', 1))
    Annotation(Pervanadate, 'http://identifiers.org/hgnc/HGNC:1')
    Annotation(MAPK1, 'http://identifiers.org/hgnc/HGNC:2')
    Annotation('Pvd_binds_DUSP', 'Pervanadate', 'rule_has_subject')
    Annotation('Pvd_binds_DUSP', 'Pervanadate', 'rule_has_object')
    Annotation('Pvd_binds_DUSP', 'DUSP', 'rule_has_subject')
    Annotation('Pvd_binds_DUSP', 'DUSP', 'rule_has_object')
    Annotation('Pvd_binds_DUSP_rev', 'Pervanadate', 'rule_has_subject')
    Annotation('Pvd_binds_DUSP_rev', 'Pervanadate', 'rule_has_object')
    Annotation('Pvd_binds_DUSP_rev', 'DUSP', 'rule_has_subject')
    Annotation('Pvd_binds_DUSP_rev', 'DUSP', 'rule_has_object')
    Annotation('DUSP_dephos_MAPK1_at_T185', 'DUSP', 'rule_has_subject')
    Annotation('DUSP_dephos_MAPK1_at_T185', 'MAPK1', 'rule_has_object')
    MAPK1.site_annotations = [
        Annotation(('T185', 'p'), 'phosphorylation', 'is_modification'),
        Annotation('T185', 'T', 'is_residue'),
        Annotation('T185', '185', 'is_position'),
    ]
    # Now check the model against the statement
    mc = ModelChecker(model, [stmt])
    checks = mc.check_model()
    assert len(checks) == 1
    assert isinstance(checks[0], tuple)
    assert checks[0][0] == stmt
    pr = checks[0][1]
    # Two -1 edges (sequestration, dephosphorylation) give a net +1 effect
    # on the phospho observable.
    assert pr.paths == [(('Pvd_binds_DUSP', 1),
                         ('DUSP_binds_MAPK1_phosT185', -1),
                         ('DUSP_dephos_MAPK1_at_T185', -1),
                         ('MAPK1_T185_p_obs', 1))]
def test_dephosphorylation():
    """A Dephosphorylation statement should be explained (negative edge to
    the phospho observable) under one_step and two_step policies, but not
    under interactions_only."""
    dusp = Agent('DUSP6', db_refs={'HGNC':'1'})
    mapk1 = Agent('MAPK1', db_refs={'HGNC':'2'})
    stmt = Dephosphorylation(dusp, mapk1, 'T', '185')

    def check_policy(policy, result):
        # Assemble a fresh model under `policy` and compare the found
        # paths against `result`.
        pysba = PysbAssembler()
        pysba.add_statements([stmt])
        pysba.make_model(policies=policy)
        mc = ModelChecker(pysba.model, [stmt])
        checks = mc.check_model()
        assert len(checks) == 1
        assert isinstance(checks[0], tuple)
        assert checks[0][0] == stmt
        pr = checks[0][1]
        assert pr.paths == result
    check_policy('one_step', [(('DUSP6_dephosphorylation_MAPK1_T185', 1),
                               ('MAPK1_T185_p_obs', -1))])
    check_policy('two_step', [(('DUSP6_dephosphorylation_MAPK1_T185', 1),
                               ('MAPK1_T185_p_obs', -1))])
    check_policy('interactions_only', [])
@with_model
def test_invalid_modification():
    """A statement about T185 must not be explained by a model that only
    phosphorylates Y187 (site mismatch)."""
    # Override the shutoff of self export in psyb_assembler
    # Create the statement
    a = Agent('A')
    b = Agent('B')
    st = Phosphorylation(a, b, 'T', '185')
    # Now create the PySB model
    Monomer('A')
    Monomer('B', ['Y187'], {'Y187':['u', 'p']})
    Rule('A_phos_B', A() + B(Y187='u') >> A() + B(Y187='p'),
         Parameter('k', 1))
    #Initial(A(), Parameter('A_0', 100))
    #Initial(B(T187='u'), Parameter('B_0', 100))
    #with open('model_rxn.dot', 'w') as f:
    #    f.write(render_reactions.run(model))
    #with open('species_1step.dot', 'w') as f:
    #    f.write(species_graph.run(model))
    # Now check the model
    mc = ModelChecker(model, [st])
    results = mc.check_model()
    # NOTE(review): only the result count is asserted; the commented-out
    # asserts suggest the path outcome was intentionally left unchecked.
    assert len(results) == 1
    #assert isinstance(results[0], tuple)
    #assert results[0][0] == st
    #assert results[0][1] == True
def _path_polarity_stmt_list():
    """Return the four statements shared by the path-polarity tests:
    (de)phosphorylation of C at T185, with and without an explicit enzyme.

    The unused Agent('B') construction from the original was removed.
    """
    a = Agent('A', db_refs={'HGNC': '1'})
    c = Agent('C', db_refs={'HGNC': '3'})
    st1 = Phosphorylation(a, c, 'T', '185')
    st2 = Dephosphorylation(a, c, 'T', '185')
    st3 = Phosphorylation(None, c, 'T', '185')
    st4 = Dephosphorylation(None, c, 'T', '185')
    return [st1, st2, st3, st4]
@with_model
def test_distinguish_path_polarity1():
    """Test the ability to distinguish a positive from a negative regulation.

    A activates B, and B dephosphorylates C: only the *dephosphorylation*
    statements (with or without enzyme) should find a path."""
    Monomer('A')
    Monomer('B', ['act'], {'act' : ['y', 'n']})
    Monomer('C', ['T185'], {'T185': ['u', 'p']})
    Parameter('k', 1)
    Rule('A_activate_B', A() + B(act='n') >> A() + B(act='y'), k)
    Rule('B_dephos_C', B(act='y') + C(T185='p') >>
         B(act='y') + C(T185='u'), k)
    Initial(A(), k)
    Initial(B(act='y'), k)
    Initial(C(T185='p'), k)
    Annotation(A, 'http://identifiers.org/hgnc/HGNC:1')
    Annotation(B, 'http://identifiers.org/hgnc/HGNC:2')
    Annotation(C, 'http://identifiers.org/hgnc/HGNC:3')
    Annotation('A_activate_B', 'A', 'rule_has_subject')
    Annotation('A_activate_B', 'B', 'rule_has_object')
    Annotation('B_dephos_C', 'B', 'rule_has_subject')
    Annotation('B_dephos_C', 'C', 'rule_has_object')
    C.site_annotations = [
        Annotation(('T185', 'p'), 'phosphorylation', 'is_modification'),
        Annotation('T185', 'T', 'is_residue'),
        Annotation('T185', '185', 'is_position'),
    ]
    # Create the model checker
    stmts = _path_polarity_stmt_list()
    mc = ModelChecker(model, stmts)
    results = mc.check_model()
    assert len(results) == len(stmts)
    assert isinstance(results[0], tuple)
    path_results = [res[1] for res in results]
    # Phosphorylation statements find no path in a dephos-only model.
    assert path_results[0].paths == []
    assert path_results[1].paths == [(('A_activate_B', 1), ('B_dephos_C', 1),
                                      ('C_T185_p_obs', -1))]
    assert path_results[2].paths == []
    assert path_results[3].paths == [(('B_dephos_C', 1), ('C_T185_p_obs', -1))]
@with_model
def test_distinguish_path_polarity2():
    """Test the ability to distinguish a positive from a negative regulation.

    A *inhibits* B, and B dephosphorylates C: so A has a net positive
    effect on phospho-C, while B's direct effect is negative."""
    Monomer('A')
    Monomer('B', ['act'], {'act' : ['y', 'n']})
    Monomer('C', ['T185'], {'T185': ['u', 'p']})
    Parameter('k', 1)
    Rule('A_inhibit_B', A() + B(act='y') >> A() + B(act='n'), k)
    Rule('B_dephos_C', B(act='y') + C(T185='p') >>
         B(act='y') + C(T185='u'), k)
    Initial(A(), k)
    Initial(B(act='y'), k)
    Initial(C(T185='p'), k)
    Annotation(A, 'http://identifiers.org/hgnc/HGNC:1')
    Annotation(B, 'http://identifiers.org/hgnc/HGNC:2')
    Annotation(C, 'http://identifiers.org/hgnc/HGNC:3')
    Annotation('A_inhibit_B', 'A', 'rule_has_subject')
    Annotation('A_inhibit_B', 'B', 'rule_has_object')
    Annotation('B_dephos_C', 'B', 'rule_has_subject')
    Annotation('B_dephos_C', 'C', 'rule_has_object')
    C.site_annotations = [
        Annotation(('T185', 'p'), 'phosphorylation', 'is_modification'),
        Annotation('T185', 'T', 'is_residue'),
        Annotation('T185', '185', 'is_position'),
    ]
    # Create the model checker
    stmts = _path_polarity_stmt_list()
    mc = ModelChecker(model, stmts)
    results = mc.check_model()
    assert len(results) == len(stmts)
    assert isinstance(results[0], tuple)
    # Two negative edges (inhibition, dephosphorylation) compose to +1.
    assert results[0][1].paths == [(('A_inhibit_B', 1), ('B_dephos_C', -1),
                                    ('C_T185_p_obs', 1))]
    assert results[1][1].paths == []
    assert results[2][1].paths == [(('A_inhibit_B', 1), ('B_dephos_C', -1),
                                    ('C_T185_p_obs', 1))]
    assert results[3][1].paths == [(('B_dephos_C', 1), ('C_T185_p_obs', -1))]
def test_check_activation():
    """Activation and Inhibition statements should map to positively and
    negatively signed paths to the activity observables, respectively."""
    a = Agent('A', db_refs={'HGNC': '1'})
    b = Agent('B', db_refs={'HGNC': '2'})
    c = Agent('C', db_refs={'HGNC': '3'})
    st1 = Activation(a, b)
    st2 = Inhibition(b, c, 'kinase')
    stmts = [st1, st2]
    # Create the model
    pa = PysbAssembler()
    pa.add_statements(stmts)
    pa.make_model(policies='one_step')
    mc = ModelChecker(pa.model, stmts)
    results = mc.check_model()
    assert len(results) == len(stmts)
    assert isinstance(results[0], tuple)
    assert results[0][1].paths == [(('A_activates_B_activity', 1),
                                    ('B_activity_active_obs', 1))]
    assert results[1][1].paths == [(('B_deactivates_C_kinase', 1),
                                    ('C_kinase_active_obs', -1))]
@with_model
def test_none_phosphorylation_stmt():
    """Enzyme-less (None subject) statements: the T185 one should be
    explained by the model's single rule, the Y187 one should not."""
    # Create the statement
    b = Agent('B', db_refs={'HGNC': '2'})
    st1 = Phosphorylation(None, b, 'T', '185')
    st2 = Phosphorylation(None, b, 'Y', '187')
    stmts = [st1, st2]
    # Now create the PySB model
    Monomer('A')
    Monomer('B', ['T185', 'Y187'], {'T185':['u', 'p'], 'Y187': ['u', 'p']})
    Rule('A_phos_B', A() + B(T185='u') >> A() + B(T185='p'),
         Parameter('k', 1))
    Initial(A(), Parameter('A_0', 100))
    Initial(B(T185='u', Y187='p'), Parameter('B_0', 100))
    Annotation(A, 'http://identifiers.org/hgnc/HGNC:1')
    Annotation(B, 'http://identifiers.org/hgnc/HGNC:2')
    B.site_annotations = [
        Annotation(('T185', 'p'), 'phosphorylation', 'is_modification'),
        Annotation('T185', 'T', 'is_residue'),
        Annotation('T185', '185', 'is_position'),
        Annotation(('Y187', 'p'), 'phosphorylation', 'is_modification'),
        Annotation('Y187', 'Y', 'is_residue'),
        Annotation('Y187', '187', 'is_position'),
    ]
    mc = ModelChecker(model, stmts)
    results = mc.check_model()
    assert len(results) == 2
    assert isinstance(results[0], tuple)
    assert results[0][0] == st1
    assert results[0][1].paths == [(('A_phos_B', 1), ('B_T185_p_obs', 1))]
    assert results[1][0] == st2
    # No rule phosphorylates Y187, so no path exists for st2.
    assert results[1][1].paths == []
@with_model
def test_phosphorylation_annotations():
    """Site annotations (residue/position) should let the checker match a
    statement's T185 to the model's differently-named 'Thr185' site, while
    still rejecting the unmodified Y187 statement."""
    # Create the statement
    a = Agent('MEK1', db_refs={'HGNC': '6840'})
    b = Agent('ERK2', db_refs={'HGNC': '6871'})
    st1 = Phosphorylation(a, b, 'T', '185')
    st2 = Phosphorylation(a, b, None, None)
    st3 = Phosphorylation(a, b, 'Y', '187')
    # Now create the PySB model
    Monomer('A_monomer')
    Monomer('B_monomer', ['Thr185', 'Y187'],
            {'Thr185': ['un', 'phos'], 'Y187': ['u', 'p']})
    Rule('A_phos_B', A_monomer() + B_monomer(Thr185='un') >>
         A_monomer() + B_monomer(Thr185='phos'),
         Parameter('k', 1))
    Initial(A_monomer(), Parameter('A_0', 100))
    Initial(B_monomer(Thr185='un', Y187='u'), Parameter('B_0', 100))
    # Add agent grounding
    Annotation(A_monomer, 'http://identifiers.org/hgnc/HGNC:6840')
    Annotation(B_monomer, 'http://identifiers.org/hgnc/HGNC:6871')
    Annotation('A_phos_B', 'A_monomer', 'rule_has_subject')
    Annotation('A_phos_B', 'B_monomer', 'rule_has_object')
    # Add annotations to the sites/states of the Monomer itself
    B_annot = [
        Annotation('Thr185', 'T', 'is_residue'),
        Annotation('Thr185', '185', 'is_position'),
        Annotation(('Thr185', 'phos'), 'phosphorylation', 'is_modification'),
        Annotation('Y187', 'Y', 'is_residue'),
        Annotation('Y187', '187', 'is_position'),
        Annotation(('Y187', 'p'), 'phosphorylation', 'is_modification'),
    ]
    B_monomer.site_annotations = B_annot
    mc = ModelChecker(model, [st1, st2, st3])
    results = mc.check_model()
    assert len(results) == 3
    assert isinstance(results[0], tuple)
    assert results[0][0] == st1
    assert results[0][1].paths == [(('A_phos_B', 1),
                                    ('B_monomer_Thr185_phos_obs', 1))]
    # st2 specifies no site, so any phosphorylation suffices.
    assert results[1][0] == st2
    assert results[1][1].paths == [(('A_phos_B', 1),
                                    ('B_monomer_Thr185_phos_obs', 1))]
    # st3 targets Y187, which no rule modifies.
    assert results[2][0] == st3
    assert results[2][1].paths == []
@with_model
def test_activation_annotations():
    """Activation statements should be explained via the has_active_pattern
    annotation: phospho-Thr185 is declared as B_monomer's active form.

    Fixes a copy-paste bug in the original: the last assertion re-checked
    ``results[1][1]`` instead of ``results[2][1]``, so st3's path result was
    never actually verified.
    """
    # Create the statement
    a = Agent('MEK1', db_refs={'HGNC': '6840'})
    b = Agent('ERK2', db_refs={'HGNC': '6871'})
    st1 = Phosphorylation(a, b, 'T', '185')
    st2 = Activation(a, b)
    st3 = Activation(a, b, 'kinase')
    # Now create the PySB model
    Monomer('A_monomer')
    Monomer('B_monomer', ['Thr185', 'Y187'],
            {'Thr185': ['un', 'phos'], 'Y187': ['u', 'p']})
    Rule('A_phos_B', A_monomer() + B_monomer(Thr185='un') >>
         A_monomer() + B_monomer(Thr185='phos'),
         Parameter('k', 1))
    Initial(A_monomer(), Parameter('A_0', 100))
    Initial(B_monomer(Thr185='un', Y187='u'), Parameter('B_0', 100))
    # Add agent grounding
    Annotation(A_monomer, 'http://identifiers.org/hgnc/HGNC:6840')
    Annotation(B_monomer, 'http://identifiers.org/hgnc/HGNC:6871')
    # Declare phospho-Thr185 as the active form of B_monomer.
    Annotation(B_monomer, {'Thr185':'phos'}, 'has_active_pattern')
    Annotation('A_phos_B', 'A_monomer', 'rule_has_subject')
    Annotation('A_phos_B', 'B_monomer', 'rule_has_object')
    # Add annotations to the sites/states of the Monomer itself
    B_annot = [
        Annotation('Thr185', 'T', 'is_residue'),
        Annotation('Thr185', '185', 'is_position'),
        Annotation(('Thr185', 'phos'), 'phosphorylation', 'is_modification'),
        Annotation('Y187', 'Y', 'is_residue'),
        Annotation('Y187', '187', 'is_position'),
        Annotation(('Y187', 'p'), 'phosphorylation', 'is_modification'),
    ]
    B_monomer.site_annotations = B_annot
    mc = ModelChecker(model, [st1, st2, st3])
    results = mc.check_model()
    assert len(results) == 3
    assert isinstance(results[0], tuple)
    assert results[0][0] == st1
    assert results[0][1].paths == [(('A_phos_B', 1),
                                    ('B_monomer_Thr185_phos_obs', 1))]
    assert results[1][0] == st2
    assert results[1][1].paths == [(('A_phos_B', 1),
                                    ('B_monomer_Thr185_phos_obs', 1))]
    assert results[2][0] == st3
    # NOTE(review): expected to mirror st2's path, since the only declared
    # active pattern is phospho-Thr185 -- confirm against ModelChecker's
    # handling of activity-specific ('kinase') Activation statements.
    assert results[2][1].paths == [(('A_phos_B', 1),
                                    ('B_monomer_Thr185_phos_obs', 1))]
def test_multitype_path():
    """Test causal chain involving Complex, Gef, Activation"""
    # Agents of the EGFR -> GRB2 -> SOS1 -> KRAS -> BRAF cascade; bound
    # conditions encode the complexes formed along the way.
    egfr = Agent('EGFR', db_refs={'HGNC': '3236'})
    grb2 = Agent('GRB2', db_refs={'HGNC': '4566'})
    grb2_egfr = Agent('GRB2', bound_conditions=[BoundCondition(egfr)],
                      db_refs={'HGNC': '4566'})
    sos1 = Agent('SOS1', db_refs={'HGNC': '11187'}, )
    sos1_grb2 = Agent('SOS1', bound_conditions=[BoundCondition(grb2)],
                      db_refs={'HGNC': '11187'}, )
    kras = Agent('KRAS', db_refs={'HGNC': '6407'})
    kras_g = Agent('KRAS', activity=ActivityCondition('gtpbound', True),
                   db_refs={'HGNC': '6407'})
    braf = Agent('BRAF', db_refs={'HGNC': '1097'})
    def check_stmts(stmts, paths):
        # Helper: assemble the given statements and verify that checking
        # the two indirect Activation statements yields exactly the
        # expected path lists.
        pa = PysbAssembler()
        pa.add_statements(stmts)
        pa.make_model(policies='one_step')
        stmts_to_check = [
            Activation(egfr, kras, 'gtpbound'),
            Activation(egfr, braf, 'kinase')
        ]
        mc = ModelChecker(pa.model, stmts_to_check)
        results = mc.check_model()
        assert len(results) == len(stmts_to_check)
        assert isinstance(results[0], tuple)
        assert results[0][1].paths == paths[0], results[0][1].paths
        assert results[1][1].paths == paths[1], results[1][1].paths
    # Check with the ActiveForm
    stmts1 = [
        Complex([egfr, grb2]),
        Complex([sos1, grb2_egfr]),
        ActiveForm(sos1_grb2, 'activity', True),
        Activation(sos1_grb2, kras, 'gtpbound'),
        Activation(kras_g, braf, 'kinase')
    ]
    check_stmts(stmts1, ([(('EGFR_GRB2_bind', 1), ('SOS1_GRB2_EGFR_bind', 1),
                           ('SOS1_GRB2_activates_KRAS_gtpbound', 1),
                           ('KRAS_gtpbound_active_obs', 1))],
                         [(('EGFR_GRB2_bind', 1), ('SOS1_GRB2_EGFR_bind', 1),
                           ('SOS1_GRB2_activates_KRAS_gtpbound', 1),
                           ('KRAS_gtp_activates_BRAF_kinase', 1),
                           ('BRAF_kinase_active_obs', 1))]))
    # Check without the ActiveForm
    stmts2 = [
        Complex([egfr, grb2]),
        Complex([sos1, grb2_egfr]),
        Gef(sos1_grb2, kras),
        Activation(kras_g, braf, 'kinase')
    ]
    check_stmts(stmts2, ([(('EGFR_GRB2_bind', 1), ('SOS1_GRB2_EGFR_bind', 1),
                           ('SOS1_GRB2_activates_KRAS', 1),
                           ('KRAS_gtpbound_active_obs', 1))],
                         [(('EGFR_GRB2_bind', 1), ('SOS1_GRB2_EGFR_bind', 1),
                           ('SOS1_GRB2_activates_KRAS', 1),
                           ('KRAS_gtp_activates_BRAF_kinase', 1),
                           ('BRAF_kinase_active_obs', 1))]))
def test_grounded_modified_enzyme():
    """Check if the model checker can use semantic annotations to match mods
    on the enzyme, not just the substrate, of a phosphorylation statement."""
    # Model has MEK1 phosphorylated at a specific site (S202); the check
    # uses a generic phospho-MEK1, which should still match.
    erk = Agent('ERK2', db_refs={'HGNC': '6871'})
    mek_s202 = Agent('MEK1', mods=[ModCondition('phosphorylation', 'S', '202')],
                     db_refs={'HGNC': '6840'})
    mek_phos = Agent('MEK1', mods=[ModCondition('phosphorylation', None, None)],
                     db_refs={'HGNC': '6840'})
    stmt_to_model = Phosphorylation(mek_s202, erk, None, None)
    stmt_to_check = Phosphorylation(mek_phos, erk, None, None)
    assembler = PysbAssembler()
    assembler.add_statements([stmt_to_model])
    assembler.make_model(policies='one_step')
    checker = ModelChecker(assembler.model, [stmt_to_check])
    results = checker.check_model()
    assert len(results) == 1
    checked_stmt, path_result = results[0]
    assert checked_stmt == stmt_to_check
    expected_paths = [(('MEK1_phosphoS202_phosphorylation_ERK2_phospho', 1),
                       ('ERK2_phospho_p_obs', 1))]
    assert path_result.paths == expected_paths
def test_check_ubiquitination():
    """An assembled Ubiquitination statement should be satisfied by the
    model built from it, via the expected ubiquitination path."""
    xiap = Agent('XIAP', db_refs={'HGNC': '592'})
    casp3 = Agent('CASP3', db_refs={'HGNC': '1504'})
    stmt = Ubiquitination(xiap, casp3)
    assembler = PysbAssembler()
    assembler.add_statements([stmt])
    assembler.make_model(policies='one_step')
    checker = ModelChecker(assembler.model, [stmt])
    checks = checker.check_model()
    assert len(checks) == 1
    first_check = checks[0]
    assert isinstance(first_check, tuple)
    assert first_check[0] == stmt
    assert first_check[1].paths == [(('XIAP_ubiquitination_CASP3_ub', 1),
                                     ('CASP3_ub_y_obs', 1))]
def test_check_rule_subject1():
    """The checker must respect rule subjects: a MEK->ERK phosphorylation
    rule must not satisfy ERK phosphorylating ERK."""
    mek = Agent('MEK1', db_refs={'HGNC': '6840'})
    erk = Agent('ERK2', db_refs={'HGNC': '6871'})
    assembler = PysbAssembler()
    assembler.add_statements([Phosphorylation(mek, erk)])
    assembler.make_model(policies='one_step')
    # Check against stmt: should not validate ERK phosphorylates ERK
    stmt_to_check = Phosphorylation(erk, erk)
    checker = ModelChecker(assembler.model, [stmt_to_check])
    checks = checker.check_model()
    assert len(checks) == 1
    checked_stmt, path_result = checks[0]
    assert checked_stmt == stmt_to_check
    assert path_result.paths == []
def test_gef_activation():
    # A Gef statement should satisfy the corresponding Activation with
    # 'gtpbound' activity.
    sos = Agent('SOS1', db_refs={'HGNC': '1'})
    ras = Agent('KRAS', db_refs={'HGNC': '2'})
    gef_stmt = Gef(sos, ras)
    act_stmt = Activation(sos, ras, 'gtpbound')
    # Check that the activation is satisfied by the Gef
    pysba = PysbAssembler()
    pysba.add_statements([gef_stmt])
    pysba.make_model(policies='one_step')
    mc = ModelChecker(pysba.model, [act_stmt])
    checks = mc.check_model()
    assert len(checks) == 1
    assert checks[0][0] == act_stmt
    assert checks[0][1].paths == [(('SOS1_activates_KRAS', 1),
                                   ('KRAS_gtpbound_active_obs', 1))]
    # TODO TODO TODO
    """
    # Check that the Gef is satisfied by the Activation
    # This currently doesn't work because Gef statements aren't checked
    pysba = PysbAssembler()
    pysba.add_statements([act_stmt])
    pysba.make_model(policies='one_step')
    mc = ModelChecker(pysba.model, [gef_stmt])
    checks = mc.check_model()
    assert len(checks) == 1
    assert checks[0][0] == gef_stmt
    assert checks[0][1] == True
    """
def test_gef_rasgtp():
    # A Gef plus a GtpActivation should together satisfy the indirect
    # Activation of the downstream kinase.
    sos = Agent('SOS1', db_refs={'HGNC': '1'})
    ras = Agent('KRAS', db_refs={'HGNC': '2'})
    ras_gtp = Agent('KRAS', activity=ActivityCondition('gtpbound', True),
                    db_refs={'HGNC': '2'})
    raf = Agent('BRAF', db_refs={'HGNC': '3'})
    gef_stmt = Gef(sos, ras)
    rasgtp_stmt = GtpActivation(ras_gtp, raf, 'kinase')
    act_stmt = Activation(sos, raf, 'kinase')
    # Check that the activation is satisfied by the Gef
    pysba = PysbAssembler()
    pysba.add_statements([gef_stmt, rasgtp_stmt])
    pysba.make_model(policies='one_step')
    mc = ModelChecker(pysba.model, [act_stmt])
    checks = mc.check_model()
    assert len(checks) == 1
    assert checks[0][0] == act_stmt
    assert checks[0][1].paths == [(('SOS1_activates_KRAS', 1),
                                   ('KRAS_gtp_activates_BRAF_kinase', 1),
                                   ('BRAF_kinase_active_obs', 1))], \
        checks[0][1].paths
def test_gef_rasgtp_phos():
    # Full cascade Gef -> GtpActivation -> Phosphorylation should satisfy
    # the indirect Phosphorylation(SOS1, MEK).
    sos = Agent('SOS1', db_refs={'HGNC': '1'})
    ras = Agent('KRAS', db_refs={'HGNC': '2'})
    ras_a = Agent('KRAS', activity=ActivityCondition('gtpbound', True),
                  db_refs={'HGNC': '2'})
    raf = Agent('BRAF', db_refs={'HGNC': '3'})
    raf_a = Agent('BRAF', activity=ActivityCondition('kinase', True),
                  db_refs={'HGNC': '3'})
    mek = Agent('MEK', db_refs={'HGNC': '4'})
    gef_stmt = Gef(sos, ras)
    rasgtp_stmt = GtpActivation(ras_a, raf, 'kinase')
    phos = Phosphorylation(raf_a, mek)
    stmt_to_check = Phosphorylation(sos, mek)
    # Assemble and check
    pysba = PysbAssembler()
    pysba.add_statements([gef_stmt, rasgtp_stmt, phos])
    pysba.make_model(policies='one_step')
    mc = ModelChecker(pysba.model, [stmt_to_check])
    checks = mc.check_model()
    assert len(checks) == 1
    assert checks[0][0] == stmt_to_check
    assert checks[0][1].paths == [(('SOS1_activates_KRAS', 1),
                                   ('KRAS_gtp_activates_BRAF_kinase', 1),
                                   ('BRAF_kin_phosphorylation_MEK_phospho', 1),
                                   ('MEK_phospho_p_obs', 1))], \
        checks[0][1].paths
def test_gap_activation():
    # A Gap statement should satisfy the corresponding Inhibition with
    # 'gtpbound' activity (note the -1 polarity on the observable).
    nf1 = Agent('NF1', db_refs={'HGNC': '1'})
    ras = Agent('KRAS', db_refs={'HGNC': '2'})
    gap_stmt = Gap(nf1, ras)
    act_stmt = Inhibition(nf1, ras, 'gtpbound')
    # Check that the activation is satisfied by the Gap
    pysba = PysbAssembler()
    pysba.add_statements([gap_stmt])
    pysba.make_model(policies='one_step')
    mc = ModelChecker(pysba.model, [act_stmt])
    checks = mc.check_model()
    assert len(checks) == 1
    assert checks[0][0] == act_stmt
    assert checks[0][1].paths == [(('NF1_deactivates_KRAS', 1),
                                   ('KRAS_gtpbound_active_obs', -1))]
    # TODO TODO TODO
    """
    # Check that the Gap is satisfied by the Activation
    # This currently doesn't work because Gap statements aren't checked by
    # the ModelChecker
    pysba = PysbAssembler()
    pysba.add_statements([act_stmt])
    pysba.make_model(policies='one_step')
    mc = ModelChecker(pysba.model, [gap_stmt])
    checks = mc.check_model()
    assert len(checks) == 1
    assert checks[0][0] == gap_stmt
    assert checks[0][1] == True
    """
def test_gap_rasgtp():
    # Gap -> GtpActivation chain: inhibiting KRAS should propagate as a
    # negative (-1) influence on downstream BRAF kinase activity.
    nf1 = Agent('NF1', db_refs={'HGNC': '1'})
    ras = Agent('KRAS', db_refs={'HGNC': '2'})
    ras_g = Agent('KRAS', activity=ActivityCondition('gtpbound', True),
                  db_refs={'HGNC': '2'})
    raf = Agent('BRAF', db_refs={'HGNC': '3'})
    gap_stmt = Gap(nf1, ras)
    rasgtp_stmt = GtpActivation(ras_g, raf, 'kinase')
    act_stmt = Inhibition(nf1, raf, 'kinase')
    # Check that the activation is satisfied by the Gap
    pysba = PysbAssembler()
    pysba.add_statements([gap_stmt, rasgtp_stmt])
    pysba.make_model(policies='one_step')
    mc = ModelChecker(pysba.model, [act_stmt])
    checks = mc.check_model()
    assert len(checks) == 1
    assert checks[0][0] == act_stmt
    assert checks[0][1].paths == [(('NF1_deactivates_KRAS', 1),
                                   ('KRAS_gtp_activates_BRAF_kinase', -1),
                                   ('BRAF_kinase_active_obs', -1))], \
        checks[0][1].paths
def test_gap_rasgtp_phos():
    # Gap -> GtpActivation -> Phosphorylation: NF1 should indirectly act
    # as a negative regulator of phospho-MEK (a Dephosphorylation).
    nf1 = Agent('NF1', db_refs={'HGNC': '1'})
    ras = Agent('KRAS', db_refs={'HGNC': '2'})
    ras_g = Agent('KRAS', activity=ActivityCondition('gtpbound', True),
                  db_refs={'HGNC': '2'})
    raf = Agent('BRAF', db_refs={'HGNC': '3'})
    raf_a = Agent('BRAF', activity=ActivityCondition('kinase', True),
                  db_refs={'HGNC': '3'})
    mek = Agent('MEK', db_refs={'HGNC': '4'})
    gap_stmt = Gap(nf1, ras)
    rasgtp_stmt = GtpActivation(ras_g, raf, 'kinase')
    phos = Phosphorylation(raf_a, mek)
    stmt_to_check = Dephosphorylation(nf1, mek)
    # Assemble and check
    pysba = PysbAssembler()
    pysba.add_statements([gap_stmt, rasgtp_stmt, phos])
    pysba.make_model(policies='one_step')
    mc = ModelChecker(pysba.model, [stmt_to_check])
    checks = mc.check_model()
    assert len(checks) == 1
    assert checks[0][0] == stmt_to_check
    assert checks[0][1].paths == \
        [(('NF1_deactivates_KRAS', 1),
          ('KRAS_gtp_activates_BRAF_kinase', -1),
          ('BRAF_kin_phosphorylation_MEK_phospho', -1),
          ('MEK_phospho_p_obs', -1))], checks[0][1].paths
def test_increase_amount():
    """A transcriptional chain TP53 -> X -> MDM2 should satisfy the
    indirect IncreaseAmount(TP53, MDM2) statement."""
    tp53 = Agent('TP53', db_refs={'HGNC': '1'})
    # Consistency fix: the HGNC identifier was an int (2); every other
    # db_refs value in this module is a string.
    x = Agent('X', db_refs={'HGNC': '2'})
    mdm2 = Agent('MDM2', db_refs={'HGNC': '3'})
    stmts = [IncreaseAmount(tp53, x), IncreaseAmount(x, mdm2)]
    stmt_to_check = IncreaseAmount(tp53, mdm2)
    pysba = PysbAssembler()
    pysba.add_statements(stmts)
    pysba.make_model(policies='one_step')
    mc = ModelChecker(pysba.model, [stmt_to_check])
    checks = mc.check_model()
    assert len(checks) == 1
    assert checks[0][0] == stmt_to_check
    assert checks[0][1].paths == [(('TP53_produces_X', 1),
                                   ('X_produces_MDM2', 1),
                                   ('MDM2__obs', 1))]
def test_decrease_amount():
    # TP53 induces MDM2, MDM2 ubiquitinates TP53, and ubiquitinated TP53
    # is degraded: a net negative feedback that should satisfy
    # DecreaseAmount(TP53, TP53).
    tp53 = Agent('TP53', db_refs={'HGNC': '1'})
    tp53u = Agent('TP53', mods=[ModCondition('ubiquitination')],
                  db_refs={'HGNC': '1'})
    mdm2 = Agent('MDM2', db_refs={'HGNC': '3'})
    stmts = [IncreaseAmount(tp53, mdm2),
             Ubiquitination(mdm2, tp53), DecreaseAmount(None, tp53u)]
    stmt_to_check = DecreaseAmount(tp53, tp53)
    pysba = PysbAssembler()
    pysba.add_statements(stmts)
    pysba.make_model(policies='one_step')
    mc = ModelChecker(pysba.model, [stmt_to_check])
    checks = mc.check_model()
    assert len(checks) == 1
    assert checks[0][0] == stmt_to_check
    assert checks[0][1].paths == [(('TP53_produces_MDM2', 1),
                                   ('MDM2_ubiquitination_TP53_ub', 1),
                                   ('TP53_ub_degraded', 1),
                                   ('TP53__obs', -1))]
def test_stmt_from_rule():
    """Round-trip: the rule assembled from a statement maps back to it."""
    mek = Agent('MEK1', db_refs={'HGNC': '6840'})
    erk = Agent('ERK2', db_refs={'HGNC': '6871'})
    source_stmt = Phosphorylation(mek, erk, 'T', '185')
    assembler = PysbAssembler()
    assembler.add_statements([source_stmt])
    assembler.make_model()
    rule_name = assembler.model.rules[0].name
    recovered = stmt_from_rule(rule_name, assembler.model, [source_stmt])
    assert recovered == source_stmt
def test_activate_via_mod():
    """A Phosphorylation plus a matching ActiveForm should satisfy a
    generic Activation statement."""
    mek = Agent('MEK1', db_refs={'HGNC': '6840'})
    erk = Agent('ERK2', db_refs={'HGNC': '6871'})
    erk_active = Agent('ERK2',
                       mods=[ModCondition('phosphorylation', 'T', '185')],
                       db_refs={'HGNC': '6871'})
    phos_stmt = Phosphorylation(mek, erk, 'T', '185')
    af_stmt = ActiveForm(erk_active, 'activity', True)
    act_stmt = Activation(mek, erk)
    assembler = PysbAssembler()
    assembler.add_statements([phos_stmt, af_stmt])
    assembler.make_model()
    checker = ModelChecker(assembler.model, [act_stmt])
    checks = checker.check_model()
    # Make sure it checks out to True
    assert checks[0][1].path_found
def test_observables():
    """Agent observables passed to the checker should be de-duplicated
    against the observables the statements themselves require."""
    mek = Agent('MEK1', db_refs={'HGNC': '6840'})
    erk = Agent('ERK2', db_refs={'HGNC': '6871'})
    erk_phos = Agent('ERK2',
                     mods=[ModCondition('phosphorylation', 'T', '185')],
                     db_refs={'HGNC': '6871'})
    phos_stmt = Phosphorylation(mek, erk, 'T', '185')
    af_stmt = ActiveForm(erk_phos, 'activity', True)
    act_stmt = Activation(mek, erk)
    assembler = PysbAssembler()
    assembler.add_statements([phos_stmt, af_stmt])
    assembler.make_model()
    checker = ModelChecker(assembler.model, [phos_stmt, act_stmt],
                           agent_obs=[erk_phos])
    checks = checker.check_model()
    assert checks[0][1].path_found
    assert checks[1][1].path_found
    # Only 1 observable should be created
    assert len(checker.model.observables) == 1
# The following triple-quoted string is a module-level no-op that keeps
# several disabled / not-yet-working tests (bound-condition subjects,
# activity subtypes, auto- and transphosphorylation) around for reference.
"""
def test_check_rule_subject_bound_condition():
    braf = Agent('BRAF', db_refs={'HGNC': '1'})
    raf1 = Agent('RAF1', db_refs={'HGNC': '2'})
    braf_raf1 = Agent('BRAF', bound_conditions=[BoundCondition(raf1)],
                      db_refs={'HGNC': '1'})
    mek = Agent('MEK1', db_refs={'HGNC': '6840'})
    stmt = Phosphorylation(braf_raf1, mek)
    pysba = PysbAssembler()
    pysba.add_statements([stmt])
    pysba.make_model(policies='one_step')
    # Check against stmt: should indicate that RAF1 is causally linked to MEK
    # phosphorylation
    stmt_to_check = Phosphorylation(raf1, mek)
    mc = ModelChecker(pysba.model, [stmt_to_check])
    checks = mc.check_model()
    assert len(checks) == 1
    assert checks[0][0] == stmt_to_check
    assert checks[0][1] == True
def test_activation_subtype():
    sos1 = Agent('SOS1', db_refs={'HGNC':'11187'})
    kras = Agent('KRAS', db_refs={'HGNC':'6407'})
    stmts = [Activation(sos1, kras, 'gtpbound')]
    pa = PysbAssembler()
    pa.add_statements(stmts)
    pa.make_model(policies='one_step')
    stmts_to_check = [Activation(sos1, kras, 'activity')]
    mc = ModelChecker(pa.model, stmts_to_check)
    results = mc.check_model()
    assert len(results) == len(stmts_to_check)
    assert isinstance(results[0], tuple)
    assert results[0][1] == True
def test_check_autophosphorylation():
    egfr = Agent('EGFR', db_refs={'HGNC':'3236'})
    stmts = [Autophosphorylation(egfr, 'Y', '1016')]
    pa = PysbAssembler()
    pa.add_statements(stmts)
    pa.make_model(policies='one_step')
    stmts_to_check = [Phosphorylation(None, egfr),
                      Phosphorylation(None, egfr, 'Y', '1016')]
    mc = ModelChecker(pa.model, stmts_to_check)
    results = mc.check_model()
    assert len(results) == len(stmts_to_check)
    assert isinstance(results[0], tuple)
    assert results[0][1] == True
    assert results[1][1] == True
def test_check_transphosphorylation():
    egfr = Agent('EGFR', db_refs={'HGNC':'3236'})
    erbb2_egfr = Agent('ERBB2', bound_conditions=[BoundCondition(egfr)],
                       db_refs={'HGNC':'3430'})
    stmts = [Transphosphorylation(erbb2_egfr, 'Y', '1016')]
    pa = PysbAssembler()
    pa.add_statements(stmts)
    pa.make_model(policies='one_step')
    stmts_to_check = [Phosphorylation(None, egfr),
                      Phosphorylation(None, egfr, 'Y', '1016')]
    mc = ModelChecker(pa.model, stmts_to_check)
    results = mc.check_model()
    assert len(results) == len(stmts_to_check)
    assert isinstance(results[0], tuple)
    assert results[0][1] == True
    assert results[1][1] == True
"""
def test_model_check_data():
    # Build a diamond: A phosphorylates B and C, and phospho-B / phospho-C
    # each phosphorylate D, giving two A->D paths; then check that data on
    # the intermediate observables ranks the paths.
    a = Agent('A', db_refs={'HGNC': '1'})
    b = Agent('B', db_refs={'HGNC': '2'})
    b_phos = Agent('B', mods=[ModCondition('phosphorylation')],
                   db_refs={'HGNC': '2'})
    c = Agent('C', db_refs={'HGNC': '3'})
    c_phos = Agent('C', mods=[ModCondition('phosphorylation')],
                   db_refs={'HGNC': '3'})
    d = Agent('D', db_refs={'HGNC': '4'})
    d_phos = Agent('D', mods=[ModCondition('phosphorylation')],
                   db_refs={'HGNC': '4'})
    # Two paths from A to D: One going through B and another through C
    st1 = Phosphorylation(a, b)
    st2 = Phosphorylation(b_phos, d)
    st3 = Phosphorylation(a, c)
    st4 = Phosphorylation(c_phos, d)
    # Statements/Data agents for checking
    stmt_to_check = Phosphorylation(a, d)
    agent_obs = [b_phos, c_phos, d_phos]
    # Make model
    pa = PysbAssembler()
    pa.add_statements([st1, st2, st3, st4])
    pa.make_model(policies='one_step')
    mc = ModelChecker(pa.model, [stmt_to_check], agent_obs)
    results = mc.check_model(max_paths=5)
    # Both of the diamond's paths should be found
    assert len(results) == 1
    pr = results[0][1]
    res = pr.paths[0:2]
    assert len(res) == 2
    p1 = (('A_phosphorylation_B_phospho', 1),
          ('B_phospho_phosphorylation_D_phospho', 1),
          ('D_phospho_p_obs', 1))
    assert p1 in res
    p2 = (('A_phosphorylation_C_phospho', 1),
          ('C_phospho_phosphorylation_D_phospho', 1),
          ('D_phospho_p_obs', 1))
    assert p2 in res
    # Now, a vector linking agents with values, expressed at first as
    # +/- 1
    # This data should ensure that the path through B should be more highly
    # ranked than the path through C
    data = {b_phos: 1, c_phos: -1, d_phos: 1}
    paths = results[0][1].paths
    scored_paths = mc.score_paths(paths, data)
    assert scored_paths[0][0] == p1
    assert scored_paths[1][0] == p2
    assert scored_paths[0][1] > scored_paths[1][1]
def test_prune_influence_map():
    """Pruning the influence map should leave only the rule/observable
    nodes and the two causal edges for this small model."""
    kinase = Agent('Kinase', db_refs={'HGNC': '1'})
    phosphatase = Agent('Phosphatase', db_refs={'HGNC': '2'})
    substrate = Agent('Substrate', db_refs={'HGNC': '3'})
    phos_stmt = Phosphorylation(kinase, substrate)
    dephos_stmt = Dephosphorylation(phosphatase, substrate)
    assembler = PysbAssembler()
    assembler.add_statements([phos_stmt, dephos_stmt])
    assembler.make_model(policies='one_step')
    checker = ModelChecker(assembler.model, [phos_stmt])
    im = checker.get_im()
    remove_im_params(assembler.model, im)
    checker.prune_influence_map()
    im = checker.get_im()
    assert len(im.nodes()) == 3
    assert len(im.edges()) == 2
    # Smoke test to make sure drawing works
    checker.draw_im(os.devnull)
def test_prune_influence_map_subj_obj():
    # After subject/object pruning, the indirect mek -> mek2 path (which
    # runs counter to the rules' subject/object annotations) should
    # no longer be found.
    def ag(gene_name):
        # Helper: grounded Agent from a gene symbol via HGNC lookup
        return Agent(gene_name,
                     db_refs={'HGNC': hgnc_client.get_hgnc_id(gene_name)})
    mek = ag('MAP2K1')
    erk = ag('MAPK1')
    mek2 = ag('MAP2K2')
    s1 = Influence(mek, erk)
    s2 = Influence(mek2, erk, obj_delta={'polarity': -1})
    s3 = Influence(erk, mek2, obj_delta={'polarity': -1})
    # To check:
    s4 = Influence(mek, mek2)
    # Make the model
    pa = PysbAssembler()
    pa.add_statements([s1, s2, s3])
    model = pa.make_model()
    # Check the model
    mc = ModelChecker(model, [s4])
    pr_before = mc.check_statement(s4)
    assert pr_before.result_code == 'PATHS_FOUND'
    # Now prune the influence map
    mc.prune_influence_map()
    mc.prune_influence_map_subj_obj()
    pr_after = mc.check_statement(s4)
    assert pr_after.result_code == 'NO_PATHS_FOUND'
def test_weighted_sampling1():
    """Test sampling with different path lengths but no data."""
    # TEST_FLAG is read by downstream components to behave deterministically
    os.environ['TEST_FLAG'] = 'TRUE'
    mc = ModCondition('phosphorylation')
    braf = Agent('BRAF', db_refs={'HGNC': '1097'})
    map2k1 = Agent('MAP2K1', db_refs={'HGNC': '6840'})
    map2k1_phos = Agent('MAP2K1', mods=[mc], db_refs={'HGNC': '6840'})
    mapk1 = Agent('MAPK1', db_refs={'HGNC': '6871'})
    mapk1_phos = Agent('MAPK1', mods=[mc], db_refs={'HGNC': '6871'})
    jun = Agent('JUN', db_refs={'HGNC': '6204'})
    stmt_to_check = Phosphorylation(braf, jun)
    stmts = [stmt_to_check,
             Phosphorylation(braf, map2k1),
             Phosphorylation(map2k1_phos, jun),
             Phosphorylation(map2k1_phos, mapk1),
             Phosphorylation(mapk1_phos, jun)]
    # Make model
    pa = PysbAssembler()
    pa.add_statements(stmts)
    pa.make_model(policies='one_step')
    # Make the model checker and prune the influence map
    mc = ModelChecker(pa.model, [stmt_to_check], do_sampling=True, seed=1)
    mc.prune_influence_map()
    # Seed the random number generator
    np.random.seed(1)
    results = mc.check_model(max_path_length=5, max_paths=100)
    assert type(results) == list
    assert len(results) == 1
    stmt_tuple = results[0]
    assert len(stmt_tuple) == 2
    assert stmt_tuple[0] == stmt_to_check
    path_result = stmt_tuple[1]
    assert type(path_result) == PathResult
    path_lengths = [len(p) for p in path_result.paths]
    assert max(path_lengths) <= 5
    # There are three distinct paths (direct, via MAP2K1, via MAP2K1+MAPK1)
    assert len(set(path_result.paths)) == 3
    path_ctr = Counter(path_result.paths)
    # With seed 1, the 100 samples split 46/22/32 across the three paths
    assert path_ctr[(('BRAF_phosphorylation_JUN_phospho', 1),
                     ('JUN_phospho_p_obs', 1))] == 46, path_ctr
    assert path_ctr[(('BRAF_phosphorylation_MAP2K1_phospho', 1),
                     ('MAP2K1_phospho_phosphorylation_JUN_phospho', 1),
                     ('JUN_phospho_p_obs', 1))] == 22, path_ctr
    assert path_ctr[(('BRAF_phosphorylation_MAP2K1_phospho', 1),
                     ('MAP2K1_phospho_phosphorylation_MAPK1_phospho', 1),
                     ('MAPK1_phospho_phosphorylation_JUN_phospho', 1),
                     ('JUN_phospho_p_obs', 1))] == 32, path_ctr
def test_weighted_sampling2():
    """Test sampling with abundances but no tail probabilities from data."""
    # TEST_FLAG is read by downstream components to behave deterministically
    os.environ['TEST_FLAG'] = 'TRUE'
    map2k1 = Agent('MAP2K1', db_refs={'HGNC': '6840'})
    mapk1 = Agent('MAPK1', db_refs={'HGNC': '6871'})
    mapk3 = Agent('MAPK3', db_refs={'HGNC': '6877'})
    mc = ModCondition('phosphorylation')
    mapk1_phos = Agent('MAPK1', mods=[mc], db_refs={'HGNC': '6871'})
    mapk3_phos = Agent('MAPK3', mods=[mc], db_refs={'HGNC': '6877'})
    jun = Agent('JUN', db_refs={'HGNC': '6204'})
    st1 = Phosphorylation(map2k1, mapk1)
    st2 = Phosphorylation(map2k1, mapk3)
    st3 = Phosphorylation(mapk1_phos, jun)
    st4 = Phosphorylation(mapk3_phos, jun)
    stmt_to_check = Phosphorylation(map2k1, jun)
    # Make model
    pa = PysbAssembler()
    pa.add_statements([st1, st2, st3, st4])
    pa.make_model(policies='one_step')
    # Set the initial conditions: the 75/25 split biases sampling toward
    # the MAPK1 branch
    mapk1_monomer = pa.model.all_components()['MAPK1']
    mapk3_monomer = pa.model.all_components()['MAPK3']
    set_base_initial_condition(pa.model, mapk1_monomer, 75)
    set_base_initial_condition(pa.model, mapk3_monomer, 25)
    # Make the model checker and prune the influence map
    # Setting do_sampling to False should yield the default enumeration
    # behavior
    mc = ModelChecker(pa.model, [stmt_to_check], do_sampling=False)
    mc.prune_influence_map()
    results = mc.check_model(max_paths=5)
    path_result = results[0][1]
    assert len(path_result.paths) == 2
    enum_paths = path_result.paths
    # Now, try sampling
    mc = ModelChecker(pa.model, [stmt_to_check], do_sampling=True, seed=1)
    mc.prune_influence_map()
    results = mc.check_model(max_path_length=5, max_paths=1000)
    assert type(results) == list
    assert len(results) == 1
    stmt_tuple = results[0]
    assert len(stmt_tuple) == 2
    assert stmt_tuple[0] == stmt_to_check
    path_result = stmt_tuple[1]
    assert type(path_result) == PathResult
    path_lengths = [len(p) for p in path_result.paths]
    assert max(path_lengths) <= 5
    # There are two distinct paths
    assert set(enum_paths) == set(path_result.paths)
    path_ctr = Counter(path_result.paths)
    mapk1_count = path_ctr[(('MAP2K1_phosphorylation_MAPK1_phospho', 1),
                            ('MAPK1_phospho_phosphorylation_JUN_phospho', 1),
                            ('JUN_phospho_p_obs', 1))]
    mapk3_count = path_ctr[(('MAP2K1_phosphorylation_MAPK3_phospho', 1),
                            ('MAPK3_phospho_phosphorylation_JUN_phospho', 1),
                            ('JUN_phospho_p_obs', 1))]
    # Sample counts should match the 75:25 abundance ratio exactly
    assert mapk1_count == 750, mapk1_count
    assert mapk3_count == 250, mapk3_count
def test_weighted_sampling3():
    "Test sampling with normed abundances but no tail probabilities from data."
    # Abundances are normalized across rule instances involving the same gene.
    os.environ['TEST_FLAG'] = 'TRUE'
    map2k1 = Agent('MAP2K1', db_refs={'HGNC': '6840'})
    mapk1 = Agent('MAPK1', db_refs={'HGNC': '6871'})
    mapk3 = Agent('MAPK3', db_refs={'HGNC': '6877'})
    jun = Agent('JUN', db_refs={'HGNC': '6204'})
    mapk1_p218 = Agent('MAPK1',
                       mods=[ModCondition('phosphorylation', 'S', '218')],
                       db_refs={'HGNC': '6871'})
    mapk1_p222 = Agent('MAPK1',
                       mods=[ModCondition('phosphorylation', 'S', '222')],
                       db_refs={'HGNC': '6871'})
    mapk3_phos = Agent('MAPK3',
                       mods=[ModCondition('phosphorylation')],
                       db_refs={'HGNC': '6877'})
    st1 = Phosphorylation(map2k1, mapk3)
    st2 = Phosphorylation(map2k1, mapk1, 'S', '218')
    st3 = Phosphorylation(map2k1, mapk1, 'S', '222')
    st4 = Phosphorylation(mapk3_phos, jun)
    st5 = Phosphorylation(mapk1_p218, jun)
    st6 = Phosphorylation(mapk1_p222, jun)
    stmt_to_check = Phosphorylation(map2k1, jun)
    # Make model
    pa = PysbAssembler()
    pa.add_statements([st1, st2, st3, st4, st5, st6])
    pa.make_model(policies='one_step')
    # Set the initial conditions
    mapk1_monomer = pa.model.all_components()['MAPK1']
    mapk3_monomer = pa.model.all_components()['MAPK3']
    set_base_initial_condition(pa.model, mapk1_monomer, 50)
    set_base_initial_condition(pa.model, mapk3_monomer, 50)
    # Do sampling
    mc = ModelChecker(pa.model, [stmt_to_check], do_sampling=True, seed=1)
    mc.prune_influence_map()
    results = mc.check_model(max_path_length=5, max_paths=100)
    assert type(results) == list
    assert len(results) == 1
    stmt_tuple = results[0]
    assert len(stmt_tuple) == 2
    assert stmt_tuple[0] == stmt_to_check
    path_result = stmt_tuple[1]
    assert type(path_result) == PathResult
    path_lengths = [len(p) for p in path_result.paths]
    assert max(path_lengths) <= 5
    # There are three distinct paths: via MAPK3, MAPK1@S218, and MAPK1@S222
    path_ctr = Counter(path_result.paths)
    assert len(path_ctr) == 3
    assert path_ctr[(('MAP2K1_phosphorylation_MAPK3_phospho', 1),
                     ('MAPK3_phospho_phosphorylation_JUN_phospho', 1),
                     ('JUN_phospho_p_obs', 1))] == 49, path_ctr
    assert path_ctr[(('MAP2K1_phosphorylation_MAPK1_S218', 1),
                     ('MAPK1_phosphoS218_phosphorylation_JUN_phospho', 1),
                     ('JUN_phospho_p_obs', 1))] == 31, path_ctr
    assert path_ctr[(('MAP2K1_phosphorylation_MAPK1_S222', 1),
                     ('MAPK1_phosphoS222_phosphorylation_JUN_phospho', 1),
                     ('JUN_phospho_p_obs', 1))] == 20, path_ctr
if __name__ == '__main__':
    # Ad hoc entry point: runs a single test directly during development
    # (the full suite is normally driven by the test runner).
    test_prune_influence_map_subj_obj()
# TODO Add tests for autophosphorylation
# TODO Add test for transphosphorylation
# FIXME: Issue: Increasing kinase activity doesn't make it capable of executing
# phosphorylation statements
# FIXME Issue increase activity (generic) doesn't make something capable of
# executing phospho (or other statements)
# Goal: Be able to check generic phosphorylations against specific rules
# and vice versa.
# 0. (('T185', 'p'),('Y187','p'), ...,), 'kinase', 'is_active')
# 1. (('T185', 'p'),('Y187','p'), ...,), 'kinase', 'is_inactive')
# 2. How to figure out that 'kinase'
# 1. Add activity states to agents, and have check_activation work with
# grounded monomers
# 2. Also need activity observable matching to account for different types of
# activity, so that different types of activity can be checked
# 2. Need to be able to annotate specific site/state combinations as
# active forms
# 3. Need to make grounded_mp generation work with MutConditions and
# bound conditions (bound conditions would need to check for bonds to
# 4. Grounded monomer patterns for check_activation
# 5. Check for complexes (using contact map? Or express the complex as an
# observable and look for paths?
# Issues--if a rule activity isn't contingent on a particular mod,
# then there will be no causal connection between any upstream modifying
# rules and the downstream rule.
# Active Form vs. Conditions on enzymes
# Identify conflicting ModConditions?
# Refactor out active forms as a preassembly step
# Add "active" into assembled Pysb models as a requirement of every enzyme,
# and add ActiveForms in as rules in the model. This has the advantage of
# working when the activeform is not known (allows tying together activation
# and mods). Problem is what to do with the rules that have mod conditions
# already--add active in? Active flag approach has the advantage of a single
# role for each substrate, which (I think) would prevent an explosion of
# paths
# Simplest thing: take any rule where the enzyme has no conditions on it
# and replace it with a comparable rule for each activeform.
# Create rules for each active form
# So what's needed is an assembly procedure where an active form is applied
# across all rules for which that protein is the enzyme.
# Need to know which agent is the "enzyme" in rules, so that these can
# be prioritized, and activeforms applied
# Apply weights based on evidence/belief scores;
# Apply weights based on positive
# Another issue--doesn't know that RAF1(phospho='p') should be satisfied
# by RAF1(S259='p'). A big problem, even after pre-assembly--because longer
# paths where a 'phospho' is the observable will never be satisfied.
# Does this mean that we need a PySB ComplexPattern -> Agent mapping, that
# we can subsequently use for refinements?
# Need to handle complex statements. Would show that one_step approach
# would not satisfy constraint, but two-step approach could, where the
# Complex information was specified.
# Can probably handle all modifications in a generic function.
# Then need to handle: Complex, Dephosphorylation.
# Then Gef/Gap?
# Then Activation/ActiveForm.
# Get the stuff from databases involving the canonical proteins,
# and show that a simple model satisfies it.
# Try to build the model using natural language?
#
# By tying molecules to biological processes, we can even check that
# these types of high-level observations are satisfied.
#
# Need to handle case where Phosphorylation site is not specified by
# statement, but is actually handled in the model (i.e., need to know
# that a particular site name and state corresponds to a phosphorylation.
# Points to need to have an additional data structure annotating agents,
# sites, states.
#
# Need to handle embeddings of complex patterns where sites can have both
# modification state and bonds
#
# Need to handle reversible rules!
#
# Should probably build in some way of returning the paths found
#
# Save all the paths that a particular rule is on--then if you're wondering
# why it's in the model, you look at all of the statements for which that
# rule provides a path .
#
# When Ras machine finds a new finding, it can be checked to see if it's
# satisfied by the model.
import hashlib
import base64
import math
from django.contrib.auth import hashers
from django.utils.crypto import constant_time_compare
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_noop as _
from django.utils.encoding import force_bytes
class Drupal7PasswordHasher(hashers.BasePasswordHasher):
    """
    Secure password hashing using the algorithm used by Drupal 7 (recommended)

    Drupal stores hashes as ``$S$<log2-count char><8-char salt><hash>``;
    internally this hasher uses Django's ``algorithm$iterations$salt$hash``
    layout, with conversion helpers in both directions.
    """
    algorithm = "drupal7_sha512"
    iterations = 10000
    digest = hashlib.sha512
    # Drupal's custom base-64 alphabet (itoa64 in Drupal's password.inc)
    alphabet = './0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'

    def atoi64(self, v):
        # Character -> 6-bit integer value (-1 if not in the alphabet)
        return self.alphabet.find(v)

    def i64toa(self, v):
        # 6-bit integer value -> character
        return self.alphabet[v]

    def b64encode(self, v):
        """Encode the byte string *v* with Drupal's little-endian base-64
        (equivalent to PHP's _password_base64_encode)."""
        out = ''
        count = len(v)
        i = 0
        while i < count:
            # Pack up to 3 bytes little-endian, then emit 6 bits at a time
            value = ord(v[i])
            i += 1
            out += self.i64toa(value & 0x3f)
            if i < count:
                value |= ord(v[i]) << 8
            out += self.i64toa((value >> 6) & 0x3f)
            if i == count:
                break
            i += 1
            if i < count:
                value |= ord(v[i]) << 16
            out += self.i64toa((value >> 12) & 0x3f)
            if i == count:
                break
            i += 1
            out += self.i64toa((value >> 18) & 0x3f)
        return out

    def from_drupal(self, encoded):
        """Convert a Drupal '$S$...' hash to this hasher's internal format.

        Raises ValueError if *encoded* is not a Drupal 7 SHA-512 hash.
        """
        ident, log_count, salt, h = encoded[:3], encoded[3], encoded[4:12], encoded[12:]
        if ident != '$S$':
            raise ValueError('Not a Drupal7 SHA-512 hashed password')
        # The 4th character encodes log2 of the iteration count
        count = 1 << self.atoi64(log_count)
        return '%s$%s$%s$%s' % (self.algorithm, count, salt, h)

    def to_drupal(self, encoded):
        """Convert an internal-format hash back to Drupal's '$S$...' form.

        Bug fixes relative to the previous version:
        * ``count`` comes from a string split, so it must be cast to int
          before ``math.log`` can be applied;
        * the resulting log2 value is an integer index that must be
          encoded to a character with ``i64toa`` (index -> char), not
          ``atoi64`` (char -> index), which would raise a TypeError.
        """
        algo, count, salt, h = encoded.split('$', 3)
        log_count = self.i64toa(int(math.ceil(math.log(int(count), 2))))
        return '$S$%s%s%s' % (log_count, salt, h)

    def encode(self, password, salt, iterations):
        """Hash *password* with *salt*, iterating the digest iterations+1
        times as Drupal does; the base-64 hash is truncated to 43 chars
        to match Drupal's stored length."""
        assert password
        assert salt and '$' not in salt
        h = salt
        password = force_bytes(password)
        for i in xrange(iterations+1):
            h = self.digest(h + password).digest()
        return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, self.b64encode(h)[:43])

    def verify(self, password, encoded):
        """Return True if *password* matches *encoded*, using a
        constant-time comparison."""
        algorithm, iterations, salt, hash = encoded.split('$', 3)
        assert algorithm == self.algorithm
        encoded_2 = self.encode(password, salt, int(iterations))
        return constant_time_compare(encoded, encoded_2)

    def safe_summary(self, encoded):
        """Return a masked breakdown of *encoded* that is safe to display
        (e.g. in the admin)."""
        algorithm, iterations, salt, hash = encoded.split('$', 3)
        assert algorithm == self.algorithm
        return SortedDict([
            (_('algorithm'), algorithm),
            (_('iterations'), iterations),
            (_('salt'), hashers.mask_hash(salt)),
            (_('hash'), hashers.mask_hash(hash)),
        ])
class CommonPasswordHasher(hashers.BasePasswordHasher):
    """
    The Salted MD5 password hashing algorithm (not recommended)

    Generic salted-digest scheme; subclasses set ``algorithm`` and
    ``digest``. Encoded form is ``algorithm$salt$hexdigest``.
    """
    algorithm = None
    digest = None

    def encode(self, password, salt):
        """Return the 'algorithm$salt$hexdigest' encoding of *password*."""
        assert password
        assert '$' not in salt
        hexdigest = self.digest(force_bytes(salt + password)).hexdigest()
        return "%s$%s$%s" % (self.algorithm, salt, hexdigest)

    def verify(self, password, encoded):
        """Return True if *password* matches *encoded* (constant-time)."""
        algorithm, salt, hexdigest = encoded.split('$', 2)
        assert algorithm == self.algorithm
        return constant_time_compare(encoded, self.encode(password, salt))

    def safe_summary(self, encoded):
        """Return a masked, display-safe breakdown of *encoded*."""
        algorithm, salt, hexdigest = encoded.split('$', 2)
        assert algorithm == self.algorithm
        return SortedDict([
            (_('algorithm'), algorithm),
            (_('salt'), hashers.mask_hash(salt, show=2)),
            (_('hash'), hashers.mask_hash(hexdigest)),
        ])
class SHA256PasswordHasher(CommonPasswordHasher):
    """Salted SHA-256 variant of the common salted-hash scheme."""
    algorithm = 'sha256'
    digest = hashlib.sha256
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.