| repo_name | ref | path | copies | content |
|---|---|---|---|---|
| ndingwall/scikit-learn | refs/heads/master | sklearn/gaussian_process/_gpr.py | 2 |
"""Gaussian processes regression."""
# Authors: Jan Hendrik Metzen <jhm@informatik.uni-bremen.de>
# Modified by: Pete Green <p.l.green@liverpool.ac.uk>
# License: BSD 3 clause
import warnings
from operator import itemgetter
import numpy as np
from scipy.linalg import cholesky, cho_solve, solve_triangular
import scipy.optimize
from ..base import BaseEstimator, RegressorMixin, clone
from ..base import MultiOutputMixin
from .kernels import RBF, ConstantKernel as C
from ..utils import check_random_state
from ..utils.validation import check_array
from ..utils.optimize import _check_optimize_result
from ..utils.validation import _deprecate_positional_args
class GaussianProcessRegressor(MultiOutputMixin,
RegressorMixin, BaseEstimator):
"""Gaussian process regression (GPR).
The implementation is based on Algorithm 2.1 of Gaussian Processes
for Machine Learning (GPML) by Rasmussen and Williams.
In addition to the standard scikit-learn estimator API,
GaussianProcessRegressor:
* allows prediction without prior fitting (based on the GP prior)
* provides an additional method sample_y(X), which evaluates samples
drawn from the GPR (prior or posterior) at given inputs
* exposes a method log_marginal_likelihood(theta), which can be used
externally for other ways of selecting hyperparameters, e.g., via
Markov chain Monte Carlo.
Read more in the :ref:`User Guide <gaussian_process>`.
.. versionadded:: 0.18
Parameters
----------
kernel : kernel instance, default=None
The kernel specifying the covariance function of the GP. If None is
passed, the kernel ``ConstantKernel(1.0, constant_value_bounds="fixed")
* RBF(1.0, length_scale_bounds="fixed")`` is used as default. Note that
the kernel hyperparameters are optimized during fitting unless the
bounds are marked as "fixed".
alpha : float or ndarray of shape (n_samples,), default=1e-10
Value added to the diagonal of the kernel matrix during fitting.
This can prevent a potential numerical issue during fitting, by
ensuring that the calculated values form a positive definite matrix.
It can also be interpreted as the variance of additional Gaussian
measurement noise on the training observations. Note that this is
different from using a `WhiteKernel`. If an array is passed, it must
have the same number of entries as the data used for fitting and is
used as datapoint-dependent noise level. Allowing the noise level to
be specified directly as a parameter is mainly for convenience and
for consistency with Ridge.
optimizer : "fmin_l_bfgs_b" or callable, default="fmin_l_bfgs_b"
Can either be one of the internally supported optimizers for optimizing
the kernel's parameters, specified by a string, or an externally
defined optimizer passed as a callable. If a callable is passed, it
must have the signature::
def optimizer(obj_func, initial_theta, bounds):
# * 'obj_func' is the objective function to be minimized, which
# takes the hyperparameters theta as parameter and an
# optional flag eval_gradient, which determines if the
# gradient is returned additionally to the function value
# * 'initial_theta': the initial value for theta, which can be
# used by local optimizers
# * 'bounds': the bounds on the values of theta
....
# Returned are the best found hyperparameters theta and
# the corresponding value of the target function.
return theta_opt, func_min
By default, the 'L-BFGS-B' algorithm from scipy.optimize.minimize
is used. If None is passed, the kernel's parameters are kept fixed.
Available internal optimizers are::
'fmin_l_bfgs_b'
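As an illustration only (a sketch, not part of this class), a custom
gradient-free optimizer could be passed as::

    from scipy.optimize import minimize

    def custom_optimizer(obj_func, initial_theta, bounds):
        # Ignore the analytic gradient and the bounds; run a
        # gradient-free local search from the initial theta.
        res = minimize(
            lambda theta: obj_func(theta, eval_gradient=False),
            initial_theta, method="Nelder-Mead")
        return res.x, res.fun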
n_restarts_optimizer : int, default=0
The number of restarts of the optimizer for finding the kernel's
parameters which maximize the log-marginal likelihood. The first run
of the optimizer is performed from the kernel's initial parameters,
the remaining ones (if any) from thetas sampled log-uniform randomly
from the space of allowed theta-values. If greater than 0, all bounds
must be finite. Note that n_restarts_optimizer == 0 implies that one
run is performed.
normalize_y : bool, default=False
Whether to normalize the target values y, i.e., set their mean to 0
and their variance to 1. This is recommended for cases where
zero-mean, unit-variance priors are used. Note that, in this
implementation, the normalisation is reversed before the GP
predictions are reported.
.. versionchanged:: 0.23
copy_X_train : bool, default=True
If True, a persistent copy of the training data is stored in the
object. Otherwise, just a reference to the training data is stored,
which might cause predictions to change if the data is modified
externally.
random_state : int, RandomState instance or None, default=None
Determines random number generation used to sample the initial
hyperparameter values for the optimizer restarts.
Pass an int for reproducible results across multiple function calls.
See :term:`Glossary <random_state>`.
Attributes
----------
X_train_ : array-like of shape (n_samples, n_features) or list of object
Feature vectors or other representations of training data (also
required for prediction).
y_train_ : array-like of shape (n_samples,) or (n_samples, n_targets)
Target values in training data (also required for prediction)
kernel_ : kernel instance
The kernel used for prediction. The structure of the kernel is the
same as the one passed as parameter but with optimized hyperparameters
L_ : array-like of shape (n_samples, n_samples)
Lower-triangular Cholesky decomposition of the kernel in ``X_train_``
alpha_ : array-like of shape (n_samples,)
Dual coefficients of training data points in kernel space
log_marginal_likelihood_value_ : float
The log-marginal-likelihood of ``self.kernel_.theta``
Examples
--------
>>> from sklearn.datasets import make_friedman2
>>> from sklearn.gaussian_process import GaussianProcessRegressor
>>> from sklearn.gaussian_process.kernels import DotProduct, WhiteKernel
>>> X, y = make_friedman2(n_samples=500, noise=0, random_state=0)
>>> kernel = DotProduct() + WhiteKernel()
>>> gpr = GaussianProcessRegressor(kernel=kernel,
... random_state=0).fit(X, y)
>>> gpr.score(X, y)
0.3680...
>>> gpr.predict(X[:2,:], return_std=True)
(array([653.0..., 592.1...]), array([316.6..., 316.6...]))
"""
@_deprecate_positional_args
def __init__(self, kernel=None, *, alpha=1e-10,
optimizer="fmin_l_bfgs_b", n_restarts_optimizer=0,
normalize_y=False, copy_X_train=True, random_state=None):
self.kernel = kernel
self.alpha = alpha
self.optimizer = optimizer
self.n_restarts_optimizer = n_restarts_optimizer
self.normalize_y = normalize_y
self.copy_X_train = copy_X_train
self.random_state = random_state
def fit(self, X, y):
"""Fit Gaussian process regression model.
Parameters
----------
X : array-like of shape (n_samples, n_features) or list of object
Feature vectors or other representations of training data.
y : array-like of shape (n_samples,) or (n_samples, n_targets)
Target values
Returns
-------
self : returns an instance of self.
"""
if self.kernel is None: # Use an RBF kernel as default
self.kernel_ = C(1.0, constant_value_bounds="fixed") \
* RBF(1.0, length_scale_bounds="fixed")
else:
self.kernel_ = clone(self.kernel)
self._rng = check_random_state(self.random_state)
if self.kernel_.requires_vector_input:
X, y = self._validate_data(X, y, multi_output=True, y_numeric=True,
ensure_2d=True, dtype="numeric")
else:
X, y = self._validate_data(X, y, multi_output=True, y_numeric=True,
ensure_2d=False, dtype=None)
# Normalize target value
if self.normalize_y:
self._y_train_mean = np.mean(y, axis=0)
self._y_train_std = np.std(y, axis=0)
# Remove mean and make unit variance
y = (y - self._y_train_mean) / self._y_train_std
else:
self._y_train_mean = np.zeros(1)
self._y_train_std = 1
if np.iterable(self.alpha) \
and self.alpha.shape[0] != y.shape[0]:
if self.alpha.shape[0] == 1:
self.alpha = self.alpha[0]
else:
raise ValueError("alpha must be a scalar or an array"
" with same number of entries as y.(%d != %d)"
% (self.alpha.shape[0], y.shape[0]))
self.X_train_ = np.copy(X) if self.copy_X_train else X
self.y_train_ = np.copy(y) if self.copy_X_train else y
if self.optimizer is not None and self.kernel_.n_dims > 0:
# Choose hyperparameters based on maximizing the log-marginal
# likelihood (potentially starting from several initial values)
def obj_func(theta, eval_gradient=True):
if eval_gradient:
lml, grad = self.log_marginal_likelihood(
theta, eval_gradient=True, clone_kernel=False)
return -lml, -grad
else:
return -self.log_marginal_likelihood(theta,
clone_kernel=False)
# First optimize starting from theta specified in kernel
optima = [(self._constrained_optimization(obj_func,
self.kernel_.theta,
self.kernel_.bounds))]
# Additional runs are performed from log-uniform chosen initial
# theta
if self.n_restarts_optimizer > 0:
if not np.isfinite(self.kernel_.bounds).all():
raise ValueError(
"Multiple optimizer restarts (n_restarts_optimizer>0) "
"requires that all bounds are finite.")
bounds = self.kernel_.bounds
for iteration in range(self.n_restarts_optimizer):
theta_initial = \
self._rng.uniform(bounds[:, 0], bounds[:, 1])
optima.append(
self._constrained_optimization(obj_func, theta_initial,
bounds))
# Select result from run with minimal (negative) log-marginal
# likelihood
lml_values = list(map(itemgetter(1), optima))
self.kernel_.theta = optima[np.argmin(lml_values)][0]
self.kernel_._check_bounds_params()
self.log_marginal_likelihood_value_ = -np.min(lml_values)
else:
self.log_marginal_likelihood_value_ = \
self.log_marginal_likelihood(self.kernel_.theta,
clone_kernel=False)
# Precompute quantities required for predictions which are independent
# of actual query points
K = self.kernel_(self.X_train_)
K[np.diag_indices_from(K)] += self.alpha
try:
self.L_ = cholesky(K, lower=True) # Line 2
# self.L_ changed, self._K_inv needs to be recomputed
self._K_inv = None
except np.linalg.LinAlgError as exc:
exc.args = ("The kernel, %s, is not returning a "
"positive definite matrix. Try gradually "
"increasing the 'alpha' parameter of your "
"GaussianProcessRegressor estimator."
% self.kernel_,) + exc.args
raise
self.alpha_ = cho_solve((self.L_, True), self.y_train_) # Line 3
return self
def predict(self, X, return_std=False, return_cov=False):
"""Predict using the Gaussian process regression model
We can also predict based on an unfitted model by using the GP prior.
In addition to the mean of the predictive distribution, also its
standard deviation (return_std=True) or covariance (return_cov=True).
Note that at most one of the two can be requested.
Parameters
----------
X : array-like of shape (n_samples, n_features) or list of object
Query points where the GP is evaluated.
return_std : bool, default=False
If True, the standard-deviation of the predictive distribution at
the query points is returned along with the mean.
return_cov : bool, default=False
If True, the covariance of the joint predictive distribution at
the query points is returned along with the mean.
Returns
-------
y_mean : ndarray of shape (n_samples, [n_output_dims])
Mean of predictive distribution at query points.
y_std : ndarray of shape (n_samples,), optional
Standard deviation of predictive distribution at query points.
Only returned when `return_std` is True.
y_cov : ndarray of shape (n_samples, n_samples), optional
Covariance of joint predictive distribution at query points.
Only returned when `return_cov` is True.
"""
if return_std and return_cov:
raise RuntimeError(
"Not returning standard deviation of predictions when "
"returning full covariance.")
if self.kernel is None or self.kernel.requires_vector_input:
X = check_array(X, ensure_2d=True, dtype="numeric")
else:
X = check_array(X, ensure_2d=False, dtype=None)
if not hasattr(self, "X_train_"): # Unfitted;predict based on GP prior
if self.kernel is None:
kernel = (C(1.0, constant_value_bounds="fixed") *
RBF(1.0, length_scale_bounds="fixed"))
else:
kernel = self.kernel
y_mean = np.zeros(X.shape[0])
if return_cov:
y_cov = kernel(X)
return y_mean, y_cov
elif return_std:
y_var = kernel.diag(X)
return y_mean, np.sqrt(y_var)
else:
return y_mean
else: # Predict based on GP posterior
K_trans = self.kernel_(X, self.X_train_)
y_mean = K_trans.dot(self.alpha_) # Line 4 (y_mean = f_star)
# undo normalisation
y_mean = self._y_train_std * y_mean + self._y_train_mean
if return_cov:
v = cho_solve((self.L_, True), K_trans.T) # Line 5
y_cov = self.kernel_(X) - K_trans.dot(v) # Line 6
# undo normalisation
y_cov = y_cov * self._y_train_std**2
return y_mean, y_cov
elif return_std:
# cache result of K_inv computation
if self._K_inv is None:
# compute inverse K_inv of K based on its Cholesky
# decomposition L and its inverse L_inv
L_inv = solve_triangular(self.L_.T,
np.eye(self.L_.shape[0]))
self._K_inv = L_inv.dot(L_inv.T)
# Compute variance of predictive distribution
y_var = self.kernel_.diag(X)
y_var -= np.einsum("ij,ij->i",
np.dot(K_trans, self._K_inv), K_trans)
# Check if any of the variances is negative because of
# numerical issues. If yes: set the variance to 0.
y_var_negative = y_var < 0
if np.any(y_var_negative):
warnings.warn("Predicted variances smaller than 0. "
"Setting those variances to 0.")
y_var[y_var_negative] = 0.0
# undo normalisation
y_var = y_var * self._y_train_std**2
return y_mean, np.sqrt(y_var)
else:
return y_mean
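# Illustrative call (a sketch): after ``fit``, the predictive mean and a
# per-point uncertainty can be requested together:
#
#     y_mean, y_std = gpr.predict(X_test, return_std=True)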
def sample_y(self, X, n_samples=1, random_state=0):
"""Draw samples from Gaussian process and evaluate at X.
Parameters
----------
X : array-like of shape (n_samples, n_features) or list of object
Query points where the GP is evaluated.
n_samples : int, default=1
The number of samples drawn from the Gaussian process
random_state : int, RandomState instance or None, default=0
Determines random number generation to randomly draw samples.
Pass an int for reproducible results across multiple function
calls.
See :term: `Glossary <random_state>`.
Returns
-------
y_samples : ndarray of shape (n_samples_X, [n_output_dims], n_samples)
Values of n_samples samples drawn from Gaussian process and
evaluated at query points.
"""
rng = check_random_state(random_state)
y_mean, y_cov = self.predict(X, return_cov=True)
if y_mean.ndim == 1:
y_samples = rng.multivariate_normal(y_mean, y_cov, n_samples).T
else:
y_samples = \
[rng.multivariate_normal(y_mean[:, i], y_cov,
n_samples).T[:, np.newaxis]
for i in range(y_mean.shape[1])]
y_samples = np.hstack(y_samples)
return y_samples
def log_marginal_likelihood(self, theta=None, eval_gradient=False,
clone_kernel=True):
"""Returns log-marginal likelihood of theta for training data.
Parameters
----------
theta : array-like of shape (n_kernel_params,), default=None
Kernel hyperparameters for which the log-marginal likelihood is
evaluated. If None, the precomputed log_marginal_likelihood
of ``self.kernel_.theta`` is returned.
eval_gradient : bool, default=False
If True, the gradient of the log-marginal likelihood with respect
to the kernel hyperparameters at position theta is returned
additionally. If True, theta must not be None.
clone_kernel : bool, default=True
If True, the kernel attribute is copied. If False, the kernel
attribute is modified in place, which may result in a performance
improvement.
Returns
-------
log_likelihood : float
Log-marginal likelihood of theta for training data.
log_likelihood_gradient : ndarray of shape (n_kernel_params,), optional
Gradient of the log-marginal likelihood with respect to the kernel
hyperparameters at position theta.
Only returned when eval_gradient is True.
"""
if theta is None:
if eval_gradient:
raise ValueError(
"Gradient can only be evaluated for theta!=None")
return self.log_marginal_likelihood_value_
if clone_kernel:
kernel = self.kernel_.clone_with_theta(theta)
else:
kernel = self.kernel_
kernel.theta = theta
if eval_gradient:
K, K_gradient = kernel(self.X_train_, eval_gradient=True)
else:
K = kernel(self.X_train_)
K[np.diag_indices_from(K)] += self.alpha
try:
L = cholesky(K, lower=True) # Line 2
except np.linalg.LinAlgError:
return (-np.inf, np.zeros_like(theta)) \
if eval_gradient else -np.inf
# Support multi-dimensional output of self.y_train_
y_train = self.y_train_
if y_train.ndim == 1:
y_train = y_train[:, np.newaxis]
alpha = cho_solve((L, True), y_train) # Line 3
# Compute log-likelihood (compare line 7)
log_likelihood_dims = -0.5 * np.einsum("ik,ik->k", y_train, alpha)
log_likelihood_dims -= np.log(np.diag(L)).sum()
log_likelihood_dims -= K.shape[0] / 2 * np.log(2 * np.pi)
log_likelihood = log_likelihood_dims.sum(-1) # sum over dimensions
if eval_gradient: # compare Equation 5.9 from GPML
tmp = np.einsum("ik,jk->ijk", alpha, alpha) # k: output-dimension
tmp -= cho_solve((L, True), np.eye(K.shape[0]))[:, :, np.newaxis]
# Compute "0.5 * trace(tmp.dot(K_gradient))" without
# constructing the full matrix tmp.dot(K_gradient) since only
# its diagonal is required
log_likelihood_gradient_dims = \
0.5 * np.einsum("ijl,jik->kl", tmp, K_gradient)
log_likelihood_gradient = log_likelihood_gradient_dims.sum(-1)
if eval_gradient:
return log_likelihood, log_likelihood_gradient
else:
return log_likelihood
def _constrained_optimization(self, obj_func, initial_theta, bounds):
if self.optimizer == "fmin_l_bfgs_b":
opt_res = scipy.optimize.minimize(
obj_func, initial_theta, method="L-BFGS-B", jac=True,
bounds=bounds)
_check_optimize_result("lbfgs", opt_res)
theta_opt, func_min = opt_res.x, opt_res.fun
elif callable(self.optimizer):
theta_opt, func_min = \
self.optimizer(obj_func, initial_theta, bounds=bounds)
else:
raise ValueError("Unknown optimizer %s." % self.optimizer)
return theta_opt, func_min
def _more_tags(self):
return {'requires_fit': False}
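# ---------------------------------------------------------------------
# Illustrative usage (a sketch, not part of the class): as the docstring
# notes, ``log_marginal_likelihood(theta)`` can drive external
# hyperparameter selection. A minimal grid evaluation, assuming a fitted
# model ``gpr`` with a one-parameter kernel such as RBF:
#
#     import numpy as np
#     thetas = np.linspace(-2.0, 2.0, 50)   # log-scale hyperparameters
#     lml = [gpr.log_marginal_likelihood(np.array([t])) for t in thetas]
#     best_theta = thetas[int(np.argmax(lml))]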
| wonjunetai/pulse | refs/heads/master | preprocess/filtermap.py | 1 |
import time
def p_beg_end(p_start, pend, p_length):
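# True when the protein-level hit reaches to within THRESHOLD/3 residues
# of the end (phend) or the start (phbeg) of a sequence of length p_length.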
phend = (pend + THRESHOLD / 3 > p_length)
phbeg = (p_start - THRESHOLD / 3 < 1)
return [phbeg, phend]
def m_beg_end(n_start, n_end, n_length, backwards):
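# Same boundary test at nucleotide resolution (full THRESHOLD); start and
# end are swapped first when the hit is on the reverse strand (backwards).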
if backwards:
temp = n_start
n_start = n_end
n_end = temp
mhend = (n_end + THRESHOLD > n_length)
mhbeg = (n_start - THRESHOLD < 1)
return [mhbeg, mhend]
# CONSTANTS
# TODO: Move these to preprocess_settings.json
THRESHOLD = 9 # nucleotides
PINDENT_THRESHOLD = 95.0
IGNORE_EXCLUSION = False
DEAL_WITH_NUCLEOTIDE = False
def filter_map1(blast_file, rna_seq_output):
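# Reads BLAST tabular output (the 12 standard -outfmt 6 columns unpacked
# below) and keeps only hits that reach both sequence boundaries, have
# consistent protein/nucleotide hit lengths, and pass PINDENT_THRESHOLD.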
read_from = open(blast_file, 'r')
write_to = open(rna_seq_output, 'w')
for line in read_from:
(query_id, subject_id, perc_identity, aln_length, mismatch_count, gap_open_count,
query_start, query_end, subject_start, subject_end, e_val, bit_score) = line.split('\t')
try:
# PARSING ID
snucleotide_lengths = query_id.split("-")[-1].split("=")
nucleotide_lengths = [int(snucleotide_lengths[0]),
int(snucleotide_lengths[1]),
int(snucleotide_lengths[2])]
except (IndexError, ValueError):
print 'WARNING WITH ' + query_id
p_start = int(subject_start) # /3.0
p_end = int(subject_end) # /3.0
p_length = int(aln_length) # /3.0
if DEAL_WITH_NUCLEOTIDE:
p_start /= 3.0
p_end /= 3.0
p_length /= 3.0
n_start = int(query_start)
n_end = int(query_end)
n_length = sum(nucleotide_lengths)
pident = float(perc_identity)
if query_id[0] == "E":
if IGNORE_EXCLUSION:
continue
n_length -= nucleotide_lengths[1]
to_print = True
backwards = (n_start > n_end)
[hit_p_beg, hit_p_end] = p_beg_end(p_start, p_end, p_length)
[hit_n_beg, hit_n_end] = m_beg_end(n_start, n_end, n_length, backwards)
exist_beg = True
exist_end = True
if not hit_n_beg and not hit_p_beg:
to_print = False
exist_beg = False
if not hit_n_end and not hit_p_end:
to_print = False
exist_end = False
# if not (exist_beg or exist_end):
# print line.strip() + " no_both"
# elif not exist_beg:
# if backwards:
# print line.strip() + " no_c2"
# else:
# print line.strip() + " no_c1"
# elif not exist_end:
# if backwards:
# print line.strip() + " no_c1"
# else:
# print line.strip() + " no_c2"
else:
hit_length = n_end - n_start + 1
if hit_length < 0:
hit_length *= -1
hit_length += 2
if ((hit_length + THRESHOLD) / 3 < p_end - p_start + 1) or (
(hit_length - THRESHOLD) / 3 > p_end - p_start + 1):
to_print = False
# print line.strip() + " missing_chunks"
if pident < PINDENT_THRESHOLD:
to_print = False
# print line.strip() + " pident_failure"
if query_id[0] == "E":
# make sure A site is within
if not backwards:
if not (n_start <= nucleotide_lengths[0] <= n_end):
to_print = False
else:
if not (n_end <= nucleotide_lengths[0] <= n_start):
to_print = False
if to_print:
print >> write_to, line.strip()
##########################################################################
def filter_map2(read_from, write_to):
# filters multiple uniprot hits by taking the best one in terms of evalue
read_from = open(read_from, 'r')
write_to = open(write_to, 'w')
old_id = ""
for line in read_from:
tokens = line.split('\t')
# PARSING ID
asid = tokens[0]
_id = asid
if old_id != _id:
print >> write_to, line.strip()
old_id = _id
##########################################################################
def filter_map3(uniprot_fasta):
"""
Returns uniprot_ddbb.
:param uniprot_fasta:
:return:
"""
uniprot_ddbb = {}
seq = ''
uid = '|'
sep = '|'
with open(uniprot_fasta, 'r') as input_file:
for line in input_file:
if line[0] == '>':
uniprot_ddbb[uid] = len(seq)
# PARSING ID
uid = line.split(sep)[1]
seq = ''
# PARSING ID
else:
seq += line.strip()
uniprot_ddbb[uid] = len(seq)
return uniprot_ddbb
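# Illustration (hypothetical data): for a UniProt FASTA header such as
# '>sp|P12345|SOME_NAME ...', the accession between the first two '|'
# separators becomes the key, e.g. {'P12345': <sequence length>}.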
##########################################################################
def filter_map4(isoform_fasta):
"""
Load isoform in fasta.
:param isoform_fasta:
:return:
"""
isoforms_ddbb = {}
seq = ''
uid = ''
SEP = '>'
with open(isoform_fasta, 'r') as input_file:
for line in input_file:
if line[0] == '>':
isoforms_ddbb[uid] = len(seq)
# PARSING ID
uid = line.split(SEP)[1].strip().split(";")[0]
seq = ''
# PARSING ID
else:
seq += line.strip()
isoforms_ddbb[uid] = len(seq)
return isoforms_ddbb
##########################################################################
def filter_map5(read_from):
"""
:param read_from:
:return:
"""
# tags on spliced exon indices on query hits using p_start pend n_start and n_end along with A exon positions
# this information is obtained from the splice sequence to uniprot mapping file
read_from = open(read_from, 'r')
has_mapping = {}
for line in read_from:
tokens = line.split('\t')
# PARSING ID
id = tokens[0] # asid
uniprot = tokens[1].split("|")[1]
# PARSING ID
if has_mapping.has_key(id):
has_mapping[id].append(uniprot)
else:
has_mapping[id] = [uniprot]
read_from.close()
return has_mapping
##########################################################################
def filter_map6(read_from, write_to, has_mapping, uniprot_ddbb, isoforms_ddbb):
read_from = open(read_from, 'r')
write_to = open(write_to, 'w')
for line in read_from:
tokens = line.split('\t')
# PARSING ID
_id = tokens[0] # asid
uniprot = tokens[1].split("|")[1]
# PARSING ID
snucleotide_lengths = tokens[0].split("-")[-1].split("=")
nucleotide_lengths = [int(snucleotide_lengths[0]), int(snucleotide_lengths[1]), int(snucleotide_lengths[2])]
p_start = int(tokens[8])
p_end = int(tokens[9])
n_start = int(tokens[6])
n_end = int(tokens[7])
if n_start < n_end: # these could be negative
c1_hit_length = nucleotide_lengths[0] - n_start + 1
c2_hit_length = nucleotide_lengths[2] - (sum(nucleotide_lengths) - n_end + 1)
else:
c1_hit_length = nucleotide_lengths[0] - n_end + 1
c2_hit_length = nucleotide_lengths[2] - (sum(nucleotide_lengths) - n_start + 1)
if c1_hit_length < 0:
c1_hit_length = 0
if c2_hit_length < 0:
c2_hit_length = 0
a_start = p_start + c1_hit_length / 3
a_end = p_end - c2_hit_length / 3
# PARSING ID
# ID HANDLING change it if you need it
transcript_isoform = _id[2:].split(';')[0]
# PARSING ID
try:
if a_start > 0 and a_end > 0 and a_end >= a_start and "-" not in uniprot:
if _id[0] == "I":
list_prot = []
if ("E" + _id[1:len(_id)]) in has_mapping:
list_prot = has_mapping["E" + _id[1:len(_id)]]
print >> write_to, _id + "\t" + uniprot + "\t" + repr(p_start) + "\t" + repr(a_start) + \
"\t" + repr(a_end) + "\t" + repr(p_end) + "\t" + repr(list_prot) + \
"\t" + str(uniprot_ddbb[uniprot]) + "\t" + str(isoforms_ddbb[transcript_isoform])
else:
list_prot = []
if ("I" + _id[1:len(_id)]) in has_mapping:
list_prot = has_mapping["I" + _id[1:len(_id)]]
print >> write_to, _id + "\t" + uniprot + "\t" + repr(p_start) + "\t" + repr(a_start) + \
"\t" + repr(a_start) + "\t" + repr(p_end) + "\t" + repr(list_prot) + \
"\t" + str(uniprot_ddbb[uniprot]) + "\t" + \
str(isoforms_ddbb[transcript_isoform])
else:
pass
except KeyError, e:
print '%s NOT FOUND ' % str(e)
print 'Please check your Uniprot file or/and your poll of isoforms'
# print id
| kenshay/ImageScripter | refs/heads/master | ProgramData/SystemFiles/Python/Lib/encodings/email/__init__.py | 262 |
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org
"""A package for parsing, handling, and generating email messages."""
__version__ = '4.0.3'
__all__ = [
# Old names
'base64MIME',
'Charset',
'Encoders',
'Errors',
'Generator',
'Header',
'Iterators',
'Message',
'MIMEAudio',
'MIMEBase',
'MIMEImage',
'MIMEMessage',
'MIMEMultipart',
'MIMENonMultipart',
'MIMEText',
'Parser',
'quopriMIME',
'Utils',
'message_from_string',
'message_from_file',
# new names
'base64mime',
'charset',
'encoders',
'errors',
'generator',
'header',
'iterators',
'message',
'mime',
'parser',
'quoprimime',
'utils',
]
# Some convenience routines. Don't import Parser and Message as side-effects
# of importing email since those cascadingly import most of the rest of the
# email package.
def message_from_string(s, *args, **kws):
"""Parse a string into a Message object model.
Optional _class and strict are passed to the Parser constructor.
"""
from email.parser import Parser
return Parser(*args, **kws).parsestr(s)
def message_from_file(fp, *args, **kws):
"""Read a file and parse its contents into a Message object model.
Optional _class and strict are passed to the Parser constructor.
"""
from email.parser import Parser
return Parser(*args, **kws).parse(fp)
# Lazy loading to provide name mapping from new-style names (PEP 8 compatible
# email 4.0 module names), to old-style names (email 3.0 module names).
import sys
class LazyImporter(object):
def __init__(self, module_name):
self.__name__ = 'email.' + module_name
def __getattr__(self, name):
__import__(self.__name__)
mod = sys.modules[self.__name__]
self.__dict__.update(mod.__dict__)
return getattr(mod, name)
_LOWERNAMES = [
# email.<old name> -> email.<new name is lowercased old name>
'Charset',
'Encoders',
'Errors',
'FeedParser',
'Generator',
'Header',
'Iterators',
'Message',
'Parser',
'Utils',
'base64MIME',
'quopriMIME',
]
_MIMENAMES = [
# email.MIME<old name> -> email.mime.<new name is lowercased old name>
'Audio',
'Base',
'Image',
'Message',
'Multipart',
'NonMultipart',
'Text',
]
for _name in _LOWERNAMES:
importer = LazyImporter(_name.lower())
sys.modules['email.' + _name] = importer
setattr(sys.modules['email'], _name, importer)
import email.mime
for _name in _MIMENAMES:
importer = LazyImporter('mime.' + _name.lower())
sys.modules['email.MIME' + _name] = importer
setattr(sys.modules['email'], 'MIME' + _name, importer)
setattr(sys.modules['email.mime'], _name, importer)
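# Illustration (a sketch): after importing this package, old-style names
# resolve lazily to the new-style modules on first attribute access:
#
#     from email import Utils    # returns the LazyImporter shim
#     Utils.formatdate           # triggers the import of email.utils
#     # ...and is the same object as email.utils.formatdate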
| JackDandy/SickGear | refs/heads/master | sickbeard/providers/ettv.py | 2 |
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import traceback
from . import generic
from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
from _23 import unidecode
from six import iteritems
class ETTVProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'ETTV')
self.url_home = ['https://www.ettvdl.com/']
self.url_vars = {'browse': 'torrents.php?%s&search=%s&sort=id&order=desc',
'search': 'torrents-search.php?%s&search=%s&sort=id&order=desc'}
self.url_tmpl = {'config_provider_home_uri': '%(home)s',
'browse': '%(home)s%(vars)s', 'search': '%(home)s%(vars)s'}
self.url_drop = ['http://et']
self.categories = {'Season': [7], 'Episode': [41, 5, 50, 72, 77]}
self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']
self.minseed, self.minleech = 2 * [None]
@staticmethod
def _has_signature(data=None):
return data and re.search(r'(?i)(?:ettv)', data)
def _search_provider(self, search_params, **kwargs):
results = []
if not self.url:
return results
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'torrent/'})])
for mode in search_params:
for search_string in search_params[mode]:
search_string = unidecode(search_string)
search_url = self.urls[('browse', 'search')['Cache' != mode]] % (
self._categories_string(mode), search_string)
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:
if not html or self._has_no_results(html):
raise generic.HaltParseException
with BS4Parser(html, parse_only=dict(table={'class': (lambda at: at and 'table' in at)})) as tbl:
tbl_rows = [] if not tbl else tbl.find_all('tr')
if not len(tbl_rows):
raise generic.HaltParseException
head = None
for tr in tbl_rows[1:]:
cells = tr.find_all('td')
if 6 > len(cells):
continue
try:
head = head if None is not head else self._header_row(
tr, {'seed': r'seed', 'leech': r'leech', 'size': r'^size'})
seeders, leechers, size = [try_int(n, n) for n in [
cells[head[x]].get_text().strip() for x in ('seed', 'leech', 'size')]]
if self._reject_item(seeders, leechers):
continue
info = tr.find('a', href=rc['info'])
title = (info.attrs.get('title') or info.get_text()).strip()
download_url = self._link(info.get('href'))
except (AttributeError, TypeError, ValueError, IndexError):
continue
if title and download_url:
items[mode].append((title, download_url, seeders, self._bytesizer(size)))
except generic.HaltParseException:
pass
except (BaseException, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_search(mode, len(items[mode]) - cnt, search_url)
if len(items[mode]):
break
results = self._sort_seeding(mode, results + items[mode])
return results
@staticmethod
def change_params(params):
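# Rewrites each search term: spaces become dots, and every term is emitted
# twice, once prefixed with '%2B ' (a URL-encoded '+') and once plain.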
for x, types in enumerate(params):
for y, ep_type in enumerate(types):
new_list = []
for t in params[0].get('Episode') or params[0].get('Season', []):
t = t.replace(' ', '.')
new_list += ['%2B ' + t, t]
params[x][ep_type] = new_list
return params
def _season_strings(self, ep_obj, **kwargs):
return self.change_params(super(ETTVProvider, self)._season_strings(ep_obj, **kwargs))
def _episode_strings(self, ep_obj, **kwargs):
return self.change_params(super(ETTVProvider, self)._episode_strings(ep_obj, **kwargs))
def get_data(self, url):
result = None
html = self.get_url(url, timeout=90)
if self.should_skip():
return result
try:
result = re.findall('(?i)"(magnet:[^"]+?)"', html)[0]
except IndexError:
logger.log('Failed: no magnet in response', logger.DEBUG)
return result
provider = ETTVProvider()
| TNick/pyl2extra | refs/heads/master | pyl2extra/scripts/datasets/tests/test_imagenet.py | 1 |
"""
Tests for the imagenet dataset scripts.
"""
__authors__ = "Nicu Tofan"
__copyright__ = "Copyright 2015, Nicu Tofan"
__credits__ = ["Nicu Tofan"]
__license__ = "3-clause BSD"
__maintainer__ = "Nicu Tofan"
__email__ = "nicu.tofan@gmail.com"
import functools
import unittest
from mock import patch, Mock
import os
import shutil
import tempfile
from xml.dom import minidom
from xml.parsers.expat import ExpatError
from pyl2extra.scripts.datasets import imagenet
TEST_SYNSETS = """
n04386664
n10731013
n03002948
n07609632
n03003091
n10562968
n07586179
n09929577
n07933530
n04136161
n03602194
n03703075
n12990597
"""
RELEASE_STATUS_SAMPLE = """<ReleaseStatus>
<releaseData>fall2011</releaseData>
<images>
<synsetInfos>
<synset wnid="n10801802" released="1" version="winter11" numImages="269"/>
<synset wnid="n10772937" released="1" version="winter11" numImages="58"/>
<synset wnid="n10028541" released="1" version="winter11" numImages="201"/>
<synset wnid="n10712374" released="1" version="winter11" numImages="175"/>
<synset wnid="n09878921" released="1" version="winter11" numImages="46"/>
<synset wnid="n10789415" released="1" version="winter11" numImages="48"/>
<synset wnid="n10370955" released="1" version="winter11" numImages="502"/>
</synsetInfos>
</images>
</ReleaseStatus>"""
GET_MAPPING_SAMPLE = """
n02109150_5962 http://1.jpg
n02109150_5969 http://2.jpg
n02109150_5976 http://3.jpg
n02109150_5981 http://4.jpg
n02109150_307 http://www.scbeacon.com/beacon_issues/03_09_18/images/Guidedog_pjh_091803.jpg
n02109150_323 http://www.braille.be/content/lig_braille/rapport_2005/img_05.jpg
"""
@patch('pyl2extra.scripts.datasets.imagenet.urllib2.urlopen')
class TestListFromUrl(unittest.TestCase):
"""
Tests for list_from_url().
"""
def test_simple(self, mock_urlopen):
"""
testing list_from_url().
"""
mok = Mock()
mok.read.side_effect = ['resp1', 'resp1\nresp2', '', ' a ']
mock_urlopen.return_value = mok
lst = imagenet.list_from_url('some_url')
self.assertListEqual(lst, ['resp1'])
lst = imagenet.list_from_url('some_url')
self.assertListEqual(lst, ['resp1', 'resp2'])
lst = imagenet.list_from_url('some_url')
self.assertListEqual(lst, [''])
lst = imagenet.list_from_url('some_url')
self.assertListEqual(lst, [' a '])
@patch('pyl2extra.scripts.datasets.imagenet.urllib2.urlopen')
class TestDenseListFromUrl(unittest.TestCase):
"""
Tests for dense_list_from_url().
"""
def test_simple(self, mock_urlopen):
"""
testing dense_list_from_url().
"""
mok = Mock()
mok.read.side_effect = ['resp1', 'resp1\nresp2', '',
' ', ' a ', ' a \n b \n c ',
'\n\na\n\nb\n\n c']
mock_urlopen.return_value = mok
lst = imagenet.dense_list_from_url('some_url')
self.assertListEqual(lst, ['resp1'])
lst = imagenet.dense_list_from_url('some_url')
self.assertListEqual(lst, ['resp1', 'resp2'])
lst = imagenet.dense_list_from_url('some_url')
self.assertListEqual(lst, [])
lst = imagenet.dense_list_from_url('some_url')
self.assertListEqual(lst, [])
lst = imagenet.dense_list_from_url('some_url')
self.assertListEqual(lst, ['a'])
lst = imagenet.dense_list_from_url('some_url')
self.assertListEqual(lst, ['a', 'b', 'c'])
lst = imagenet.dense_list_from_url('some_url')
self.assertListEqual(lst, ['a', 'b', 'c'])
class TestXmlElemByPath(unittest.TestCase):
"""
Tests for xml_elem_by_path().
"""
@functools.wraps(unittest.TestCase.setUp)
def setUp(self):
self.doc = minidom.Document()
root = self.doc.createElement('root')
self.doc.appendChild(root)
self.lv1 = self.doc.createElement('level1-1')
root.appendChild(self.lv1)
self.lv11 = self.doc.createElement('level2-1')
self.lv1.appendChild(self.lv11)
lv111 = self.doc.createElement('level3-1')
self.lv11.appendChild(lv111)
root.appendChild(self.doc.createElement('level1-2'))
root.appendChild(self.doc.createElement('level1-3'))
lv4 = self.doc.createElement('level1-4')
root.appendChild(lv4)
@functools.wraps(unittest.TestCase.tearDown)
def tearDown(self):
del self.doc
def test_simple(self):
"""
testing xml_elem_by_path().
"""
elm = imagenet.xml_elem_by_path(self.doc, [])
self.assertEqual(elm, self.doc.documentElement)
self.assertRaises(IndexError, imagenet.xml_elem_by_path,
self.doc, ['nonexisting'])
self.assertRaises(IndexError, imagenet.xml_elem_by_path,
self.doc, ['level1-1', 'nonexisting'])
elm = imagenet.xml_elem_by_path(self.doc, ['level1-1'])
self.assertEqual(elm, self.lv1)
elm = imagenet.xml_elem_by_path(self.doc, ['level1-1', 'level2-1'])
self.assertEqual(elm, self.lv11)
@patch('pyl2extra.scripts.datasets.imagenet.urllib2.urlopen')
class TestXmlFromUrl(unittest.TestCase):
"""
Tests for xml_from_url().
"""
def test_simple(self, mock_urlopen):
"""
testing xml_from_url().
"""
mok = Mock()
mok.read.side_effect = ['<root></root>',
'<root><el>test text</el></root>',
'',
' a ']
mock_urlopen.return_value = mok
doc = imagenet.xml_from_url('some_url')
self.assertEqual(doc.documentElement.tagName, 'root')
doc = imagenet.xml_from_url('some_url')
self.assertEqual(doc.documentElement.tagName, 'root')
self.assertRaises(ExpatError, imagenet.xml_from_url, 'some_url')
self.assertRaises(ExpatError, imagenet.xml_from_url, 'some_url')
@patch('pyl2extra.scripts.datasets.imagenet.urllib2.urlopen')
class TestGetSynsets(unittest.TestCase):
"""
Tests for get_synsets().
"""
def test_simple(self, mock_urlopen):
"""
testing get_synsets().
"""
mok = Mock()
mok.read.side_effect = [TEST_SYNSETS]
mock_urlopen.return_value = mok
lst = imagenet.get_synsets()
self.assertListEqual(lst, ['n04386664', 'n10731013', 'n03002948',
'n07609632', 'n03003091', 'n10562968',
'n07586179', 'n09929577', 'n07933530',
'n04136161', 'n03602194', 'n03703075',
'n12990597'])
@patch('pyl2extra.scripts.datasets.imagenet.urllib2.urlopen')
class TestGetWords(unittest.TestCase):
"""
Tests for get_words().
"""
def test_simple(self, mock_urlopen):
"""
testing get_words().
"""
mok = Mock()
mok.read.side_effect = ["chickeree\nDouglas squirrel\n"
"Tamiasciurus douglasi"]
mock_urlopen.return_value = mok
lst = imagenet.get_words('some_url/%s', 'n07609632')
self.assertListEqual(lst, ['chickeree',
'Douglas squirrel',
'Tamiasciurus douglasi'])
@patch('pyl2extra.scripts.datasets.imagenet.urllib2.urlopen')
class TestGetHypos(unittest.TestCase):
"""
Tests for get_hypos().
"""
@functools.wraps(unittest.TestCase.setUp)
def setUp(self):
pass
@functools.wraps(unittest.TestCase.tearDown)
def tearDown(self):
pass
def test_simple(self, mock_urlopen):
"""
testing get_hypos().
"""
mok = Mock()
mok.read.side_effect = [TEST_SYNSETS]
mock_urlopen.return_value = mok
lst = imagenet.get_hypos('some_url/%s-%s', 'n07609632', True)
self.assertListEqual(lst, ['n04386664', 'n10731013', 'n03002948',
'n07609632', 'n03003091', 'n10562968',
'n07586179', 'n09929577', 'n07933530',
'n04136161', 'n03602194', 'n03703075',
'n12990597'])
@patch('pyl2extra.scripts.datasets.imagenet.urllib2.urlopen')
class TestGetImageCount(unittest.TestCase):
"""
Tests for get_image_count().
"""
@functools.wraps(unittest.TestCase.setUp)
def setUp(self):
self.sample = RELEASE_STATUS_SAMPLE
@functools.wraps(unittest.TestCase.tearDown)
def tearDown(self):
pass
def test_simple(self, mock_urlopen):
"""
testing get_image_count().
"""
mok = Mock()
mok.read.side_effect = [self.sample]
mock_urlopen.return_value = mok
lst = imagenet.get_image_count('some_url', True)
self.assertDictEqual(lst, {'n10801802': 269,
'n10772937': 58,
'n10028541': 201,
'n10712374': 175,
'n09878921': 46,
'n10789415': 48,
'n10370955': 502})
@patch('pyl2extra.scripts.datasets.imagenet.urllib2.urlopen')
class TestGetImageSynsets(unittest.TestCase):
"""
Tests for get_image_synsets().
"""
@functools.wraps(unittest.TestCase.setUp)
def setUp(self):
self.sample = RELEASE_STATUS_SAMPLE
@functools.wraps(unittest.TestCase.tearDown)
def tearDown(self):
pass
def test_simple(self, mock_urlopen):
"""
testing get_image_synsets().
"""
mok = Mock()
mok.read.side_effect = [self.sample]
mock_urlopen.return_value = mok
lst = imagenet.get_image_synsets('some_url', True)
self.assertListEqual(lst, ['n10801802', 'n10772937', 'n10028541',
'n10712374', 'n09878921', 'n10789415',
'n10370955'])
@patch('pyl2extra.scripts.datasets.imagenet.urllib2.urlopen')
class TestGetImageUrls(unittest.TestCase):
"""
Tests for get_image_urls().
"""
@functools.wraps(unittest.TestCase.setUp)
def setUp(self):
pass
@functools.wraps(unittest.TestCase.tearDown)
def tearDown(self):
pass
def test_simple(self, mock_urlopen):
"""
testing get_image_urls().
"""
mok = Mock()
mok.read.side_effect = [GET_MAPPING_SAMPLE]
mock_urlopen.return_value = mok
lst = imagenet.get_image_urls('some_url/%s', 'n02109150')
self.assertDictEqual(lst, {'n02109150_5962': 'http://1.jpg',
'n02109150_5969': 'http://2.jpg',
'n02109150_5976': 'http://3.jpg',
'n02109150_5981': 'http://4.jpg',
'n02109150_307': 'http://www.scbeacon.com/beacon_issues/03_09_18/images/Guidedog_pjh_091803.jpg',
'n02109150_323': 'http://www.braille.be/content/lig_braille/rapport_2005/img_05.jpg'})
class TestHashFile(unittest.TestCase):
"""
Tests for hashfile().
"""
@functools.wraps(unittest.TestCase.setUp)
def setUp(self):
self.tmp_dir = tempfile.mkdtemp()
self.file_empty = os.path.join(self.tmp_dir, 'file_empty.txt')
with open(self.file_empty, 'wt') as fhnd:
fhnd.write('')
self.file_a = os.path.join(self.tmp_dir, 'file_a.txt')
with open(self.file_a, 'wt') as fhnd:
fhnd.write('a')
self.file_line = os.path.join(self.tmp_dir, 'file_line.txt')
with open(self.file_line, 'wt') as fhnd:
fhnd.write('abcdefghij')
self.file_mlines = os.path.join(self.tmp_dir, 'file_mlines.txt')
with open(self.file_mlines, 'wt') as fhnd:
fhnd.write('abcdefghij\nabcdefghij\nabcdefghij\n')
@functools.wraps(unittest.TestCase.tearDown)
def tearDown(self):
shutil.rmtree(self.tmp_dir)
del self.tmp_dir
def test_simple(self):
"""
testing hashfile().
"""
self.assertEqual(imagenet.hashfile(self.file_empty),
'd41d8cd98f00b204e9800998ecf8427e')
self.assertEqual(imagenet.hashfile(self.file_a),
'0cc175b9c0f1b6a831c399e269772661')
self.assertEqual(imagenet.hashfile(self.file_line),
'a925576942e94b2ef57a066101b48876')
self.assertEqual(imagenet.hashfile(self.file_mlines),
'f90932f561733ea4558ada7ac7d27527')
if __name__ == '__main__':
unittest.main()
| helenst/django | refs/heads/master | django/contrib/gis/forms/widgets.py | 39 |
from __future__ import unicode_literals
import logging
from django.conf import settings
from django.contrib.gis import gdal
from django.contrib.gis.geos import GEOSGeometry, GEOSException
from django.forms.widgets import Widget
from django.template import loader
from django.utils import six
from django.utils import translation
logger = logging.getLogger('django.contrib.gis')
class BaseGeometryWidget(Widget):
"""
The base class for rich geometry widgets.
Renders a map using the WKT of the geometry.
"""
geom_type = 'GEOMETRY'
map_srid = 4326
map_width = 600
map_height = 400
display_raw = False
supports_3d = False
template_name = '' # set on subclasses
def __init__(self, attrs=None):
self.attrs = {}
for key in ('geom_type', 'map_srid', 'map_width', 'map_height', 'display_raw'):
self.attrs[key] = getattr(self, key)
if attrs:
self.attrs.update(attrs)
def serialize(self, value):
return value.wkt if value else ''
def deserialize(self, value):
try:
return GEOSGeometry(value, self.map_srid)
except (GEOSException, ValueError) as err:
logger.error(
"Error creating geometry from value '%s' (%s)" % (
value, err)
)
return None
def render(self, name, value, attrs=None):
# If a string reaches here (via a validation error on another
# field) then just reconstruct the Geometry.
if isinstance(value, six.string_types):
value = self.deserialize(value)
if value:
# Check that srid of value and map match
if value.srid != self.map_srid:
try:
ogr = value.ogr
ogr.transform(self.map_srid)
value = ogr
except gdal.OGRException as err:
logger.error(
"Error transforming geometry from srid '%s' to srid '%s' (%s)" % (
value.srid, self.map_srid, err)
)
context = self.build_attrs(
attrs,
name=name,
module='geodjango_%s' % name.replace('-', '_'), # JS-safe
serialized=self.serialize(value),
geom_type=gdal.OGRGeomType(self.attrs['geom_type']),
STATIC_URL=settings.STATIC_URL,
LANGUAGE_BIDI=translation.get_language_bidi(),
)
return loader.render_to_string(self.template_name, context)
class OpenLayersWidget(BaseGeometryWidget):
template_name = 'gis/openlayers.html'
class Media:
js = (
'http://openlayers.org/api/2.13/OpenLayers.js',
'gis/js/OLMapWidget.js',
)
class OSMWidget(BaseGeometryWidget):
"""
An OpenLayers/OpenStreetMap-based widget.
"""
template_name = 'gis/openlayers-osm.html'
default_lon = 5
default_lat = 47
class Media:
js = (
'http://openlayers.org/api/2.13/OpenLayers.js',
'http://www.openstreetmap.org/openlayers/OpenStreetMap.js',
'gis/js/OLMapWidget.js',
)
def __init__(self, attrs=None):
super(OSMWidget, self).__init__()
for key in ('default_lon', 'default_lat'):
self.attrs[key] = getattr(self, key)
if attrs:
self.attrs.update(attrs)
@property
def map_srid(self):
# Use the official spherical mercator projection SRID when GDAL is
# available; otherwise, fallback to 900913.
if gdal.HAS_GDAL:
return 3857
else:
return 900913
| le9i0nx/ansible | refs/heads/devel | lib/ansible/modules/network/f5/bigip_monitor_http.py | 5 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_monitor_http
short_description: Manages F5 BIG-IP LTM http monitors
description: Manages F5 BIG-IP LTM http monitors.
version_added: "2.5"
options:
name:
description:
- Monitor name.
required: True
aliases:
- monitor
parent:
description:
- The parent template of this monitor template. Once this value has
been set, it cannot be changed. By default, this value is the C(http)
parent on the C(Common) partition.
default: "/Common/http"
send:
description:
- The send string for the monitor call. When creating a new monitor, if
this value is not provided, the default C(GET /\r\n) will be used.
receive:
description:
- The receive string for the monitor call.
receive_disable:
description:
- This setting works like C(receive), except that the system marks the node
or pool member disabled when its response matches the C(receive_disable)
string but not C(receive). To use this setting, you must specify both
C(receive_disable) and C(receive).
ip:
description:
- IP address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'.
port:
description:
- Port address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'. Note that if specifying an IP address, a value between 1 and 65535
must be specified.
interval:
description:
- The interval specifying how frequently the monitor instance of this
template will run. If this parameter is not provided when creating
a new monitor, then the default value will be 5. This value B(must)
be less than the C(timeout) value.
timeout:
description:
- The number of seconds in which the node or service must respond to
the monitor request. If the target responds within the set time
period, it is considered up. If the target does not respond within
the set time period, it is considered down. You can change this
number to any number you want, however, it should be 3 times the
interval number of seconds plus 1 second. If this parameter is not
provided when creating a new monitor, then the default value will be 16.
time_until_up:
description:
- Specifies the amount of time in seconds after the first successful
response before a node will be marked up. A value of 0 will cause a
node to be marked up immediately after a valid response is received
from the node. If this parameter is not provided when creating
a new monitor, then the default value will be 0.
target_username:
description:
- Specifies the user name, if the monitored target requires authentication.
target_password:
description:
- Specifies the password, if the monitored target requires authentication.
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
notes:
- Requires the f5-sdk Python package on the host. This is as easy as pip
install f5-sdk.
- Requires BIG-IP software version >= 12
requirements:
- f5-sdk >= 2.2.3
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create HTTP Monitor
bigip_monitor_http:
state: present
ip: 10.10.10.10
server: lb.mydomain.com
user: admin
password: secret
name: my_http_monitor
delegate_to: localhost
- name: Remove HTTP Monitor
bigip_monitor_http:
state: absent
server: lb.mydomain.com
user: admin
password: secret
name: my_http_monitor
delegate_to: localhost
- name: Include a username and password in the HTTP monitor
bigip_monitor_http:
state: absent
server: lb.mydomain.com
user: admin
password: secret
name: my_http_monitor
target_username: monitor_user
target_password: monitor_pass
delegate_to: localhost
'''
RETURN = r'''
parent:
description: New parent template of the monitor.
returned: changed
type: string
sample: http
ip:
description: The new IP of IP/port definition.
returned: changed
type: string
sample: 10.12.13.14
interval:
description: The new interval in which to run the monitor check.
returned: changed
type: int
sample: 2
timeout:
description: The new timeout in which the remote system must respond to the monitor.
returned: changed
type: int
sample: 10
time_until_up:
description: The new time in which to mark a system as up after first successful response.
returned: changed
type: int
sample: 2
'''
import os
try:
import netaddr
HAS_NETADDR = True
except ImportError:
HAS_NETADDR = False
from ansible.module_utils.f5_utils import AnsibleF5Client
from ansible.module_utils.f5_utils import AnsibleF5Parameters
from ansible.module_utils.f5_utils import HAS_F5SDK
from ansible.module_utils.f5_utils import F5ModuleError
from ansible.module_utils.six import iteritems
from collections import defaultdict
try:
from ansible.module_utils.f5_utils import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
class Parameters(AnsibleF5Parameters):
api_map = {
'timeUntilUp': 'time_until_up',
'defaultsFrom': 'parent',
'recv': 'receive'
}
api_attributes = [
'timeUntilUp', 'defaultsFrom', 'interval', 'timeout', 'recv', 'send',
'destination', 'username', 'password'
]
returnables = [
'parent', 'send', 'receive', 'ip', 'port', 'interval', 'timeout',
'time_until_up'
]
updatables = [
'destination', 'send', 'receive', 'interval', 'timeout', 'time_until_up',
'target_username', 'target_password'
]
def __init__(self, params=None):
self._values = defaultdict(lambda: None)
self._values['__warnings'] = []
if params:
self.update(params=params)
def update(self, params=None):
if params:
for k, v in iteritems(params):
if self.api_map is not None and k in self.api_map:
map_key = self.api_map[k]
else:
map_key = k
# Handle weird API parameters like `dns.proxy.__iter__` by
# using a map provided by the module developer
class_attr = getattr(type(self), map_key, None)
if isinstance(class_attr, property):
# There is a mapped value for the api_map key
if class_attr.fset is None:
# If the mapped value does not have
# an associated setter
self._values[map_key] = v
else:
# The mapped value has a setter
setattr(self, map_key, v)
else:
# If the mapped value is not a @property
self._values[map_key] = v
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
def api_params(self):
result = {}
for api_attribute in self.api_attributes:
if self.api_map is not None and api_attribute in self.api_map:
result[api_attribute] = getattr(self, self.api_map[api_attribute])
else:
result[api_attribute] = getattr(self, api_attribute)
result = self._filter_params(result)
return result
@property
def destination(self):
if self.ip is None and self.port is None:
return None
destination = '{0}:{1}'.format(self.ip, self.port)
return destination
@destination.setter
def destination(self, value):
ip, port = value.split(':')
self._values['ip'] = ip
self._values['port'] = port
@property
def interval(self):
if self._values['interval'] is None:
return None
# Per BZ617284, the BIG-IP UI does not raise a warning about this.
# So I do
if not 1 <= int(self._values['interval']) <= 86400:
raise F5ModuleError(
"Interval value must be between 1 and 86400"
)
return int(self._values['interval'])
@property
def timeout(self):
if self._values['timeout'] is None:
return None
return int(self._values['timeout'])
@property
def ip(self):
if self._values['ip'] is None:
return None
try:
if self._values['ip'] in ['*', '0.0.0.0']:
return '*'
result = str(netaddr.IPAddress(self._values['ip']))
return result
except netaddr.core.AddrFormatError:
raise F5ModuleError(
"The provided 'ip' parameter is not an IP address."
)
@property
def port(self):
if self._values['port'] is None:
return None
elif self._values['port'] == '*':
return '*'
return int(self._values['port'])
@property
def time_until_up(self):
if self._values['time_until_up'] is None:
return None
return int(self._values['time_until_up'])
@property
def parent(self):
if self._values['parent'] is None:
return None
if self._values['parent'].startswith('/'):
parent = os.path.basename(self._values['parent'])
result = '/{0}/{1}'.format(self.partition, parent)
else:
result = '/{0}/{1}'.format(self.partition, self._values['parent'])
return result
@property
def type(self):
return 'http'
@property
def username(self):
return self._values['target_username']
@property
def password(self):
return self._values['target_password']
class Changes(Parameters):
pass
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
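# Dispatch to a property named after ``param`` (e.g. ``destination``,
# ``interval``) when one exists; otherwise fall back to the plain
# want-vs-have comparison in __default().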
try:
result = getattr(self, param)
return result
except AttributeError:
result = self.__default(param)
return result
@property
def parent(self):
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent monitor cannot be changed"
)
@property
def destination(self):
if self.want.ip is None and self.want.port is None:
return None
if self.want.port is None:
self.want.update({'port': self.have.port})
if self.want.ip is None:
self.want.update({'ip': self.have.ip})
if self.want.port in [None, '*'] and self.want.ip != '*':
raise F5ModuleError(
"Specifying an IP address requires that a port number be specified"
)
if self.want.destination != self.have.destination:
return self.want.destination
@property
def interval(self):
if self.want.timeout is not None and self.want.interval is not None:
if self.want.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.timeout is not None:
if self.have.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.interval is not None:
if self.want.interval >= self.have.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
if self.want.interval != self.have.interval:
return self.want.interval
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
class ModuleManager(object):
def __init__(self, client):
self.client = client
self.have = None
self.want = Parameters(self.client.module.params)
self.changes = Changes()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = Changes(changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = Changes(changed)
return True
return False
def _announce_deprecations(self):
warnings = []
if self.want:
warnings += self.want._values.get('__warnings', [])
if self.have:
warnings += self.have._values.get('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
changed = False
result = dict()
state = self.want.state
try:
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
changes = self.changes.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations()
return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def create(self):
self._set_changed_options()
if self.want.timeout is None:
self.want.update({'timeout': 16})
if self.want.interval is None:
self.want.update({'interval': 5})
if self.want.time_until_up is None:
self.want.update({'time_until_up': 0})
if self.want.ip is None:
self.want.update({'ip': '*'})
if self.want.port is None:
self.want.update({'port': '*'})
if self.want.send is None:
self.want.update({'send': 'GET /\r\n'})
if self.client.check_mode:
return True
self.create_on_device()
return True
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.client.check_mode:
return True
self.update_on_device()
return True
def absent(self):
if self.exists():
return self.remove()
return False
def remove(self):
if self.client.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the monitor.")
return True
def read_current_from_device(self):
resource = self.client.api.tm.ltm.monitor.https.http.load(
name=self.want.name,
partition=self.want.partition
)
result = resource.attrs
return Parameters(result)
def exists(self):
result = self.client.api.tm.ltm.monitor.https.http.exists(
name=self.want.name,
partition=self.want.partition
)
return result
def update_on_device(self):
params = self.want.api_params()
result = self.client.api.tm.ltm.monitor.https.http.load(
name=self.want.name,
partition=self.want.partition
)
result.modify(**params)
def create_on_device(self):
params = self.want.api_params()
self.client.api.tm.ltm.monitor.https.http.create(
name=self.want.name,
partition=self.want.partition,
**params
)
def remove_from_device(self):
result = self.client.api.tm.ltm.monitor.https.http.load(
name=self.want.name,
partition=self.want.partition
)
if result:
result.delete()
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
self.argument_spec = dict(
name=dict(required=True),
parent=dict(default='http'),
send=dict(),
receive=dict(),
receive_disable=dict(required=False),
ip=dict(),
port=dict(type='int'),
interval=dict(type='int'),
timeout=dict(type='int'),
time_until_up=dict(type='int'),
target_username=dict(),
target_password=dict(no_log=True),
# Deprecated params
parent_partition=dict(
removed_in_version='2.4'
)
)
self.f5_product_name = 'bigip'
def cleanup_tokens(client):
try:
resource = client.api.shared.authz.tokens_s.token.load(
name=client.api.icrs.token
)
resource.delete()
except Exception:
pass
def main():
spec = ArgumentSpec()
client = AnsibleF5Client(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
f5_product_name=spec.f5_product_name
)
try:
if not HAS_F5SDK:
raise F5ModuleError("The python f5-sdk module is required")
if not HAS_NETADDR:
raise F5ModuleError("The python netaddr module is required")
mm = ModuleManager(client)
results = mm.exec_module()
cleanup_tokens(client)
client.module.exit_json(**results)
except F5ModuleError as e:
cleanup_tokens(client)
client.module.fail_json(msg=str(e))
if __name__ == '__main__':
main()
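# A hypothetical playbook task for the module defined above (the module name
# and all values are illustrative assumptions, not taken from this file):
#
#   - name: Create an HTTP monitor
#     bigip_monitor_http:
#       name: my_http_monitor
#       parent: http
#       interval: 5
#       timeout: 16
#       state: present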
|
shanglt/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/niconico.py
|
34
|
# encoding: utf-8
from __future__ import unicode_literals
import re
import json
import datetime
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_request,
compat_urlparse,
)
from ..utils import (
encode_dict,
ExtractorError,
int_or_none,
parse_duration,
parse_iso8601,
xpath_text,
determine_ext,
)
class NiconicoIE(InfoExtractor):
IE_NAME = 'niconico'
IE_DESC = 'ニコニコ動画'
_TESTS = [{
'url': 'http://www.nicovideo.jp/watch/sm22312215',
'md5': 'd1a75c0823e2f629128c43e1212760f9',
'info_dict': {
'id': 'sm22312215',
'ext': 'mp4',
'title': 'Big Buck Bunny',
'uploader': 'takuya0301',
'uploader_id': '2698420',
'upload_date': '20131123',
'timestamp': 1385182762,
'description': '(c) copyright 2008, Blender Foundation / www.bigbuckbunny.org',
'duration': 33,
},
}, {
# File downloaded with and without credentials are different, so omit
# the md5 field
'url': 'http://www.nicovideo.jp/watch/nm14296458',
'info_dict': {
'id': 'nm14296458',
'ext': 'swf',
'title': '【鏡音リン】Dance on media【オリジナル】take2!',
'description': 'md5:689f066d74610b3b22e0f1739add0f58',
'uploader': 'りょうた',
'uploader_id': '18822557',
'upload_date': '20110429',
'timestamp': 1304065916,
'duration': 209,
},
}, {
        # video exists but is marked as "deleted"
# md5 is unstable
'url': 'http://www.nicovideo.jp/watch/sm10000',
'info_dict': {
'id': 'sm10000',
'ext': 'unknown_video',
'description': 'deleted',
'title': 'ドラえもんエターナル第3話「決戦第3新東京市」<前編>',
'upload_date': '20071224',
'timestamp': 1198527840, # timestamp field has different value if logged in
'duration': 304,
},
}, {
'url': 'http://www.nicovideo.jp/watch/so22543406',
'info_dict': {
'id': '1388129933',
'ext': 'mp4',
'title': '【第1回】RADIOアニメロミックス ラブライブ!~のぞえりRadio Garden~',
'description': 'md5:b27d224bb0ff53d3c8269e9f8b561cf1',
'timestamp': 1388851200,
'upload_date': '20140104',
'uploader': 'アニメロチャンネル',
'uploader_id': '312',
}
}]
_VALID_URL = r'https?://(?:www\.|secure\.)?nicovideo\.jp/watch/(?P<id>(?:[a-z]{2})?[0-9]+)'
_NETRC_MACHINE = 'niconico'
# Determine whether the downloader used authentication to download video
_AUTHENTICATED = False
def _real_initialize(self):
self._login()
def _login(self):
(username, password) = self._get_login_info()
# No authentication to be performed
if not username:
return True
# Log in
login_form_strs = {
'mail': username,
'password': password,
}
login_data = compat_urllib_parse.urlencode(encode_dict(login_form_strs)).encode('utf-8')
request = compat_urllib_request.Request(
'https://secure.nicovideo.jp/secure/login', login_data)
login_results = self._download_webpage(
request, None, note='Logging in', errnote='Unable to log in')
if re.search(r'(?i)<h1 class="mb8p4">Log in error</h1>', login_results) is not None:
self._downloader.report_warning('unable to log in: bad username or password')
return False
# Successful login
self._AUTHENTICATED = True
return True
def _real_extract(self, url):
video_id = self._match_id(url)
# Get video webpage. We are not actually interested in it for normal
# cases, but need the cookies in order to be able to download the
# info webpage
webpage, handle = self._download_webpage_handle(
'http://www.nicovideo.jp/watch/' + video_id, video_id)
if video_id.startswith('so'):
video_id = self._match_id(handle.geturl())
video_info = self._download_xml(
'http://ext.nicovideo.jp/api/getthumbinfo/' + video_id, video_id,
note='Downloading video info page')
if self._AUTHENTICATED:
# Get flv info
flv_info_webpage = self._download_webpage(
'http://flapi.nicovideo.jp/api/getflv/' + video_id + '?as3=1',
video_id, 'Downloading flv info')
else:
# Get external player info
ext_player_info = self._download_webpage(
'http://ext.nicovideo.jp/thumb_watch/' + video_id, video_id)
thumb_play_key = self._search_regex(
r'\'thumbPlayKey\'\s*:\s*\'(.*?)\'', ext_player_info, 'thumbPlayKey')
# Get flv info
flv_info_data = compat_urllib_parse.urlencode({
'k': thumb_play_key,
'v': video_id
})
flv_info_request = compat_urllib_request.Request(
'http://ext.nicovideo.jp/thumb_watch', flv_info_data,
{'Content-Type': 'application/x-www-form-urlencoded'})
flv_info_webpage = self._download_webpage(
flv_info_request, video_id,
note='Downloading flv info', errnote='Unable to download flv info')
flv_info = compat_urlparse.parse_qs(flv_info_webpage)
if 'url' not in flv_info:
if 'deleted' in flv_info:
raise ExtractorError('The video has been deleted.',
expected=True)
else:
raise ExtractorError('Unable to find video URL')
video_real_url = flv_info['url'][0]
# Start extracting information
title = xpath_text(video_info, './/title')
if not title:
title = self._og_search_title(webpage, default=None)
if not title:
title = self._html_search_regex(
r'<span[^>]+class="videoHeaderTitle"[^>]*>([^<]+)</span>',
webpage, 'video title')
watch_api_data_string = self._html_search_regex(
r'<div[^>]+id="watchAPIDataContainer"[^>]+>([^<]+)</div>',
webpage, 'watch api data', default=None)
watch_api_data = self._parse_json(watch_api_data_string, video_id) if watch_api_data_string else {}
video_detail = watch_api_data.get('videoDetail', {})
extension = xpath_text(video_info, './/movie_type')
if not extension:
extension = determine_ext(video_real_url)
thumbnail = (
xpath_text(video_info, './/thumbnail_url') or
self._html_search_meta('image', webpage, 'thumbnail', default=None) or
video_detail.get('thumbnail'))
description = xpath_text(video_info, './/description')
timestamp = parse_iso8601(xpath_text(video_info, './/first_retrieve'))
if not timestamp:
match = self._html_search_meta('datePublished', webpage, 'date published', default=None)
if match:
timestamp = parse_iso8601(match.replace('+', ':00+'))
if not timestamp and video_detail.get('postedAt'):
timestamp = parse_iso8601(
video_detail['postedAt'].replace('/', '-'),
delimiter=' ', timezone=datetime.timedelta(hours=9))
view_count = int_or_none(xpath_text(video_info, './/view_counter'))
if not view_count:
match = self._html_search_regex(
r'>Views: <strong[^>]*>([^<]+)</strong>',
webpage, 'view count', default=None)
if match:
view_count = int_or_none(match.replace(',', ''))
view_count = view_count or video_detail.get('viewCount')
comment_count = int_or_none(xpath_text(video_info, './/comment_num'))
if not comment_count:
match = self._html_search_regex(
r'>Comments: <strong[^>]*>([^<]+)</strong>',
webpage, 'comment count', default=None)
if match:
comment_count = int_or_none(match.replace(',', ''))
comment_count = comment_count or video_detail.get('commentCount')
duration = (parse_duration(
xpath_text(video_info, './/length') or
self._html_search_meta(
'video:duration', webpage, 'video duration', default=None)) or
video_detail.get('length'))
webpage_url = xpath_text(video_info, './/watch_url') or url
if video_info.find('.//ch_id') is not None:
uploader_id = video_info.find('.//ch_id').text
uploader = video_info.find('.//ch_name').text
elif video_info.find('.//user_id') is not None:
uploader_id = video_info.find('.//user_id').text
uploader = video_info.find('.//user_nickname').text
else:
uploader_id = uploader = None
return {
'id': video_id,
'url': video_real_url,
'title': title,
'ext': extension,
'format_id': 'economy' if video_real_url.endswith('low') else 'normal',
'thumbnail': thumbnail,
'description': description,
'uploader': uploader,
'timestamp': timestamp,
'uploader_id': uploader_id,
'view_count': view_count,
'comment_count': comment_count,
'duration': duration,
'webpage_url': webpage_url,
}
class NiconicoPlaylistIE(InfoExtractor):
_VALID_URL = r'https?://www\.nicovideo\.jp/mylist/(?P<id>\d+)'
_TEST = {
'url': 'http://www.nicovideo.jp/mylist/27411728',
'info_dict': {
'id': '27411728',
'title': 'AKB48のオールナイトニッポン',
},
'playlist_mincount': 225,
}
def _real_extract(self, url):
list_id = self._match_id(url)
webpage = self._download_webpage(url, list_id)
entries_json = self._search_regex(r'Mylist\.preload\(\d+, (\[.*\])\);',
webpage, 'entries')
entries = json.loads(entries_json)
entries = [{
'_type': 'url',
'ie_key': NiconicoIE.ie_key(),
'url': ('http://www.nicovideo.jp/watch/%s' %
entry['item_data']['video_id']),
} for entry in entries]
return {
'_type': 'playlist',
'title': self._search_regex(r'\s+name: "(.*?)"', webpage, 'title'),
'id': list_id,
'entries': entries,
}
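# A minimal sketch of the flv-info parsing step used in NiconicoIE above: the
# getflv/thumb_watch APIs return a URL-encoded body, which
# compat_urlparse.parse_qs turns into a dict of lists keyed by field name.
# The sample string is hypothetical. Run via `python -m`, since this module
# uses relative imports.
if __name__ == '__main__':
    _sample = 'url=http%3A%2F%2Fexample.com%2Fvideo.mp4&deleted=0'
    _flv_info = compat_urlparse.parse_qs(_sample)
    print(_flv_info['url'][0])  # http://example.com/video.mp4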
|
andnovar/networkx
|
refs/heads/master
|
networkx/readwrite/tests/test_graph6.py
|
54
|
#!/usr/bin/env python
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from nose.tools import *
import networkx as nx
import networkx.readwrite.graph6 as g6
class TestGraph6Utils(object):
def test_n_data_n_conversion(self):
for i in [0, 1, 42, 62, 63, 64, 258047, 258048, 7744773, 68719476735]:
assert_equal(g6.data_to_n(g6.n_to_data(i))[0], i)
assert_equal(g6.data_to_n(g6.n_to_data(i))[1], [])
assert_equal(g6.data_to_n(g6.n_to_data(i) + [42, 43])[1],
[42, 43])
def test_data_sparse6_data_conversion(self):
for data in [[], [0], [63], [63, 63], [0]*42,
[0, 1, 62, 42, 3, 11, 0, 11]]:
assert_equal(g6.graph6_to_data(g6.data_to_graph6(data)), data)
assert_equal(len(g6.data_to_graph6(data)), len(data))
class TestGraph6(object):
def test_parse_graph6(self):
data="""DF{"""
G=nx.parse_graph6(data)
assert_equal(sorted(G.nodes()),[0, 1, 2, 3, 4])
assert_equal([e for e in sorted(G.edges())],
[(0, 3), (0, 4), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)])
def test_read_graph6(self):
data="""DF{"""
G=nx.parse_graph6(data)
fh = StringIO(data)
Gin=nx.read_graph6(fh)
assert_equal(sorted(G.nodes()),sorted(Gin.nodes()))
assert_equal(sorted(G.edges()),sorted(Gin.edges()))
def test_read_many_graph6(self):
# Read many graphs into list
data="""DF{\nD`{\nDqK\nD~{\n"""
fh = StringIO(data)
glist=nx.read_graph6(fh)
assert_equal(len(glist),4)
for G in glist:
assert_equal(sorted(G.nodes()),[0, 1, 2, 3, 4])
def test_generate_graph6(self):
assert_equal(nx.generate_graph6(nx.empty_graph(0)), '>>graph6<<?')
assert_equal(nx.generate_graph6(nx.empty_graph(1)), '>>graph6<<@')
G1 = nx.complete_graph(4)
assert_equal(nx.generate_graph6(G1, header=True), '>>graph6<<C~')
assert_equal(nx.generate_graph6(G1, header=False), 'C~')
G2 = nx.complete_bipartite_graph(6,9)
assert_equal(nx.generate_graph6(G2, header=False),
'N??F~z{~Fw^_~?~?^_?') # verified by Sage
G3 = nx.complete_graph(67)
assert_equal(nx.generate_graph6(G3, header=False),
'~?@B' + '~' * 368 + 'w')
def test_write_graph6(self):
fh = StringIO()
nx.write_graph6(nx.complete_bipartite_graph(6,9), fh)
fh.seek(0)
assert_equal(fh.read(), '>>graph6<<N??F~z{~Fw^_~?~?^_?\n')
def test_generate_and_parse_graph6(self):
for i in list(range(13)) + [31, 47, 62, 63, 64, 72]:
g = nx.random_graphs.gnm_random_graph(i, i * i // 4, seed=i)
gstr = nx.generate_graph6(g, header=False)
assert_equal(len(gstr),
((i-1) * i // 2 + 5) // 6 + (1 if i < 63 else 4))
g2 = nx.parse_graph6(gstr)
assert_equal(g2.order(), g.order())
assert_equal(sorted(g2.edges()), sorted(g.edges()))
@raises(nx.NetworkXError)
def directed_error(self):
nx.generate_graph6(nx.DiGraph())
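# A minimal round-trip sketch using the public API exercised above: K4
# encodes to 'C~' (as asserted in test_generate_graph6) and parses back to a
# graph with the same node labels and edges.
if __name__ == '__main__':
    g = nx.complete_graph(4)
    s = nx.generate_graph6(g, header=False)
    assert s == 'C~'
    g2 = nx.parse_graph6(s)
    assert sorted(g.edges()) == sorted(g2.edges())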
|
huobaowangxi/scikit-learn
|
refs/heads/master
|
sklearn/neural_network/rbm.py
|
206
|
"""Restricted Boltzmann Machine
"""
# Authors: Yann N. Dauphin <dauphiya@iro.umontreal.ca>
# Vlad Niculae
# Gabriel Synnaeve
# Lars Buitinck
# License: BSD 3 clause
import time
import numpy as np
import scipy.sparse as sp
from ..base import BaseEstimator
from ..base import TransformerMixin
from ..externals.six.moves import xrange
from ..utils import check_array
from ..utils import check_random_state
from ..utils import gen_even_slices
from ..utils import issparse
from ..utils.extmath import safe_sparse_dot
from ..utils.extmath import log_logistic
from ..utils.fixes import expit # logistic function
from ..utils.validation import check_is_fitted
class BernoulliRBM(BaseEstimator, TransformerMixin):
"""Bernoulli Restricted Boltzmann Machine (RBM).
A Restricted Boltzmann Machine with binary visible units and
binary hiddens. Parameters are estimated using Stochastic Maximum
Likelihood (SML), also known as Persistent Contrastive Divergence (PCD)
[2].
The time complexity of this implementation is ``O(d ** 2)`` assuming
d ~ n_features ~ n_components.
Read more in the :ref:`User Guide <rbm>`.
Parameters
----------
n_components : int, optional
Number of binary hidden units.
learning_rate : float, optional
The learning rate for weight updates. It is *highly* recommended
to tune this hyper-parameter. Reasonable values are in the
10**[0., -3.] range.
batch_size : int, optional
Number of examples per minibatch.
n_iter : int, optional
Number of iterations/sweeps over the training dataset to perform
during training.
verbose : int, optional
The verbosity level. The default, zero, means silent mode.
random_state : integer or numpy.RandomState, optional
A random number generator instance to define the state of the
random permutations generator. If an integer is given, it fixes the
seed. Defaults to the global numpy random number generator.
Attributes
----------
intercept_hidden_ : array-like, shape (n_components,)
Biases of the hidden units.
intercept_visible_ : array-like, shape (n_features,)
Biases of the visible units.
components_ : array-like, shape (n_components, n_features)
        Weight matrix, where n_features is the number of
visible units and n_components is the number of hidden units.
Examples
--------
>>> import numpy as np
>>> from sklearn.neural_network import BernoulliRBM
>>> X = np.array([[0, 0, 0], [0, 1, 1], [1, 0, 1], [1, 1, 1]])
>>> model = BernoulliRBM(n_components=2)
>>> model.fit(X)
BernoulliRBM(batch_size=10, learning_rate=0.1, n_components=2, n_iter=10,
random_state=None, verbose=0)
References
----------
[1] Hinton, G. E., Osindero, S. and Teh, Y. A fast learning algorithm for
deep belief nets. Neural Computation 18, pp 1527-1554.
http://www.cs.toronto.edu/~hinton/absps/fastnc.pdf
[2] Tieleman, T. Training Restricted Boltzmann Machines using
Approximations to the Likelihood Gradient. International Conference
on Machine Learning (ICML) 2008
"""
def __init__(self, n_components=256, learning_rate=0.1, batch_size=10,
n_iter=10, verbose=0, random_state=None):
self.n_components = n_components
self.learning_rate = learning_rate
self.batch_size = batch_size
self.n_iter = n_iter
self.verbose = verbose
self.random_state = random_state
def transform(self, X):
"""Compute the hidden layer activation probabilities, P(h=1|v=X).
Parameters
----------
X : {array-like, sparse matrix} shape (n_samples, n_features)
The data to be transformed.
Returns
-------
h : array, shape (n_samples, n_components)
Latent representations of the data.
"""
check_is_fitted(self, "components_")
X = check_array(X, accept_sparse='csr', dtype=np.float)
return self._mean_hiddens(X)
def _mean_hiddens(self, v):
"""Computes the probabilities P(h=1|v).
Parameters
----------
v : array-like, shape (n_samples, n_features)
Values of the visible layer.
Returns
-------
h : array-like, shape (n_samples, n_components)
Corresponding mean field values for the hidden layer.
"""
p = safe_sparse_dot(v, self.components_.T)
p += self.intercept_hidden_
return expit(p, out=p)
def _sample_hiddens(self, v, rng):
"""Sample from the distribution P(h|v).
Parameters
----------
v : array-like, shape (n_samples, n_features)
Values of the visible layer to sample from.
rng : RandomState
Random number generator to use.
Returns
-------
h : array-like, shape (n_samples, n_components)
Values of the hidden layer.
"""
p = self._mean_hiddens(v)
return (rng.random_sample(size=p.shape) < p)
def _sample_visibles(self, h, rng):
"""Sample from the distribution P(v|h).
Parameters
----------
h : array-like, shape (n_samples, n_components)
Values of the hidden layer to sample from.
rng : RandomState
Random number generator to use.
Returns
-------
v : array-like, shape (n_samples, n_features)
Values of the visible layer.
"""
p = np.dot(h, self.components_)
p += self.intercept_visible_
expit(p, out=p)
return (rng.random_sample(size=p.shape) < p)
def _free_energy(self, v):
"""Computes the free energy F(v) = - log sum_h exp(-E(v,h)).
Parameters
----------
v : array-like, shape (n_samples, n_features)
Values of the visible layer.
Returns
-------
free_energy : array-like, shape (n_samples,)
The value of the free energy.
"""
return (- safe_sparse_dot(v, self.intercept_visible_)
- np.logaddexp(0, safe_sparse_dot(v, self.components_.T)
+ self.intercept_hidden_).sum(axis=1))
def gibbs(self, v):
"""Perform one Gibbs sampling step.
Parameters
----------
v : array-like, shape (n_samples, n_features)
Values of the visible layer to start from.
Returns
-------
v_new : array-like, shape (n_samples, n_features)
Values of the visible layer after one Gibbs step.
"""
check_is_fitted(self, "components_")
if not hasattr(self, "random_state_"):
self.random_state_ = check_random_state(self.random_state)
h_ = self._sample_hiddens(v, self.random_state_)
v_ = self._sample_visibles(h_, self.random_state_)
return v_
def partial_fit(self, X, y=None):
"""Fit the model to the data X which should contain a partial
segment of the data.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data.
Returns
-------
self : BernoulliRBM
The fitted model.
"""
X = check_array(X, accept_sparse='csr', dtype=np.float)
if not hasattr(self, 'random_state_'):
self.random_state_ = check_random_state(self.random_state)
if not hasattr(self, 'components_'):
self.components_ = np.asarray(
self.random_state_.normal(
0,
0.01,
(self.n_components, X.shape[1])
),
order='fortran')
if not hasattr(self, 'intercept_hidden_'):
self.intercept_hidden_ = np.zeros(self.n_components, )
if not hasattr(self, 'intercept_visible_'):
self.intercept_visible_ = np.zeros(X.shape[1], )
if not hasattr(self, 'h_samples_'):
self.h_samples_ = np.zeros((self.batch_size, self.n_components))
self._fit(X, self.random_state_)
def _fit(self, v_pos, rng):
"""Inner fit for one mini-batch.
Adjust the parameters to maximize the likelihood of v using
Stochastic Maximum Likelihood (SML).
Parameters
----------
v_pos : array-like, shape (n_samples, n_features)
The data to use for training.
rng : RandomState
Random number generator to use for sampling.
"""
h_pos = self._mean_hiddens(v_pos)
v_neg = self._sample_visibles(self.h_samples_, rng)
h_neg = self._mean_hiddens(v_neg)
lr = float(self.learning_rate) / v_pos.shape[0]
update = safe_sparse_dot(v_pos.T, h_pos, dense_output=True).T
update -= np.dot(h_neg.T, v_neg)
self.components_ += lr * update
self.intercept_hidden_ += lr * (h_pos.sum(axis=0) - h_neg.sum(axis=0))
self.intercept_visible_ += lr * (np.asarray(
v_pos.sum(axis=0)).squeeze() -
v_neg.sum(axis=0))
h_neg[rng.uniform(size=h_neg.shape) < h_neg] = 1.0 # sample binomial
self.h_samples_ = np.floor(h_neg, h_neg)
def score_samples(self, X):
"""Compute the pseudo-likelihood of X.
Parameters
----------
X : {array-like, sparse matrix} shape (n_samples, n_features)
Values of the visible layer. Must be all-boolean (not checked).
Returns
-------
pseudo_likelihood : array-like, shape (n_samples,)
Value of the pseudo-likelihood (proxy for likelihood).
Notes
-----
This method is not deterministic: it computes a quantity called the
free energy on X, then on a randomly corrupted version of X, and
returns the log of the logistic function of the difference.
"""
check_is_fitted(self, "components_")
v = check_array(X, accept_sparse='csr')
rng = check_random_state(self.random_state)
# Randomly corrupt one feature in each sample in v.
ind = (np.arange(v.shape[0]),
rng.randint(0, v.shape[1], v.shape[0]))
if issparse(v):
data = -2 * v[ind] + 1
v_ = v + sp.csr_matrix((data.A.ravel(), ind), shape=v.shape)
else:
v_ = v.copy()
v_[ind] = 1 - v_[ind]
fe = self._free_energy(v)
fe_ = self._free_energy(v_)
return v.shape[1] * log_logistic(fe_ - fe)
def fit(self, X, y=None):
"""Fit the model to the data X.
Parameters
----------
X : {array-like, sparse matrix} shape (n_samples, n_features)
Training data.
Returns
-------
self : BernoulliRBM
The fitted model.
"""
X = check_array(X, accept_sparse='csr', dtype=np.float)
n_samples = X.shape[0]
rng = check_random_state(self.random_state)
self.components_ = np.asarray(
rng.normal(0, 0.01, (self.n_components, X.shape[1])),
order='fortran')
self.intercept_hidden_ = np.zeros(self.n_components, )
self.intercept_visible_ = np.zeros(X.shape[1], )
self.h_samples_ = np.zeros((self.batch_size, self.n_components))
n_batches = int(np.ceil(float(n_samples) / self.batch_size))
batch_slices = list(gen_even_slices(n_batches * self.batch_size,
n_batches, n_samples))
verbose = self.verbose
begin = time.time()
for iteration in xrange(1, self.n_iter + 1):
for batch_slice in batch_slices:
self._fit(X[batch_slice], rng)
if verbose:
end = time.time()
print("[%s] Iteration %d, pseudo-likelihood = %.2f,"
" time = %.2fs"
% (type(self).__name__, iteration,
self.score_samples(X).mean(), end - begin))
begin = end
return self
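# A minimal usage sketch (an illustration, not part of the library): fit the
# RBM on random binary data, as the Bernoulli visible units require, and read
# off hidden activation probabilities. Run via `python -m`, since this module
# uses relative imports.
if __name__ == '__main__':
    _rng = np.random.RandomState(0)
    _X = (_rng.rand(100, 6) > 0.5).astype(np.float64)
    _rbm = BernoulliRBM(n_components=2, n_iter=5, random_state=0)
    _H = _rbm.fit_transform(_X)  # hidden probabilities, shape (100, 2)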
|
MichaelQQ/Wireshark-PE
|
refs/heads/master
|
tools/wireshark_be.py
|
30
|
# -*- python -*-
#
# File : wireshark_be.py
#
# Author : Frank Singleton (frank.singleton@ericsson.com)
#
# Copyright (C) 2001 Frank Singleton, Ericsson Inc.
#
# This file is a backend to "omniidl", used to generate "Wireshark"
# dissectors from IDL descriptions. The output language generated
# is "C". It will generate code to use the GIOP/IIOP get_CDR_XXX API.
#
# Please see packet-giop.h in Wireshark distro for API description.
# Wireshark is available at http://www.wireshark.org/
#
# Omniidl is part of the OmniOrb distribution, and is available at
# http://omniorb.sourceforge.net
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Description:
#
# Omniidl Back-end which parses an IDL data structure provided by the frontend
# and generates packet-idl-xxx.[ch] for compiling as a dissector in
# Wireshark IP protocol analyser.
#
#
#
#
# Strategy.
#
# Crawl all the way down all branches until I hit "Operation", "Enum", "Attribute",
# "Struct" and "Union" nodes. Then store these nodes in lists.
#
# Pass these lists (via an object ref) to the src code
# generator (wireshark_gen) class and let it do the hard work !
#
#
# Don't forget structs can contain embedded structs etc., so don't forget
# to peek inside and check :-)
#
#
"""Wireshark IDL compiler back-end."""
from omniidl import idlast, idltype, idlvisitor, idlutil, output
import sys, string
from os import path
from wireshark_gen import wireshark_gen_C
#
# This class finds the "Operation", "Enum", "Attribute", "Struct" and "Union"
# nodes, then hands them off to an instance of the source code generator
# class "wireshark_gen".
#
class WiresharkVisitor:
DEBUG = 0 # debug flag
def __init__(self, st):
self.st = st
self.oplist = [] # list of operation nodes
self.enlist = [] # list of enum nodes
self.atlist = [] # list of attribute nodes
self.stlist = [] # list of struct nodes
self.unlist = [] # list of union nodes
def visitAST(self, node):
if self.DEBUG:
print "XXX visitAST() node = ", node
for n in node.declarations():
if isinstance(n, idlast.Module):
self.visitModule(n)
if isinstance(n, idlast.Interface):
self.visitInterface(n)
if isinstance(n, idlast.Operation):
self.visitOperation(n)
if isinstance(n, idlast.Attribute):
self.visitAttribute(n)
if isinstance(n, idlast.Enum):
self.visitEnum(n)
if isinstance(n, idlast.Struct):
self.visitStruct(n)
if isinstance(n, idlast.Union):
self.visitUnion(n)
# Check for Typedef structs and unions
if isinstance(n, idlast.Typedef):
self.visitTypedef(n) # who are you ?
def visitModule(self, node):
if self.DEBUG:
print "XXX visitModule() node = ", node
for n in node.definitions():
if isinstance(n, idlast.Module):
self.visitModule(n)
if isinstance(n, idlast.Interface):
self.visitInterface(n)
if isinstance(n, idlast.Operation):
self.visitOperation(n)
if isinstance(n, idlast.Attribute):
self.visitAttribute(n)
if isinstance(n, idlast.Enum):
self.visitEnum(n)
if isinstance(n, idlast.Struct):
self.visitStruct(n)
if isinstance(n, idlast.Union):
self.visitUnion(n)
# Check for Typedef structs and unions
if isinstance(n, idlast.Typedef):
self.visitTypedef(n) # who are you ?
def visitInterface(self, node):
if self.DEBUG:
print "XXX visitInterface() node = ", node
for c in node.callables():
if isinstance(c, idlast.Operation):
self.visitOperation(c)
if isinstance(c, idlast.Attribute):
self.visitAttribute(c)
for d in node.contents():
if isinstance(d, idlast.Enum):
self.visitEnum(d)
if isinstance(d, idlast.Struct):
self.visitStruct(d)
if isinstance(d, idlast.Union):
self.visitUnion(d)
# Check for Typedef structs and unions
if isinstance(d, idlast.Typedef):
self.visitTypedef(d) # who are you ?
#
# visitOperation
#
# populates the operations node list "oplist"
#
#
def visitOperation(self,opnode):
if not opnode in self.oplist:
self.oplist.append(opnode) # store operation node
#
# visitAttribute
#
# populates the attribute node list "atlist"
#
#
def visitAttribute(self,atnode):
if not atnode in self.atlist:
self.atlist.append(atnode) # store attribute node
#
# visitEnum
#
# populates the Enum node list "enlist"
#
#
def visitEnum(self,enode):
if not enode in self.enlist:
self.enlist.append(enode) # store enum node if unique
#
# visitTypedef
#
# Search to see if it's a typedef'd struct, union, or enum
#
# eg: typedef enum colors {red, green, blue } mycolors;
#
def visitTypedef(self,td):
d = td.aliasType() # get Type, possibly Declared
if isinstance(d,idltype.Declared):
self.visitDeclared(d)
#
# visitDeclared
#
# Search to see if it's a struct, union, or enum
#
#
def visitDeclared(self,d):
if isinstance(d,idltype.Declared):
sue = d.decl() # grab the struct or union or enum
if isinstance(sue, idlast.Struct):
self.visitStruct(sue)
if isinstance(sue, idlast.Union):
self.visitUnion(sue)
if isinstance(sue, idlast.Enum):
self.visitEnum(sue)
#
# visitStruct
#
# populates the struct node list "stlist"
# and checks its members also
#
#
def visitStruct(self,stnode):
if not stnode in self.stlist:
self.stlist.append(stnode) # store struct node if unique and avoid recursive loops
# if we come across recursive structs
for m in stnode.members(): # find embedded struct definitions within this
mt = m.memberType()
if isinstance(mt,idltype.Declared):
self.visitDeclared(mt) # if declared, then check it out
#
# visitUnion
#
# populates the struct node list "unlist"
# and checks its members also
#
#
def visitUnion(self,unnode):
if not unnode in self.unlist:
self.unlist.append(unnode) # store union node if unique
if unnode.constrType(): # enum defined within switch type
if isinstance(unnode.switchType(),idltype.Declared):
self.visitDeclared(unnode.switchType())
for c in unnode.cases():
ct = c.caseType()
if isinstance(ct,idltype.Declared):
self.visitDeclared(ct) # if declared, then check it out
def run(tree, args):
st = output.Stream(sys.stdout, 4) # set indent for stream
ev = WiresharkVisitor(st) # create visitor object
ev.visitAST(tree) # go find some operations
#
# Grab name of main IDL file being compiled.
#
# Assumption: Name is of the form abcdefg.xyz (eg: CosNaming.idl)
#
    fname = path.basename(tree.file()) # grab basename only, don't care about the path
nl = string.split(fname,".")[0] # split name of main IDL file using "." as separator
# and grab first field (eg: CosNaming)
if ev.DEBUG:
for i in ev.oplist:
print "XXX - Operation node ", i, " repoId() = ", i.repoId()
for i in ev.atlist:
print "XXX - Attribute node ", i, " identifiers() = ", i.identifiers()
for i in ev.enlist:
print "XXX - Enum node ", i, " repoId() = ", i.repoId()
for i in ev.stlist:
print "XXX - Struct node ", i, " repoId() = ", i.repoId()
for i in ev.unlist:
print "XXX - Union node ", i, " repoId() = ", i.repoId()
# create a C generator object
# and generate some C code
eg = wireshark_gen_C(ev.st, string.upper(nl), string.lower(nl), string.capitalize(nl) + " Dissector Using GIOP API")
eg.genCode(ev.oplist, ev.atlist, ev.enlist, ev.stlist, ev.unlist) # pass them onto the C generator
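#
# Typical invocation (a usage note; the exact flags depend on the local
# omniidl installation):
#
#    omniidl -p ./ -b wireshark_be CosNaming.idl
#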
#
# Editor modelines - http://www.wireshark.org/tools/modelines.html
#
# Local variables:
# c-basic-offset: 4
# indent-tabs-mode: nil
# End:
#
# vi: set shiftwidth=4 expandtab:
# :indentSize=4:noTabs=true:
#
|
claymation/pystogram
|
refs/heads/master
|
setup.py
|
1
|
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
setup(
name = "pystogram",
version = "0.2.0",
description = "Display time-series histograms of Apache-style log files on the command-line",
author = "Clay McClure",
author_email = "clay@daemons.net",
url = "https://github.com/claymation/pystogram",
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Logging',
],
packages = ['pystogram'],
scripts = ['bin/pystogram'],
tests_require = ['pytest'],
cmdclass = {'test': PyTest},
)
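# With the PyTest command class wired into `cmdclass` above, the test suite
# runs through setuptools (a usage note, not executed here):
#
#     python setup.py test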
|
glgerard/MDBN
|
refs/heads/master
|
src/archived/mdbn_v1.py
|
1
|
import numpy as np
import theano
from theano import tensor
from src.archived.rbm_v1 import GRBM
from src.archived.rbm_v1 import RBM
from src.utils import zscore
def importdata(file):
with open(file) as f:
ncols = len(f.readline().split('\t'))
return (ncols-1,
np.loadtxt(file,
dtype=theano.config.floatX,
delimiter='\t',
skiprows=1,
usecols=range(1,ncols)))
def test_mdbn():
# upload the data, each column is a single person
gecols, GE = importdata("../data/3.GE1_0.5.out")
mecols, ME = importdata("../data/3.Methylation_0.5.out")
rnacols, mRNA = importdata("../data/3.miRNA_0.5.out")
# Pass to a row representation, i.e. the data for each person is now on a
# single row.
# Normalize the data so that each measurement on our population has zero
    # mean and unit variance
datage = zscore(GE.T)
datame = zscore(ME.T)
datarna = zscore(mRNA.T)
x = tensor.matrix('x')
# rbm.training(self, dataset, batch_size, training_epochs, k, lr,
# lam1=0, lam2=0,
# stocastic_steps=True,
# data_shuffle=False,
# display_fn=None)
# Create the 1st layer of Gaussian Bernoulli RBM
rna_GRBM_l1 = GRBM(x, datarna.shape[1], 40)
rna_GRBM_l1.training(datarna, 1, 8000, 10, 0.0005, 0.1, 0.1, data_shuffle=True, stocastic_steps=False)
ge_GRBM_l1 = GRBM(x, datage.shape[1], 400)
ge_GRBM_l1.training(datage, 1, 8000, 1, 0.0005, 1, 1, data_shuffle=True, stocastic_steps=False)
ge_RBM_l2 = RBM(x, 400, 40)
    ge_RBM_l2.training(ge_GRBM_l1.output(), 1, 800, 1, 0.1)
me_GRBM_l1 = GRBM(x, datame.shape[1], 400)
me_GRBM_l1.training(datame, 1, 8000, 1, 0.0005, 1, 1, data_shuffle=True, stocastic_steps=False)
me_RBM_l2 = RBM(x, 400, 40)
    me_RBM_l2.training(me_GRBM_l1.output(), 1, 800, 1, 0.1)
joint_layer = []
joint_layer.append(rna_GRBM_l1.output())
joint_layer.append(ge_RBM_l2.output())
joint_layer.append(me_RBM_l2.output())
np.savez('parameters_at_gaussian_layer_RNA.npz',
             k=10,
             epoch=8000,
             batch_size=1,
learning_rate=0.0005,
stocastic_steps=False,
momentum=False,
weight_cost=False,
             W=rna_GRBM_l1.W.get_value(borrow=True),
             a=rna_GRBM_l1.a.get_value(borrow=True),
             b=rna_GRBM_l1.b.get_value(borrow=True))
if __name__ == '__main__':
test_mdbn()
|
ramyfarid922/Quiz-Program
|
refs/heads/master
|
vendor/bundle/ruby/2.2.0/gems/libv8-3.16.14.7/vendor/gyp/test/win/gyptest-link-entrypointsymbol.py
|
342
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure entrypointsymbol setting is extracted properly.
"""
import TestGyp
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('entrypointsymbol.gyp', chdir=CHDIR)
test.build('entrypointsymbol.gyp', 'test_ok', chdir=CHDIR)
test.build('entrypointsymbol.gyp', 'test_fail', chdir=CHDIR, status=1)
test.pass_test()
|
leilihh/nova
|
refs/heads/stable/icehouse
|
nova/tests/api/openstack/compute/contrib/test_extended_volumes.py
|
13
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
import webob
from nova.api.openstack.compute.contrib import extended_volumes
from nova import compute
from nova import db
from nova.objects import instance as instance_obj
from nova.openstack.common import jsonutils
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_block_device
from nova.tests import fake_instance
UUID1 = '00000000-0000-0000-0000-000000000001'
UUID2 = '00000000-0000-0000-0000-000000000002'
UUID3 = '00000000-0000-0000-0000-000000000003'
def fake_compute_get(*args, **kwargs):
inst = fakes.stub_instance(1, uuid=UUID1)
return fake_instance.fake_instance_obj(args[1], **inst)
def fake_compute_get_all(*args, **kwargs):
db_list = [fakes.stub_instance(1), fakes.stub_instance(2)]
fields = instance_obj.INSTANCE_DEFAULT_FIELDS
return instance_obj._make_instance_list(args[1],
instance_obj.InstanceList(),
db_list, fields)
def fake_bdms_get_all_by_instance(*args, **kwargs):
return [fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': UUID1, 'source_type': 'volume',
'destination_type': 'volume', 'id': 1}),
fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': UUID2, 'source_type': 'volume',
'destination_type': 'volume', 'id': 2})]
class ExtendedVolumesTest(test.TestCase):
content_type = 'application/json'
prefix = 'os-extended-volumes:'
def setUp(self):
super(ExtendedVolumesTest, self).setUp()
fakes.stub_out_nw_api(self.stubs)
self.stubs.Set(compute.api.API, 'get', fake_compute_get)
self.stubs.Set(compute.api.API, 'get_all', fake_compute_get_all)
self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
fake_bdms_get_all_by_instance)
self.flags(
osapi_compute_extension=[
'nova.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Extended_volumes'])
return_server = fakes.fake_instance_get()
self.stubs.Set(db, 'instance_get_by_uuid', return_server)
def _make_request(self, url):
req = webob.Request.blank(url)
req.headers['Accept'] = self.content_type
res = req.get_response(fakes.wsgi_app(init_only=('servers',)))
return res
def _get_server(self, body):
return jsonutils.loads(body).get('server')
def _get_servers(self, body):
return jsonutils.loads(body).get('servers')
def test_show(self):
url = '/v2/fake/servers/%s' % UUID1
res = self._make_request(url)
self.assertEqual(res.status_int, 200)
server = self._get_server(res.body)
exp_volumes = [{'id': UUID1}, {'id': UUID2}]
if self.content_type == 'application/json':
actual = server.get('%svolumes_attached' % self.prefix)
elif self.content_type == 'application/xml':
actual = [dict(elem.items()) for elem in
server.findall('%svolume_attached' % self.prefix)]
self.assertEqual(exp_volumes, actual)
def test_detail(self):
url = '/v2/fake/servers/detail'
res = self._make_request(url)
self.assertEqual(res.status_int, 200)
exp_volumes = [{'id': UUID1}, {'id': UUID2}]
for i, server in enumerate(self._get_servers(res.body)):
if self.content_type == 'application/json':
actual = server.get('%svolumes_attached' % self.prefix)
elif self.content_type == 'application/xml':
actual = [dict(elem.items()) for elem in
server.findall('%svolume_attached' % self.prefix)]
self.assertEqual(exp_volumes, actual)
class ExtendedVolumesXmlTest(ExtendedVolumesTest):
content_type = 'application/xml'
prefix = '{%s}' % extended_volumes.Extended_volumes.namespace
def _get_server(self, body):
return etree.XML(body)
def _get_servers(self, body):
return etree.XML(body).getchildren()
|
wdm0006/sklearn-extensions
|
refs/heads/master
|
examples/kernel_regression/example.py
|
1
|
"""
Example from: https://raw.githubusercontent.com/jmetzen/kernel_regression/master/plot_kernel_regression.py
========================================================================
Comparison of kernel regression (KR) and support vector regression (SVR)
========================================================================
Toy example of 1D regression using kernel regression (KR) and support vector
regression (SVR). KR provides an efficient way of selecting a kernel's
bandwidth via leave-one-out cross-validation, which is considerably faster
than an explicit grid-search as required by SVR. The main disadvantages are
that it does not support regularization and is not robust to outliers.
"""
print(__doc__)
import time
import numpy as np
from sklearn.svm import SVR
from sklearn.grid_search import GridSearchCV
from sklearn_extensions.kernel_regression import KernelRegression
np.random.seed(0)
# Generate sample data
X = np.sort(5 * np.random.rand(100, 1), axis=0)
y = np.sin(X).ravel()
# Add noise to targets
y += 0.5 * (0.5 - np.random.rand(y.size))
# Fit regression models
svr = GridSearchCV(SVR(kernel='rbf'), cv=5, param_grid={"C": [1e-1, 1e0, 1e1, 1e2], "gamma": np.logspace(-2, 2, 10)})
kr = KernelRegression(kernel="rbf", gamma=np.logspace(-2, 2, 10))
t0 = time.time()
y_svr = svr.fit(X, y).predict(X)
print("SVR complexity and bandwidth selected and model fitted in %.3f s" % (time.time() - t0))
t0 = time.time()
y_kr = kr.fit(X, y).predict(X)
print("KR including bandwidth selection fitted in %.3f s" % (time.time() - t0))
|
windofthesky/ansible
|
refs/heads/devel
|
test/units/template/__init__.py
|
7690
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
|
bikong2/scikit-learn
|
refs/heads/master
|
examples/linear_model/plot_ransac.py
|
250
|
"""
===========================================
Robust linear model estimation using RANSAC
===========================================
In this example we see how to robustly fit a linear model to faulty data using
the RANSAC algorithm.
"""
import numpy as np
from matplotlib import pyplot as plt
from sklearn import linear_model, datasets
n_samples = 1000
n_outliers = 50
X, y, coef = datasets.make_regression(n_samples=n_samples, n_features=1,
n_informative=1, noise=10,
coef=True, random_state=0)
# Add outlier data
np.random.seed(0)
X[:n_outliers] = 3 + 0.5 * np.random.normal(size=(n_outliers, 1))
y[:n_outliers] = -3 + 10 * np.random.normal(size=n_outliers)
# Fit line using all data
model = linear_model.LinearRegression()
model.fit(X, y)
# Robustly fit linear model with RANSAC algorithm
model_ransac = linear_model.RANSACRegressor(linear_model.LinearRegression())
model_ransac.fit(X, y)
inlier_mask = model_ransac.inlier_mask_
outlier_mask = np.logical_not(inlier_mask)
# Predict data of estimated models
line_X = np.arange(-5, 5)
line_y = model.predict(line_X[:, np.newaxis])
line_y_ransac = model_ransac.predict(line_X[:, np.newaxis])
# Compare estimated coefficients
print("Estimated coefficients (true, normal, RANSAC):")
print(coef, model.coef_, model_ransac.estimator_.coef_)
plt.plot(X[inlier_mask], y[inlier_mask], '.g', label='Inliers')
plt.plot(X[outlier_mask], y[outlier_mask], '.r', label='Outliers')
plt.plot(line_X, line_y, '-k', label='Linear regressor')
plt.plot(line_X, line_y_ransac, '-b', label='RANSAC regressor')
plt.legend(loc='lower right')
plt.show()
|
machinaut/gym
|
refs/heads/master
|
examples/agents/tabular_q_agent.py
|
7
|
# Imports required by the class below; the module paths for `discrete` and
# `UnsupportedSpace` are assumptions based on the gym API of this era.
from collections import defaultdict

import numpy as np

from gym.error import UnsupportedSpace
from gym.spaces import discrete


class TabularQAgent(object):
"""
Agent implementing tabular Q-learning.
"""
def __init__(self, observation_space, action_space, **userconfig):
if not isinstance(observation_space, discrete.Discrete):
raise UnsupportedSpace('Observation space {} incompatible with {}. (Only supports Discrete observation spaces.)'.format(observation_space, self))
if not isinstance(action_space, discrete.Discrete):
raise UnsupportedSpace('Action space {} incompatible with {}. (Only supports Discrete action spaces.)'.format(action_space, self))
self.observation_space = observation_space
self.action_space = action_space
self.action_n = action_space.n
self.config = {
"init_mean" : 0.0, # Initialize Q values with this mean
"init_std" : 0.0, # Initialize Q values with this standard deviation
"learning_rate" : 0.1,
"eps": 0.05, # Epsilon in epsilon greedy policies
"discount": 0.95,
"n_iter": 10000} # Number of iterations
self.config.update(userconfig)
self.q = defaultdict(lambda: self.config["init_std"] * np.random.randn(self.action_n) + self.config["init_mean"])
def act(self, observation, eps=None):
if eps is None:
eps = self.config["eps"]
# epsilon greedy.
action = np.argmax(self.q[observation.item()]) if np.random.random() > eps else self.action_space.sample()
return action
def learn(self, env):
config = self.config
obs = env.reset()
q = self.q
for t in range(config["n_iter"]):
            action = self.act(obs)
obs2, reward, done, _ = env.step(action)
future = 0.0
if not done:
future = np.max(q[obs2.item()])
q[obs.item()][action] -= \
self.config["learning_rate"] * (q[obs.item()][action] - reward - config["discount"] * future)
obs = obs2
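# A minimal training sketch (an assumption-laden illustration, not part of the
# original file): the environment id below is hypothetical; any environment
# with Discrete observation and action spaces passes the checks in __init__,
# provided observations support .item() as learn() expects.
if __name__ == '__main__':
    import gym
    env = gym.make('FrozenLake-v0')
    agent = TabularQAgent(env.observation_space, env.action_space, n_iter=1000)
    agent.learn(env)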
|
herow/planning_qgis
|
refs/heads/master
|
python/plugins/processing/algs/qgis/ExtentFromLayer.py
|
6
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
ExtentFromLayer.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import QVariant
from qgis.core import QGis, QgsField, QgsPoint, QgsGeometry, QgsFeature
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterBoolean
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class ExtentFromLayer(GeoAlgorithm):
INPUT_LAYER = 'INPUT_LAYER'
BY_FEATURE = 'BY_FEATURE'
OUTPUT = 'OUTPUT'
def defineCharacteristics(self):
self.name = 'Polygon from layer extent'
self.group = 'Vector general tools'
self.addParameter(ParameterVector(self.INPUT_LAYER,
self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY]))
self.addParameter(ParameterBoolean(self.BY_FEATURE,
self.tr('Calculate extent for each feature separately'), False))
self.addOutput(OutputVector(self.OUTPUT, self.tr('Output layer')))
def processAlgorithm(self, progress):
layer = dataobjects.getObjectFromUri(
self.getParameterValue(self.INPUT_LAYER))
byFeature = self.getParameterValue(self.BY_FEATURE)
fields = [
QgsField('MINX', QVariant.Double),
QgsField('MINY', QVariant.Double),
QgsField('MAXX', QVariant.Double),
QgsField('MAXY', QVariant.Double),
QgsField('CNTX', QVariant.Double),
QgsField('CNTY', QVariant.Double),
QgsField('AREA', QVariant.Double),
QgsField('PERIM', QVariant.Double),
QgsField('HEIGHT', QVariant.Double),
QgsField('WIDTH', QVariant.Double),
]
writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(fields,
QGis.WKBPolygon, layer.crs())
if byFeature:
self.featureExtent(layer, writer, progress)
else:
self.layerExtent(layer, writer, progress)
del writer
def layerExtent(self, layer, writer, progress):
rect = layer.extent()
minx = rect.xMinimum()
miny = rect.yMinimum()
maxx = rect.xMaximum()
maxy = rect.yMaximum()
height = rect.height()
width = rect.width()
cntx = minx + width / 2.0
cnty = miny + height / 2.0
area = width * height
perim = 2 * width + 2 * height
rect = [QgsPoint(minx, miny), QgsPoint(minx, maxy), QgsPoint(maxx,
maxy), QgsPoint(maxx, miny), QgsPoint(minx, miny)]
geometry = QgsGeometry().fromPolygon([rect])
feat = QgsFeature()
feat.setGeometry(geometry)
attrs = [
minx,
miny,
maxx,
maxy,
cntx,
cnty,
area,
perim,
height,
width,
]
feat.setAttributes(attrs)
writer.addFeature(feat)
def featureExtent(self, layer, writer, progress):
current = 0
features = vector.features(layer)
total = 100.0 / float(len(features))
feat = QgsFeature()
for f in features:
rect = f.geometry().boundingBox()
minx = rect.xMinimum()
miny = rect.yMinimum()
maxx = rect.xMaximum()
maxy = rect.yMaximum()
height = rect.height()
width = rect.width()
cntx = minx + width / 2.0
cnty = miny + height / 2.0
area = width * height
perim = 2 * width + 2 * height
rect = [QgsPoint(minx, miny), QgsPoint(minx, maxy), QgsPoint(maxx,
maxy), QgsPoint(maxx, miny), QgsPoint(minx, miny)]
geometry = QgsGeometry().fromPolygon([rect])
feat.setGeometry(geometry)
attrs = [
minx,
miny,
maxx,
maxy,
cntx,
cnty,
area,
perim,
height,
width,
]
feat.setAttributes(attrs)
writer.addFeature(feat)
current += 1
progress.setPercentage(int(current * total))
|
sbromley07/example-app
|
refs/heads/master
|
lib/flask/blueprints.py
|
773
|
# -*- coding: utf-8 -*-
"""
flask.blueprints
~~~~~~~~~~~~~~~~
Blueprints are the recommended way to implement larger or more
pluggable applications in Flask 0.7 and later.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from functools import update_wrapper
from .helpers import _PackageBoundObject, _endpoint_from_view_func
class BlueprintSetupState(object):
"""Temporary holder object for registering a blueprint with the
application. An instance of this class is created by the
:meth:`~flask.Blueprint.make_setup_state` method and later passed
to all register callback functions.
"""
def __init__(self, blueprint, app, options, first_registration):
#: a reference to the current application
self.app = app
#: a reference to the blueprint that created this setup state.
self.blueprint = blueprint
#: a dictionary with all options that were passed to the
#: :meth:`~flask.Flask.register_blueprint` method.
self.options = options
#: as blueprints can be registered multiple times with the
#: application and not everything wants to be registered
#: multiple times on it, this attribute can be used to figure
#: out if the blueprint was registered in the past already.
self.first_registration = first_registration
subdomain = self.options.get('subdomain')
if subdomain is None:
subdomain = self.blueprint.subdomain
#: The subdomain that the blueprint should be active for, `None`
#: otherwise.
self.subdomain = subdomain
url_prefix = self.options.get('url_prefix')
if url_prefix is None:
url_prefix = self.blueprint.url_prefix
#: The prefix that should be used for all URLs defined on the
#: blueprint.
self.url_prefix = url_prefix
#: A dictionary with URL defaults that is added to each and every
#: URL that was defined with the blueprint.
self.url_defaults = dict(self.blueprint.url_values_defaults)
self.url_defaults.update(self.options.get('url_defaults', ()))
def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
"""A helper method to register a rule (and optionally a view function)
to the application. The endpoint is automatically prefixed with the
blueprint's name.
"""
if self.url_prefix:
rule = self.url_prefix + rule
options.setdefault('subdomain', self.subdomain)
if endpoint is None:
endpoint = _endpoint_from_view_func(view_func)
defaults = self.url_defaults
if 'defaults' in options:
defaults = dict(defaults, **options.pop('defaults'))
self.app.add_url_rule(rule, '%s.%s' % (self.blueprint.name, endpoint),
view_func, defaults=defaults, **options)
class Blueprint(_PackageBoundObject):
"""Represents a blueprint. A blueprint is an object that records
functions that will be called with the
:class:`~flask.blueprint.BlueprintSetupState` later to register functions
or other things on the main application. See :ref:`blueprints` for more
information.
.. versionadded:: 0.7
"""
warn_on_modifications = False
_got_registered_once = False
def __init__(self, name, import_name, static_folder=None,
static_url_path=None, template_folder=None,
url_prefix=None, subdomain=None, url_defaults=None):
_PackageBoundObject.__init__(self, import_name, template_folder)
self.name = name
self.url_prefix = url_prefix
self.subdomain = subdomain
self.static_folder = static_folder
self.static_url_path = static_url_path
self.deferred_functions = []
self.view_functions = {}
if url_defaults is None:
url_defaults = {}
self.url_values_defaults = url_defaults
def record(self, func):
"""Registers a function that is called when the blueprint is
registered on the application. This function is called with the
state as argument as returned by the :meth:`make_setup_state`
method.
"""
if self._got_registered_once and self.warn_on_modifications:
from warnings import warn
warn(Warning('The blueprint was already registered once '
'but is getting modified now. These changes '
'will not show up.'))
self.deferred_functions.append(func)
def record_once(self, func):
"""Works like :meth:`record` but wraps the function in another
function that will ensure the function is only called once. If the
blueprint is registered a second time on the application, the
function passed is not called.
"""
def wrapper(state):
if state.first_registration:
func(state)
return self.record(update_wrapper(wrapper, func))
def make_setup_state(self, app, options, first_registration=False):
"""Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState`
object that is later passed to the register callback functions.
Subclasses can override this to return a subclass of the setup state.
"""
return BlueprintSetupState(self, app, options, first_registration)
def register(self, app, options, first_registration=False):
"""Called by :meth:`Flask.register_blueprint` to register a blueprint
on the application. This can be overridden to customize the register
behavior. Keyword arguments from
:func:`~flask.Flask.register_blueprint` are directly forwarded to this
method in the `options` dictionary.
"""
self._got_registered_once = True
state = self.make_setup_state(app, options, first_registration)
if self.has_static_folder:
state.add_url_rule(self.static_url_path + '/<path:filename>',
view_func=self.send_static_file,
endpoint='static')
for deferred in self.deferred_functions:
deferred(state)
def route(self, rule, **options):
"""Like :meth:`Flask.route` but for a blueprint. The endpoint for the
:func:`url_for` function is prefixed with the name of the blueprint.
"""
def decorator(f):
endpoint = options.pop("endpoint", f.__name__)
self.add_url_rule(rule, endpoint, f, **options)
return f
return decorator
def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
"""Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for
the :func:`url_for` function is prefixed with the name of the blueprint.
"""
if endpoint:
            assert '.' not in endpoint, "Blueprint endpoints should not contain dots"
self.record(lambda s:
s.add_url_rule(rule, endpoint, view_func, **options))
def endpoint(self, endpoint):
"""Like :meth:`Flask.endpoint` but for a blueprint. This does not
prefix the endpoint with the blueprint name, this has to be done
explicitly by the user of this method. If the endpoint is prefixed
with a `.` it will be registered to the current blueprint, otherwise
it's an application independent endpoint.
"""
def decorator(f):
def register_endpoint(state):
state.app.view_functions[endpoint] = f
self.record_once(register_endpoint)
return f
return decorator
def app_template_filter(self, name=None):
"""Register a custom template filter, available application wide. Like
:meth:`Flask.template_filter` but for a blueprint.
:param name: the optional name of the filter, otherwise the
function name will be used.
"""
def decorator(f):
self.add_app_template_filter(f, name=name)
return f
return decorator
def add_app_template_filter(self, f, name=None):
"""Register a custom template filter, available application wide. Like
:meth:`Flask.add_template_filter` but for a blueprint. Works exactly
like the :meth:`app_template_filter` decorator.
:param name: the optional name of the filter, otherwise the
function name will be used.
"""
def register_template(state):
state.app.jinja_env.filters[name or f.__name__] = f
self.record_once(register_template)
def app_template_test(self, name=None):
"""Register a custom template test, available application wide. Like
:meth:`Flask.template_test` but for a blueprint.
.. versionadded:: 0.10
:param name: the optional name of the test, otherwise the
function name will be used.
"""
def decorator(f):
self.add_app_template_test(f, name=name)
return f
return decorator
def add_app_template_test(self, f, name=None):
"""Register a custom template test, available application wide. Like
:meth:`Flask.add_template_test` but for a blueprint. Works exactly
like the :meth:`app_template_test` decorator.
.. versionadded:: 0.10
:param name: the optional name of the test, otherwise the
function name will be used.
"""
def register_template(state):
state.app.jinja_env.tests[name or f.__name__] = f
self.record_once(register_template)
def app_template_global(self, name=None):
"""Register a custom template global, available application wide. Like
:meth:`Flask.template_global` but for a blueprint.
.. versionadded:: 0.10
:param name: the optional name of the global, otherwise the
function name will be used.
"""
def decorator(f):
self.add_app_template_global(f, name=name)
return f
return decorator
def add_app_template_global(self, f, name=None):
"""Register a custom template global, available application wide. Like
:meth:`Flask.add_template_global` but for a blueprint. Works exactly
like the :meth:`app_template_global` decorator.
.. versionadded:: 0.10
:param name: the optional name of the global, otherwise the
function name will be used.
"""
def register_template(state):
state.app.jinja_env.globals[name or f.__name__] = f
self.record_once(register_template)
def before_request(self, f):
"""Like :meth:`Flask.before_request` but for a blueprint. This function
is only executed before each request that is handled by a function of
that blueprint.
"""
self.record_once(lambda s: s.app.before_request_funcs
.setdefault(self.name, []).append(f))
return f
def before_app_request(self, f):
"""Like :meth:`Flask.before_request`. Such a function is executed
before each request, even if outside of a blueprint.
"""
self.record_once(lambda s: s.app.before_request_funcs
.setdefault(None, []).append(f))
return f
def before_app_first_request(self, f):
"""Like :meth:`Flask.before_first_request`. Such a function is
executed before the first request to the application.
"""
self.record_once(lambda s: s.app.before_first_request_funcs.append(f))
return f
def after_request(self, f):
"""Like :meth:`Flask.after_request` but for a blueprint. This function
is only executed after each request that is handled by a function of
that blueprint.
"""
self.record_once(lambda s: s.app.after_request_funcs
.setdefault(self.name, []).append(f))
return f
def after_app_request(self, f):
"""Like :meth:`Flask.after_request` but for a blueprint. Such a function
is executed after each request, even if outside of the blueprint.
"""
self.record_once(lambda s: s.app.after_request_funcs
.setdefault(None, []).append(f))
return f
def teardown_request(self, f):
"""Like :meth:`Flask.teardown_request` but for a blueprint. This
function is only executed when tearing down requests handled by a
function of that blueprint. Teardown request functions are executed
when the request context is popped, even when no actual request was
performed.
"""
self.record_once(lambda s: s.app.teardown_request_funcs
.setdefault(self.name, []).append(f))
return f
def teardown_app_request(self, f):
"""Like :meth:`Flask.teardown_request` but for a blueprint. Such a
function is executed when tearing down each request, even if outside of
the blueprint.
"""
self.record_once(lambda s: s.app.teardown_request_funcs
.setdefault(None, []).append(f))
return f
def context_processor(self, f):
"""Like :meth:`Flask.context_processor` but for a blueprint. This
function is only executed for requests handled by a blueprint.
"""
self.record_once(lambda s: s.app.template_context_processors
.setdefault(self.name, []).append(f))
return f
def app_context_processor(self, f):
"""Like :meth:`Flask.context_processor` but for a blueprint. Such a
        function is executed for each request, even if outside of the blueprint.
"""
self.record_once(lambda s: s.app.template_context_processors
.setdefault(None, []).append(f))
return f
def app_errorhandler(self, code):
"""Like :meth:`Flask.errorhandler` but for a blueprint. This
handler is used for all requests, even if outside of the blueprint.
"""
def decorator(f):
self.record_once(lambda s: s.app.errorhandler(code)(f))
return f
return decorator
def url_value_preprocessor(self, f):
"""Registers a function as URL value preprocessor for this
blueprint. It's called before the view functions are called and
can modify the url values provided.
"""
self.record_once(lambda s: s.app.url_value_preprocessors
.setdefault(self.name, []).append(f))
return f
def url_defaults(self, f):
"""Callback function for URL defaults for this blueprint. It's called
with the endpoint and values and should update the values passed
in place.
"""
self.record_once(lambda s: s.app.url_default_functions
.setdefault(self.name, []).append(f))
return f
def app_url_value_preprocessor(self, f):
"""Same as :meth:`url_value_preprocessor` but application wide.
"""
self.record_once(lambda s: s.app.url_value_preprocessors
.setdefault(None, []).append(f))
return f
def app_url_defaults(self, f):
"""Same as :meth:`url_defaults` but application wide.
"""
self.record_once(lambda s: s.app.url_default_functions
.setdefault(None, []).append(f))
return f
def errorhandler(self, code_or_exception):
"""Registers an error handler that becomes active for this blueprint
        only. Please be aware that routing does not happen locally to a
        blueprint, so an error handler for 404 is usually not handled by
        a blueprint unless it is raised inside a view function. Another
special case is the 500 internal server error which is always looked
up from the application.
Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator
of the :class:`~flask.Flask` object.
"""
def decorator(f):
self.record_once(lambda s: s.app._register_error_handler(
self.name, code_or_exception, f))
return f
return decorator
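# A minimal usage sketch (editor's addition, not part of this module),
# assuming only the public Blueprint API documented above; the blueprint
# name and view below are hypothetical:
#
#     from flask import Flask, Blueprint
#
#     bp = Blueprint('admin', __name__)
#
#     @bp.route('/ping')
#     def ping():
#         return 'pong'
#
#     app = Flask(__name__)
#     app.register_blueprint(bp, url_prefix='/admin')
#     # url_for('admin.ping') now resolves to '/admin/ping'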
|
spock1104/android_kernel_zte_msm8930
|
refs/heads/stockmod
|
scripts/build-all.py
|
1182
|
#! /usr/bin/env python
# Copyright (c) 2009-2011, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Linux Foundation nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Build the kernel for all targets using the Android build environment.
#
# TODO: Accept arguments to indicate what to build.
import errno
import glob
from optparse import OptionParser
import subprocess
import os
import os.path
import shutil
import sys
version = 'build-all.py, version 0.01'
build_dir = '../all-kernels'
make_command = ["vmlinux", "modules"]
make_env = os.environ
make_env.update({
'ARCH': 'arm',
'CROSS_COMPILE': 'arm-none-linux-gnueabi-',
'KCONFIG_NOTIMESTAMP': 'true' })
all_options = {}
def error(msg):
sys.stderr.write("error: %s\n" % msg)
def fail(msg):
"""Fail with a user-printed message"""
error(msg)
sys.exit(1)
def check_kernel():
"""Ensure that PWD is a kernel directory"""
if (not os.path.isfile('MAINTAINERS') or
not os.path.isfile('arch/arm/mach-msm/Kconfig')):
fail("This doesn't seem to be an MSM kernel dir")
def check_build():
"""Ensure that the build directory is present."""
if not os.path.isdir(build_dir):
try:
os.makedirs(build_dir)
except OSError as exc:
if exc.errno == errno.EEXIST:
pass
else:
raise
def update_config(filename, config_str):
    print 'Updating %s with \'%s\'\n' % (filename, config_str)
    defconfig = open(filename, 'a')
    defconfig.write(config_str + '\n')
    defconfig.close()
def scan_configs():
"""Get the full list of defconfigs appropriate for this tree."""
names = {}
for n in glob.glob('arch/arm/configs/[fm]sm[0-9-]*_defconfig'):
names[os.path.basename(n)[:-10]] = n
for n in glob.glob('arch/arm/configs/qsd*_defconfig'):
names[os.path.basename(n)[:-10]] = n
for n in glob.glob('arch/arm/configs/apq*_defconfig'):
names[os.path.basename(n)[:-10]] = n
return names
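# For example (editor's note): a file 'arch/arm/configs/msm8960_defconfig'
# is registered under the target name 'msm8960'; the slice [:-10] strips the
# 10-character suffix '_defconfig'.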
class Builder:
def __init__(self, logname):
self.logname = logname
self.fd = open(logname, 'w')
def run(self, args):
devnull = open('/dev/null', 'r')
proc = subprocess.Popen(args, stdin=devnull,
env=make_env,
bufsize=0,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
count = 0
# for line in proc.stdout:
rawfd = proc.stdout.fileno()
while True:
line = os.read(rawfd, 1024)
if not line:
break
self.fd.write(line)
self.fd.flush()
if all_options.verbose:
sys.stdout.write(line)
sys.stdout.flush()
else:
for i in range(line.count('\n')):
count += 1
if count == 64:
count = 0
print
sys.stdout.write('.')
sys.stdout.flush()
print
result = proc.wait()
self.fd.close()
return result
failed_targets = []
def build(target):
dest_dir = os.path.join(build_dir, target)
log_name = '%s/log-%s.log' % (build_dir, target)
print 'Building %s in %s log %s' % (target, dest_dir, log_name)
if not os.path.isdir(dest_dir):
os.mkdir(dest_dir)
defconfig = 'arch/arm/configs/%s_defconfig' % target
dotconfig = '%s/.config' % dest_dir
savedefconfig = '%s/defconfig' % dest_dir
shutil.copyfile(defconfig, dotconfig)
devnull = open('/dev/null', 'r')
subprocess.check_call(['make', 'O=%s' % dest_dir,
'%s_defconfig' % target], env=make_env, stdin=devnull)
devnull.close()
if not all_options.updateconfigs:
build = Builder(log_name)
result = build.run(['make', 'O=%s' % dest_dir] + make_command)
if result != 0:
if all_options.keep_going:
failed_targets.append(target)
fail_or_error = error
else:
fail_or_error = fail
fail_or_error("Failed to build %s, see %s" % (target, build.logname))
# Copy the defconfig back.
if all_options.configs or all_options.updateconfigs:
devnull = open('/dev/null', 'r')
subprocess.check_call(['make', 'O=%s' % dest_dir,
'savedefconfig'], env=make_env, stdin=devnull)
devnull.close()
shutil.copyfile(savedefconfig, defconfig)
def build_many(allconf, targets):
print "Building %d target(s)" % len(targets)
for target in targets:
if all_options.updateconfigs:
update_config(allconf[target], all_options.updateconfigs)
build(target)
if failed_targets:
fail('\n '.join(["Failed targets:"] +
[target for target in failed_targets]))
def main():
global make_command
check_kernel()
check_build()
configs = scan_configs()
usage = ("""
%prog [options] all -- Build all targets
%prog [options] target target ... -- List specific targets
%prog [options] perf -- Build all perf targets
%prog [options] noperf -- Build all non-perf targets""")
parser = OptionParser(usage=usage, version=version)
parser.add_option('--configs', action='store_true',
dest='configs',
help="Copy configs back into tree")
parser.add_option('--list', action='store_true',
dest='list',
help='List available targets')
parser.add_option('-v', '--verbose', action='store_true',
dest='verbose',
help='Output to stdout in addition to log file')
parser.add_option('--oldconfig', action='store_true',
dest='oldconfig',
help='Only process "make oldconfig"')
parser.add_option('--updateconfigs',
dest='updateconfigs',
help="Update defconfigs with provided option setting, "
"e.g. --updateconfigs=\'CONFIG_USE_THING=y\'")
parser.add_option('-j', '--jobs', type='int', dest="jobs",
help="Number of simultaneous jobs")
parser.add_option('-l', '--load-average', type='int',
dest='load_average',
help="Don't start multiple jobs unless load is below LOAD_AVERAGE")
parser.add_option('-k', '--keep-going', action='store_true',
dest='keep_going', default=False,
help="Keep building other targets if a target fails")
parser.add_option('-m', '--make-target', action='append',
help='Build the indicated make target (default: %s)' %
' '.join(make_command))
(options, args) = parser.parse_args()
global all_options
all_options = options
if options.list:
print "Available targets:"
for target in configs.keys():
print " %s" % target
sys.exit(0)
if options.oldconfig:
make_command = ["oldconfig"]
elif options.make_target:
make_command = options.make_target
if options.jobs:
make_command.append("-j%d" % options.jobs)
if options.load_average:
make_command.append("-l%d" % options.load_average)
if args == ['all']:
build_many(configs, configs.keys())
elif args == ['perf']:
targets = []
for t in configs.keys():
if "perf" in t:
targets.append(t)
build_many(configs, targets)
elif args == ['noperf']:
targets = []
for t in configs.keys():
if "perf" not in t:
targets.append(t)
build_many(configs, targets)
elif len(args) > 0:
targets = []
for t in args:
if t not in configs.keys():
parser.error("Target '%s' not one of %s" % (t, configs.keys()))
targets.append(t)
build_many(configs, targets)
else:
parser.error("Must specify a target to build, or 'all'")
if __name__ == "__main__":
main()
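# Example invocations (editor's addition; the target names are hypothetical):
#   ./build-all.py --list                  # show discovered defconfig targets
#   ./build-all.py -j 8 msm8960 msm8660    # build two specific targets
#   ./build-all.py --updateconfigs='CONFIG_FOO=y' all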
|
infoxchange/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.3/tests/regressiontests/inspectdb/tests.py
|
52
|
from StringIO import StringIO
from django.core.management import call_command
from django.test import TestCase, skipUnlessDBFeature
class InspectDBTestCase(TestCase):
@skipUnlessDBFeature('can_introspect_foreign_keys')
def test_attribute_name_not_python_keyword(self):
out = StringIO()
call_command('inspectdb', stdout=out)
error_message = "inspectdb generated an attribute name which is a python keyword"
self.assertNotIn("from = models.ForeignKey(InspectdbPeople)", out.getvalue(), msg=error_message)
self.assertIn("from_field = models.ForeignKey(InspectdbPeople)", out.getvalue())
out.close()
|
sfromm/snmpryte
|
refs/heads/master
|
lib/netspryte/commands/rrdtune.py
|
2
|
# Written by Stephen Fromm <stephenf nero net>
# Copyright (C) 2015-2017 University of Oregon
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
import argparse
import os
import sys
import logging
import re
from netspryte.commands import BaseCommand
from netspryte import constants as C
from netspryte.utils import *
from netspryte.db.rrd import *
from netspryte.manager import *
class RrdTuneCommand(BaseCommand):
def __init__(self, daemonize=False):
super(RrdTuneCommand, self).__init__(daemonize)
self.parser.add_argument('--file',
help='Path to RRD to tune')
self.parser.add_argument('-n', '--dryrun',
action="store_true", default=False,
help="Perform dryrun. Do not make changes")
def run(self):
args = self.parser.parse_args()
setup_logging(args.verbose)
cfg = C.load_config()
rrd_xml = list()
rrd_path = args.file
if not os.path.exists(rrd_path):
logging.error("rrd path does not exist: %s", rrd_path)
return
data_id = mk_data_instance_id_from_filename(rrd_path)
xml_path = rrd_path.replace('rrd', 'xml')
mgr = Manager()
this_inst = mgr.get(MeasurementInstance, name=data_id)
if not this_inst:
logging.error("failed to look up measurement instance associated with file %s", rrd_path)
return
info = rrd_info(rrd_path)
try:
if this_inst.measurement_class.name in ["interface", "cbqos"]:
ds_max = int(this_inst.attrs['ifSpeed'])
if ds_max == ( 2**32 - 1):
ds_max = int(this_inst.attrs['ifHighSpeed']) * 10**6
if ds_max == 0:
ds_max = 40 * 10**9
rrd_tune_ds_max(rrd_path, ds_max)
except KeyError as e:
logging.error("failed to look up key: %s", e)
|
dchaplinsky/pep.org.ua
|
refs/heads/master
|
pepdb/core/migrations/0149_auto_20180910_1922.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-09-10 16:22
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('core', '0148_auto_20180826_2045'),
]
operations = [
migrations.AddField(
model_name='feedbackmessage',
name='answer_added',
field=models.DateTimeField(blank=True, null=True, verbose_name='\u0411\u0443\u043b\u0430 \u043d\u0430\u0434\u0456\u0441\u043b\u0430\u043d\u0430'),
),
migrations.AddField(
model_name='feedbackmessage',
name='answered_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='\u0412\u0456\u0434\u043f\u043e\u0432\u0456\u0432'),
),
migrations.AddField(
model_name='feedbackmessage',
name='short_answer',
field=models.TextField(blank=True, null=True, verbose_name='\u0421\u0443\u0442\u044c \u0432\u0456\u0434\u043f\u043e\u0432\u0456\u0434\u0456'),
),
]
|
mgax/czl-scrape
|
refs/heads/master
|
sanatate/scrapy_proj/helpers/romanian.py
|
5
|
# -*- coding: utf-8 -*-
class RomanianHelper(object):
@staticmethod
def englishize_romanian(string):
symbols = (u"țţȚŢșşȘŞăǎĂîÎâÂ",
u"ttTTssSSaaAiIaA")
tr = {ord(a):ord(b) for a, b in zip(*symbols)}
return string.translate(tr)
@staticmethod
def beautify_romanian(string):
symbols = (u"ǎţşŢŞ",
u"ățșȚȘ")
tr = {ord(a):ord(b) for a, b in zip(*symbols)}
return string.translate(tr)
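# A minimal usage sketch (editor's addition); the expected outputs follow
# from the translation tables above:
if __name__ == '__main__':
    assert RomanianHelper.englishize_romanian(u"știință") == u"stiinta"
    assert RomanianHelper.beautify_romanian(u"ǎţşŢŞ") == u"ățșȚȘ"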
|
drukhil/frappe
|
refs/heads/master
|
frappe/patches/v6_16/feed_doc_owner.py
|
32
|
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doctype("Communication")
for doctype, name in frappe.db.sql("""select distinct reference_doctype, reference_name
from `tabCommunication`
where
(reference_doctype is not null and reference_doctype != '')
and (reference_name is not null and reference_name != '')
and (reference_owner is null or reference_owner = '')
for update"""):
owner = frappe.db.get_value(doctype, name, "owner")
if not owner:
continue
frappe.db.sql("""update `tabCommunication`
set reference_owner=%(owner)s
where
reference_doctype=%(doctype)s
and reference_name=%(name)s
			and (reference_owner is null or reference_owner = '')""", {
"doctype": doctype,
"name": name,
"owner": owner
})
frappe.db.commit()
|
saimn/astropy
|
refs/heads/main
|
astropy/io/tests/test_registry_help.py
|
8
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from io import StringIO
from astropy.table import Table
from astropy.nddata import CCDData
def test_table_read_help_fits():
"""
Test dynamically created documentation help via the I/O registry for 'fits'.
"""
out = StringIO()
Table.read.help('fits', out)
doc = out.getvalue()
# Check a smattering of expected content
assert "Table.read general documentation" not in doc
assert "The available built-in formats" not in doc
assert "Table.read(format='fits') documentation" in doc
assert "hdu : int or str, optional" in doc
def test_table_read_help_ascii():
"""
Test dynamically created documentation help via the I/O registry for 'ascii'.
"""
out = StringIO()
Table.read.help('ascii', out)
doc = out.getvalue()
# Check a smattering of expected content
assert "Table.read general documentation" not in doc
assert "The available built-in formats" not in doc
assert "Table.read(format='ascii') documentation" in doc
assert "delimiter : str" in doc
assert "ASCII reader 'ascii' details" in doc
assert "Character-delimited table with a single header line" in doc
def test_table_write_help_hdf5():
"""
Test dynamically created documentation help via the I/O registry for 'hdf5'.
"""
out = StringIO()
Table.write.help('hdf5', out)
doc = out.getvalue()
# Check a smattering of expected content
assert "Table.write general documentation" not in doc
assert "The available built-in formats" not in doc
assert "Table.write(format='hdf5') documentation" in doc
assert "Write a Table object to an HDF5 file" in doc
assert "compression : bool or str or int" in doc
def test_list_formats():
"""
Test getting list of available formats
"""
out = StringIO()
CCDData.write.list_formats(out)
output = out.getvalue()
assert output == """\
Format Read Write Auto-identify
------ ---- ----- -------------
fits Yes Yes Yes"""
def test_table_write_help_fits():
"""
Test dynamically created documentation help via the I/O registry for 'fits'.
"""
out = StringIO()
Table.write.help('fits', out)
doc = out.getvalue()
# Check a smattering of expected content
assert "Table.write general documentation" not in doc
assert "The available built-in formats" not in doc
assert "Table.write(format='fits') documentation" in doc
assert "Write a Table object to a FITS file" in doc
def test_table_write_help_no_format():
"""
Test dynamically created documentation help via the I/O registry for no
format provided.
"""
out = StringIO()
Table.write.help(out=out)
doc = out.getvalue()
# Check a smattering of expected content
assert "Table.write general documentation" in doc
assert "The available built-in formats" in doc
def test_table_read_help_no_format():
"""
    Test dynamically created documentation help via the I/O registry for no
format provided.
"""
out = StringIO()
Table.read.help(out=out)
doc = out.getvalue()
# Check a smattering of expected content
assert "Table.read general documentation" in doc
assert "The available built-in formats" in doc
def test_ccddata_write_help_fits():
"""
Test dynamically created documentation help via the I/O registry for 'fits'.
"""
out = StringIO()
CCDData.write.help('fits', out)
doc = out.getvalue()
# Check a smattering of expected content
assert "CCDData.write(format='fits') documentation" in doc
assert "Write CCDData object to FITS file" in doc
assert "key_uncertainty_type : str, optional" in doc
def test_ccddata_read_help_fits():
"""Test dynamically created documentation help via the I/O registry for
CCDData 'fits'.
"""
out = StringIO()
CCDData.read.help('fits', out)
doc = out.getvalue()
# Check a smattering of expected content
assert "CCDData.read(format='fits') documentation" in doc
assert "Generate a CCDData object from a FITS file" in doc
assert "hdu_uncertainty : str or None, optional" in doc
def test_table_write_help_jsviewer():
"""
Test dynamically created documentation help via the I/O registry for
'jsviewer'.
"""
out = StringIO()
Table.write.help('jsviewer', out)
doc = out.getvalue()
# Check a smattering of expected content
assert "Table.write general documentation" not in doc
assert "The available built-in formats" not in doc
assert "Table.write(format='jsviewer') documentation" in doc
|
luiseduardohdbackup/odoo
|
refs/heads/8.0
|
openerp/addons/test_documentation_examples/delegation.py
|
366
|
# -*- coding: utf-8 -*-
from openerp import models, fields
class Child0(models.Model):
_name = 'delegation.child0'
field_0 = fields.Integer()
class Child1(models.Model):
_name = 'delegation.child1'
field_1 = fields.Integer()
class Delegating(models.Model):
_name = 'delegation.parent'
_inherits = {
'delegation.child0': 'child0_id',
'delegation.child1': 'child1_id',
}
child0_id = fields.Many2one('delegation.child0', required=True, ondelete='cascade')
child1_id = fields.Many2one('delegation.child1', required=True, ondelete='cascade')
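# A minimal usage sketch (editor's addition; 'env' is the usual Odoo
# environment and is assumed here): through _inherits delegation, a
# 'delegation.parent' record transparently exposes the fields of both
# auto-created children:
#
#     record = env['delegation.parent'].create({'field_0': 1, 'field_1': 2})
#     assert record.field_0 == 1 and record.field_1 == 2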
|
alajara/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/pywebsocket/src/test/test_endtoend.py
|
449
|
#!/usr/bin/env python
#
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""End-to-end tests for pywebsocket. Tests standalone.py by default. You
can also test mod_pywebsocket hosted on an Apache server by setting
_use_external_server to True and modifying _external_server_port to point to
the port on which the Apache server is running.
"""
import logging
import os
import signal
import socket
import subprocess
import sys
import time
import unittest
import set_sys_path # Update sys.path to locate mod_pywebsocket module.
from test import client_for_testing
from test import mux_client_for_testing
# Special message that tells the echo server to start closing handshake
_GOODBYE_MESSAGE = 'Goodbye'
_SERVER_WARMUP_IN_SEC = 0.2
# If you want to use an external server to run the end-to-end tests, set the
# following parameters correctly.
_use_external_server = False
_external_server_port = 0
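# For example (editor's sketch, port value assumed), to test an Apache-hosted
# mod_pywebsocket already running on port 8080:
#     _use_external_server = True
#     _external_server_port = 8080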
# Test body functions
def _echo_check_procedure(client):
client.connect()
client.send_message('test')
client.assert_receive('test')
client.send_message('helloworld')
client.assert_receive('helloworld')
client.send_close()
client.assert_receive_close()
client.assert_connection_closed()
def _echo_check_procedure_with_binary(client):
client.connect()
client.send_message('binary', binary=True)
client.assert_receive('binary', binary=True)
client.send_message('\x00\x80\xfe\xff\x00\x80', binary=True)
client.assert_receive('\x00\x80\xfe\xff\x00\x80', binary=True)
client.send_close()
client.assert_receive_close()
client.assert_connection_closed()
def _echo_check_procedure_with_goodbye(client):
client.connect()
client.send_message('test')
client.assert_receive('test')
client.send_message(_GOODBYE_MESSAGE)
client.assert_receive(_GOODBYE_MESSAGE)
client.assert_receive_close()
client.send_close()
client.assert_connection_closed()
def _echo_check_procedure_with_code_and_reason(client, code, reason):
client.connect()
client.send_close(code, reason)
client.assert_receive_close(code, reason)
client.assert_connection_closed()
def _unmasked_frame_check_procedure(client):
client.connect()
client.send_message('test', mask=False)
client.assert_receive_close(client_for_testing.STATUS_PROTOCOL_ERROR, '')
client.assert_connection_closed()
def _mux_echo_check_procedure(mux_client):
mux_client.connect()
mux_client.send_flow_control(1, 1024)
logical_channel_options = client_for_testing.ClientOptions()
logical_channel_options.server_host = 'localhost'
logical_channel_options.server_port = 80
logical_channel_options.origin = 'http://localhost'
logical_channel_options.resource = '/echo'
mux_client.add_channel(2, logical_channel_options)
mux_client.send_flow_control(2, 1024)
mux_client.send_message(2, 'test')
mux_client.assert_receive(2, 'test')
mux_client.add_channel(3, logical_channel_options)
mux_client.send_flow_control(3, 1024)
mux_client.send_message(2, 'hello')
mux_client.send_message(3, 'world')
mux_client.assert_receive(2, 'hello')
mux_client.assert_receive(3, 'world')
# Don't send close message on channel id 1 so that server-initiated
# closing handshake won't occur.
mux_client.send_close(2)
mux_client.send_close(3)
mux_client.assert_receive_close(2)
mux_client.assert_receive_close(3)
mux_client.send_physical_connection_close()
mux_client.assert_physical_connection_receive_close()
class EndToEndTestBase(unittest.TestCase):
"""Base class for end-to-end tests that launch pywebsocket standalone
server as a separate process, connect to it using the client_for_testing
module, and check if the server behaves correctly by exchanging opening
handshake and frames over a TCP connection.
"""
def setUp(self):
self.server_stderr = None
self.top_dir = os.path.join(os.path.split(__file__)[0], '..')
os.putenv('PYTHONPATH', os.path.pathsep.join(sys.path))
self.standalone_command = os.path.join(
self.top_dir, 'mod_pywebsocket', 'standalone.py')
self.document_root = os.path.join(self.top_dir, 'example')
s = socket.socket()
s.bind(('localhost', 0))
(_, self.test_port) = s.getsockname()
s.close()
self._options = client_for_testing.ClientOptions()
self._options.server_host = 'localhost'
self._options.origin = 'http://localhost'
self._options.resource = '/echo'
        # TODO(toyoshim): Eliminate launching a standalone server when using
        # an external server.
if _use_external_server:
self._options.server_port = _external_server_port
else:
self._options.server_port = self.test_port
# TODO(tyoshino): Use tearDown to kill the server.
def _run_python_command(self, commandline, stdout=None, stderr=None):
return subprocess.Popen([sys.executable] + commandline, close_fds=True,
stdout=stdout, stderr=stderr)
def _run_server(self):
args = [self.standalone_command,
'-H', 'localhost',
'-V', 'localhost',
'-p', str(self.test_port),
'-P', str(self.test_port),
'-d', self.document_root]
# Inherit the level set to the root logger by test runner.
root_logger = logging.getLogger()
log_level = root_logger.getEffectiveLevel()
if log_level != logging.NOTSET:
args.append('--log-level')
args.append(logging.getLevelName(log_level).lower())
return self._run_python_command(args,
stderr=self.server_stderr)
def _kill_process(self, pid):
if sys.platform in ('win32', 'cygwin'):
subprocess.call(
('taskkill.exe', '/f', '/pid', str(pid)), close_fds=True)
else:
os.kill(pid, signal.SIGKILL)
class EndToEndHyBiTest(EndToEndTestBase):
def setUp(self):
EndToEndTestBase.setUp(self)
def _run_test_with_client_options(self, test_function, options):
server = self._run_server()
try:
# TODO(tyoshino): add some logic to poll the server until it
# becomes ready
time.sleep(_SERVER_WARMUP_IN_SEC)
client = client_for_testing.create_client(options)
try:
test_function(client)
finally:
client.close_socket()
finally:
self._kill_process(server.pid)
def _run_test(self, test_function):
self._run_test_with_client_options(test_function, self._options)
def _run_deflate_frame_test(self, test_function):
server = self._run_server()
try:
time.sleep(_SERVER_WARMUP_IN_SEC)
self._options.enable_deflate_frame()
client = client_for_testing.create_client(self._options)
try:
test_function(client)
finally:
client.close_socket()
finally:
self._kill_process(server.pid)
def _run_permessage_deflate_test(
self, offer, response_checker, test_function):
server = self._run_server()
try:
time.sleep(_SERVER_WARMUP_IN_SEC)
self._options.extensions += offer
self._options.check_permessage_deflate = response_checker
client = client_for_testing.create_client(self._options)
try:
client.connect()
if test_function is not None:
test_function(client)
client.assert_connection_closed()
finally:
client.close_socket()
finally:
self._kill_process(server.pid)
def _run_close_with_code_and_reason_test(self, test_function, code,
reason):
server = self._run_server()
try:
time.sleep(_SERVER_WARMUP_IN_SEC)
client = client_for_testing.create_client(self._options)
try:
test_function(client, code, reason)
finally:
client.close_socket()
finally:
self._kill_process(server.pid)
def _run_http_fallback_test(self, options, status):
server = self._run_server()
try:
time.sleep(_SERVER_WARMUP_IN_SEC)
client = client_for_testing.create_client(options)
try:
client.connect()
self.fail('Could not catch HttpStatusException')
except client_for_testing.HttpStatusException, e:
self.assertEqual(status, e.status)
            except Exception, e:
                self.fail('Caught unexpected exception: %r' % e)
finally:
client.close_socket()
finally:
self._kill_process(server.pid)
def _run_mux_test(self, test_function):
server = self._run_server()
try:
time.sleep(_SERVER_WARMUP_IN_SEC)
client = mux_client_for_testing.MuxClient(self._options)
try:
test_function(client)
finally:
client.close_socket()
finally:
self._kill_process(server.pid)
def test_echo(self):
self._run_test(_echo_check_procedure)
def test_echo_binary(self):
self._run_test(_echo_check_procedure_with_binary)
def test_echo_server_close(self):
self._run_test(_echo_check_procedure_with_goodbye)
def test_unmasked_frame(self):
self._run_test(_unmasked_frame_check_procedure)
def test_echo_deflate_frame(self):
self._run_deflate_frame_test(_echo_check_procedure)
def test_echo_deflate_frame_server_close(self):
self._run_deflate_frame_test(
_echo_check_procedure_with_goodbye)
def test_echo_permessage_deflate(self):
def test_function(client):
# From the examples in the spec.
compressed_hello = '\xf2\x48\xcd\xc9\xc9\x07\x00'
client._stream.send_data(
compressed_hello,
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
compressed_hello,
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
self.assertEquals('permessage-deflate', parameter.name())
self.assertEquals([], parameter.get_parameters())
self._run_permessage_deflate_test(
['permessage-deflate'],
response_checker,
test_function)
def test_echo_permessage_deflate_two_frames(self):
def test_function(client):
# From the examples in the spec.
client._stream.send_data(
'\xf2\x48\xcd',
client_for_testing.OPCODE_TEXT,
end=False,
rsv1=1)
client._stream.send_data(
'\xc9\xc9\x07\x00',
client_for_testing.OPCODE_TEXT)
client._stream.assert_receive_binary(
'\xf2\x48\xcd\xc9\xc9\x07\x00',
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
self.assertEquals('permessage-deflate', parameter.name())
self.assertEquals([], parameter.get_parameters())
self._run_permessage_deflate_test(
['permessage-deflate'],
response_checker,
test_function)
def test_echo_permessage_deflate_two_messages(self):
def test_function(client):
# From the examples in the spec.
client._stream.send_data(
'\xf2\x48\xcd\xc9\xc9\x07\x00',
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.send_data(
'\xf2\x00\x11\x00\x00',
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
'\xf2\x48\xcd\xc9\xc9\x07\x00',
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
'\xf2\x00\x11\x00\x00',
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
self.assertEquals('permessage-deflate', parameter.name())
self.assertEquals([], parameter.get_parameters())
self._run_permessage_deflate_test(
['permessage-deflate'],
response_checker,
test_function)
def test_echo_permessage_deflate_two_msgs_server_no_context_takeover(self):
def test_function(client):
# From the examples in the spec.
client._stream.send_data(
'\xf2\x48\xcd\xc9\xc9\x07\x00',
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.send_data(
'\xf2\x00\x11\x00\x00',
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
'\xf2\x48\xcd\xc9\xc9\x07\x00',
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
'\xf2\x48\xcd\xc9\xc9\x07\x00',
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
self.assertEquals('permessage-deflate', parameter.name())
self.assertEquals([('server_no_context_takeover', None)],
parameter.get_parameters())
self._run_permessage_deflate_test(
['permessage-deflate; server_no_context_takeover'],
response_checker,
test_function)
def test_echo_permessage_deflate_preference(self):
def test_function(client):
# From the examples in the spec.
compressed_hello = '\xf2\x48\xcd\xc9\xc9\x07\x00'
client._stream.send_data(
compressed_hello,
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
compressed_hello,
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
self.assertEquals('permessage-deflate', parameter.name())
self.assertEquals([], parameter.get_parameters())
self._run_permessage_deflate_test(
['permessage-deflate', 'deflate-frame'],
response_checker,
test_function)
def test_echo_permessage_deflate_with_parameters(self):
def test_function(client):
# From the examples in the spec.
compressed_hello = '\xf2\x48\xcd\xc9\xc9\x07\x00'
client._stream.send_data(
compressed_hello,
client_for_testing.OPCODE_TEXT,
rsv1=1)
client._stream.assert_receive_binary(
compressed_hello,
opcode=client_for_testing.OPCODE_TEXT,
rsv1=1)
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
self.assertEquals('permessage-deflate', parameter.name())
self.assertEquals([('server_max_window_bits', '10'),
('server_no_context_takeover', None)],
parameter.get_parameters())
self._run_permessage_deflate_test(
['permessage-deflate; server_max_window_bits=10; '
'server_no_context_takeover'],
response_checker,
test_function)
def test_echo_permessage_deflate_with_bad_server_max_window_bits(self):
def test_function(client):
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
raise Exception('Unexpected acceptance of permessage-deflate')
self._run_permessage_deflate_test(
['permessage-deflate; server_max_window_bits=3000000'],
response_checker,
test_function)
def test_echo_permessage_deflate_with_undefined_parameter(self):
def test_function(client):
client.send_close()
client.assert_receive_close()
def response_checker(parameter):
raise Exception('Unexpected acceptance of permessage-deflate')
self._run_permessage_deflate_test(
['permessage-deflate; foo=bar'],
response_checker,
test_function)
def test_echo_close_with_code_and_reason(self):
self._options.resource = '/close'
self._run_close_with_code_and_reason_test(
_echo_check_procedure_with_code_and_reason, 3333, 'sunsunsunsun')
def test_echo_close_with_empty_body(self):
self._options.resource = '/close'
self._run_close_with_code_and_reason_test(
_echo_check_procedure_with_code_and_reason, None, '')
def test_mux_echo(self):
self._run_mux_test(_mux_echo_check_procedure)
def test_close_on_protocol_error(self):
"""Tests that the server sends a close frame with protocol error status
code when the client sends data with some protocol error.
"""
def test_function(client):
client.connect()
# Intermediate frame without any preceding start of fragmentation
# frame.
client.send_frame_of_arbitrary_bytes('\x80\x80', '')
client.assert_receive_close(
client_for_testing.STATUS_PROTOCOL_ERROR)
self._run_test(test_function)
def test_close_on_unsupported_frame(self):
"""Tests that the server sends a close frame with unsupported operation
status code when the client sends data asking some operation that is
not supported by the server.
"""
def test_function(client):
client.connect()
# Text frame with RSV3 bit raised.
client.send_frame_of_arbitrary_bytes('\x91\x80', '')
client.assert_receive_close(
client_for_testing.STATUS_UNSUPPORTED_DATA)
self._run_test(test_function)
def test_close_on_invalid_frame(self):
"""Tests that the server sends a close frame with invalid frame payload
data status code when the client sends an invalid frame like containing
invalid UTF-8 character.
"""
def test_function(client):
client.connect()
# Text frame with invalid UTF-8 string.
client.send_message('\x80', raw=True)
client.assert_receive_close(
client_for_testing.STATUS_INVALID_FRAME_PAYLOAD_DATA)
self._run_test(test_function)
def test_close_on_internal_endpoint_error(self):
"""Tests that the server sends a close frame with internal endpoint
error status code when the handler does bad operation.
"""
self._options.resource = '/internal_error'
def test_function(client):
client.connect()
client.assert_receive_close(
client_for_testing.STATUS_INTERNAL_ENDPOINT_ERROR)
self._run_test(test_function)
    # TODO(toyoshim): Add tests to verify invalid absolute uri handling like
    # host mismatch, port mismatch and invalid port description (':' without
    # port number).
def test_absolute_uri(self):
"""Tests absolute uri request."""
options = self._options
options.resource = 'ws://localhost:%d/echo' % options.server_port
self._run_test_with_client_options(_echo_check_procedure, options)
def test_origin_check(self):
"""Tests http fallback on origin check fail."""
options = self._options
options.resource = '/origin_check'
        # The server logs a warning message for the HTTP 403 fallback. This
        # warning message is confusing. The following pipe discards it.
self.server_stderr = subprocess.PIPE
self._run_http_fallback_test(options, 403)
def test_version_check(self):
"""Tests http fallback on version check fail."""
options = self._options
options.version = 99
self._run_http_fallback_test(options, 400)
class EndToEndHyBi00Test(EndToEndTestBase):
def setUp(self):
EndToEndTestBase.setUp(self)
def _run_test(self, test_function):
server = self._run_server()
try:
time.sleep(_SERVER_WARMUP_IN_SEC)
client = client_for_testing.create_client_hybi00(self._options)
try:
test_function(client)
finally:
client.close_socket()
finally:
self._kill_process(server.pid)
def test_echo(self):
self._run_test(_echo_check_procedure)
def test_echo_server_close(self):
self._run_test(_echo_check_procedure_with_goodbye)
class EndToEndTestWithEchoClient(EndToEndTestBase):
def setUp(self):
EndToEndTestBase.setUp(self)
def _check_example_echo_client_result(
self, expected, stdoutdata, stderrdata):
actual = stdoutdata.decode("utf-8")
if actual != expected:
raise Exception('Unexpected result on example echo client: '
'%r (expected) vs %r (actual)' %
(expected, actual))
if stderrdata is not None:
raise Exception('Unexpected error message on example echo '
'client: %r' % stderrdata)
def test_example_echo_client(self):
"""Tests that the echo_client.py example can talk with the server."""
server = self._run_server()
try:
time.sleep(_SERVER_WARMUP_IN_SEC)
client_command = os.path.join(
self.top_dir, 'example', 'echo_client.py')
# Expected output for the default messages.
default_expectation = ('Send: Hello\n' 'Recv: Hello\n'
u'Send: \u65e5\u672c\n' u'Recv: \u65e5\u672c\n'
'Send close\n' 'Recv ack\n')
args = [client_command,
'-p', str(self._options.server_port)]
client = self._run_python_command(args, stdout=subprocess.PIPE)
stdoutdata, stderrdata = client.communicate()
self._check_example_echo_client_result(
default_expectation, stdoutdata, stderrdata)
# Process a big message for which extended payload length is used.
            # To handle extended payload length, the ws_version attribute will
            # be accessed. This test checks that ws_version is correctly set.
big_message = 'a' * 1024
args = [client_command,
'-p', str(self._options.server_port),
'-m', big_message]
client = self._run_python_command(args, stdout=subprocess.PIPE)
stdoutdata, stderrdata = client.communicate()
expected = ('Send: %s\nRecv: %s\nSend close\nRecv ack\n' %
(big_message, big_message))
self._check_example_echo_client_result(
expected, stdoutdata, stderrdata)
# Test the permessage-deflate extension.
args = [client_command,
'-p', str(self._options.server_port),
'--use_permessage_deflate']
client = self._run_python_command(args, stdout=subprocess.PIPE)
stdoutdata, stderrdata = client.communicate()
self._check_example_echo_client_result(
default_expectation, stdoutdata, stderrdata)
finally:
self._kill_process(server.pid)
if __name__ == '__main__':
unittest.main()
# vi:sts=4 sw=4 et
|
gepd/uPiotMicroPythonTool
|
refs/heads/develop
|
commands/list_files.py
|
1
|
# This file is part of the uPiot project, https://github.com/gepd/upiot/
#
# MIT License
#
# Copyright (c) 2017 GEPD
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sublime
from sublime_plugin import WindowCommand
from threading import Thread
from ..tools import sampy_manager
from ..tools.serial import selected_port
from ..tools.thread_progress import ThreadProgress
class upiotListFilesCommand(WindowCommand):
def run(self):
port = selected_port(request_port=True)
        if not port:
return
th = Thread(target=sampy_manager.list_files)
th.start()
ThreadProgress(th, '', '')
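# A minimal sketch of the same pattern (editor's addition; 'long_task' is a
# hypothetical callable): blocking work runs on a worker thread so the
# Sublime Text UI stays responsive while ThreadProgress animates the status
# bar until the thread finishes:
#
#     th = Thread(target=long_task)
#     th.start()
#     ThreadProgress(th, 'Listing files', 'Done')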
|
nealtodd/django
|
refs/heads/master
|
django/db/backends/postgresql/client.py
|
346
|
import os
import subprocess
from django.core.files.temp import NamedTemporaryFile
from django.db.backends.base.client import BaseDatabaseClient
from django.utils.six import print_
def _escape_pgpass(txt):
"""
Escape a fragment of a PostgreSQL .pgpass file.
"""
return txt.replace('\\', '\\\\').replace(':', '\\:')
class DatabaseClient(BaseDatabaseClient):
executable_name = 'psql'
@classmethod
def runshell_db(cls, settings_dict):
args = [cls.executable_name]
host = settings_dict.get('HOST', '')
port = settings_dict.get('PORT', '')
name = settings_dict.get('NAME', '')
user = settings_dict.get('USER', '')
passwd = settings_dict.get('PASSWORD', '')
if user:
args += ['-U', user]
if host:
args += ['-h', host]
if port:
args += ['-p', str(port)]
args += [name]
temp_pgpass = None
try:
if passwd:
# Create temporary .pgpass file.
temp_pgpass = NamedTemporaryFile(mode='w+')
try:
print_(
_escape_pgpass(host) or '*',
str(port) or '*',
_escape_pgpass(name) or '*',
_escape_pgpass(user) or '*',
_escape_pgpass(passwd),
file=temp_pgpass,
sep=':',
flush=True,
)
os.environ['PGPASSFILE'] = temp_pgpass.name
except UnicodeEncodeError:
# If the current locale can't encode the data, we let
# the user input the password manually.
pass
subprocess.call(args)
finally:
if temp_pgpass:
temp_pgpass.close()
if 'PGPASSFILE' in os.environ: # unit tests need cleanup
del os.environ['PGPASSFILE']
def runshell(self):
DatabaseClient.runshell_db(self.connection.settings_dict)
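# Worked example (editor's note; database name and user are assumed): for a
# password 'p:a\b', _escape_pgpass() yields 'p\:a\\b', so the temporary
# .pgpass line written above would read:
#     localhost:5432:mydb:myuser:p\:a\\b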
|
aksaxena80/test
|
refs/heads/master
|
tensorflow/python/ops/image_ops.py
|
5
|
# pylint: disable=g-short-docstring-punctuation
"""## Encoding and Decoding
TensorFlow provides Ops to decode and encode JPEG and PNG formats. Encoded
images are represented by scalar string Tensors, decoded images by 3-D uint8
tensors of shape `[height, width, channels]`.
The encode and decode Ops apply to one image at a time. Their input and output
are all of variable size. If you need fixed size images, pass the output of
the decode Ops to one of the cropping and resizing Ops.
Note: The PNG encode and decode Ops support RGBA, but the conversion Ops
presently only support RGB, HSV, and GrayScale.
@@decode_jpeg
@@encode_jpeg
@@decode_png
@@encode_png
## Resizing
The resizing Ops accept input images as tensors of several types. They always
output resized images as float32 tensors.
The convenience function [resize_images()](#resize_images) supports both 4-D
and 3-D tensors as input and output. 4-D tensors are for batches of images,
3-D tensors for individual images.
Other resizing Ops only support 3-D individual images as input:
[resize_area](#resize_area), [resize_bicubic](#resize_bicubic),
[resize_bilinear](#resize_bilinear),
[resize_nearest_neighbor](#resize_nearest_neighbor).
Example:
```python
# Decode a JPG image and resize it to 299 by 299.
image = tf.image.decode_jpeg(...)
resized_image = tf.image.resize_bilinear(image, [299, 299])
```
Maybe refer to the Queue examples that show how to add images to a Queue
after resizing them to a fixed size, and how to dequeue batches of resized
images from the Queue.
@@resize_images
@@resize_area
@@resize_bicubic
@@resize_bilinear
@@resize_nearest_neighbor
## Cropping
@@resize_image_with_crop_or_pad
@@pad_to_bounding_box
@@crop_to_bounding_box
@@random_crop
@@extract_glimpse
## Flipping and Transposing
@@flip_up_down
@@random_flip_up_down
@@flip_left_right
@@random_flip_left_right
@@transpose_image
## Image Adjustments
TensorFlow provides functions to adjust images in various ways: brightness,
contrast, hue, and saturation. Each adjustment can be done with predefined
parameters or with random parameters picked from predefined intervals. Random
adjustments are often useful to expand a training set and reduce overfitting.
@@adjust_brightness
@@random_brightness
@@adjust_contrast
@@random_contrast
@@per_image_whitening
"""
import math
import tensorflow.python.platform
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import types
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import common_shapes
from tensorflow.python.ops import constant_op
from tensorflow.python.ops import gen_image_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_image_ops import *
from tensorflow.python.ops.gen_attention_ops import *
# pylint: enable=wildcard-import
ops.NoGradient('ResizeBilinear')
ops.NoGradient('RandomCrop')
def _ImageDimensions(images):
"""Returns the dimensions of an image tensor.
Args:
images: 4-D Tensor of shape [batch, height, width, channels]
Returns:
list of integers [batch, height, width, channels]
"""
# A simple abstraction to provide names for each dimension. This abstraction
# should make it simpler to switch dimensions in the future (e.g. if we ever
# want to switch height and width.)
return images.get_shape().as_list()
def _Check3DImage(image):
"""Assert that we are working with properly shaped image.
Args:
image: 3-D Tensor of shape [height, width, channels]
Raises:
ValueError: if image.shape is not a [3] vector.
"""
if not image.get_shape().is_fully_defined():
raise ValueError('\'image\' must be fully defined.')
if image.get_shape().ndims != 3:
raise ValueError('\'image\' must be three-dimensional.')
if not all(x > 0 for x in image.get_shape()):
raise ValueError('all dims of \'image.shape\' must be > 0: %s' %
image.get_shape())
def _CheckAtLeast3DImage(image):
"""Assert that we are working with properly shaped image.
Args:
image: >= 3-D Tensor of size [*, height, width, depth]
Raises:
ValueError: if image.shape is not a [>= 3] vector.
"""
if not image.get_shape().is_fully_defined():
raise ValueError('\'image\' must be fully defined.')
if image.get_shape().ndims < 3:
raise ValueError('\'image\' must be at least three-dimensional.')
if not all(x > 0 for x in image.get_shape()):
raise ValueError('all dims of \'image.shape\' must be > 0: %s' %
image.get_shape())
def random_flip_up_down(image, seed=None):
"""Randomly flips an image vertically (upside down).
With a 1 in 2 chance, outputs the contents of `image` flipped along the first
dimension, which is `height`. Otherwise output the image as-is.
Args:
image: A 3-D tensor of shape `[height, width, channels].`
seed: A Python integer. Used to create a random seed. See
[`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed)
for behavior.
Returns:
A 3-D tensor of the same type and shape as `image`.
Raises:
    ValueError: if the shape of `image` is not supported.
"""
_Check3DImage(image)
uniform_random = random_ops.random_uniform([], 0, 1.0, seed=seed)
mirror = math_ops.less(array_ops.pack([uniform_random, 1.0, 1.0]), 0.5)
return array_ops.reverse(image, mirror)
def random_flip_left_right(image, seed=None):
"""Randomly flip an image horizontally (left to right).
With a 1 in 2 chance, outputs the contents of `image` flipped along the
second dimension, which is `width`. Otherwise output the image as-is.
Args:
image: A 3-D tensor of shape `[height, width, channels].`
seed: A Python integer. Used to create a random seed. See
[`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed)
for behavior.
Returns:
A 3-D tensor of the same type and shape as `image`.
Raises:
    ValueError: if the shape of `image` is not supported.
"""
_Check3DImage(image)
uniform_random = random_ops.random_uniform([], 0, 1.0, seed=seed)
mirror = math_ops.less(array_ops.pack([1.0, uniform_random, 1.0]), 0.5)
return array_ops.reverse(image, mirror)
def flip_left_right(image):
"""Flip an image horizontally (left to right).
Outputs the contents of `image` flipped along the second dimension, which is
`width`.
See also `reverse()`.
Args:
image: A 3-D tensor of shape `[height, width, channels].`
Returns:
A 3-D tensor of the same type and shape as `image`.
Raises:
    ValueError: if the shape of `image` is not supported.
"""
_Check3DImage(image)
return array_ops.reverse(image, [False, True, False])
def flip_up_down(image):
"""Flip an image horizontally (upside down).
Outputs the contents of `image` flipped along the first dimension, which is
`height`.
See also `reverse()`.
Args:
image: A 3-D tensor of shape `[height, width, channels].`
Returns:
A 3-D tensor of the same type and shape as `image`.
Raises:
    ValueError: if the shape of `image` is not supported.
"""
_Check3DImage(image)
return array_ops.reverse(image, [True, False, False])
def transpose_image(image):
"""Transpose an image by swapping the first and second dimension.
See also `transpose()`.
Args:
image: 3-D tensor of shape `[height, width, channels]`
Returns:
A 3-D tensor of shape `[width, height, channels]`
Raises:
    ValueError: if the shape of `image` is not supported.
"""
_Check3DImage(image)
return array_ops.transpose(image, [1, 0, 2], name='transpose_image')
def pad_to_bounding_box(image, offset_height, offset_width, target_height,
target_width):
"""Pad `image` with zeros to the specified `height` and `width`.
Adds `offset_height` rows of zeros on top, `offset_width` columns of
zeros on the left, and then pads the image on the bottom and right
with zeros until it has dimensions `target_height`, `target_width`.
This op does nothing if `offset_*` is zero and the image already has size
`target_height` by `target_width`.
Args:
image: 3-D tensor with shape `[height, width, channels]`
offset_height: Number of rows of zeros to add on top.
offset_width: Number of columns of zeros to add on the left.
target_height: Height of output image.
target_width: Width of output image.
Returns:
3-D tensor of shape `[target_height, target_width, channels]`
Raises:
ValueError: If the shape of `image` is incompatible with the `offset_*` or
`target_*` arguments
"""
_Check3DImage(image)
height, width, depth = _ImageDimensions(image)
if target_width < width:
raise ValueError('target_width must be >= width')
if target_height < height:
raise ValueError('target_height must be >= height')
after_padding_width = target_width - offset_width - width
after_padding_height = target_height - offset_height - height
if after_padding_width < 0:
raise ValueError('target_width not possible given '
'offset_width and image width')
if after_padding_height < 0:
raise ValueError('target_height not possible given '
'offset_height and image height')
# Do not pad on the depth dimensions.
if (offset_width or offset_height or after_padding_width or
after_padding_height):
paddings = [[offset_height, after_padding_height],
[offset_width, after_padding_width], [0, 0]]
padded = array_ops.pad(image, paddings)
padded.set_shape([target_height, target_width, depth])
else:
padded = image
return padded
def crop_to_bounding_box(image, offset_height, offset_width, target_height,
target_width):
"""Crops an image to a specified bounding box.
This op cuts a rectangular part out of `image`. The top-left corner of the
returned image is at `offset_height, offset_width` in `image`, and its
lower-right corner is at
  `offset_height + target_height, offset_width + target_width`.
Args:
image: 3-D tensor with shape `[height, width, channels]`
offset_height: Vertical coordinate of the top-left corner of the result in
the input.
offset_width: Horizontal coordinate of the top-left corner of the result in
the input.
target_height: Height of the result.
target_width: Width of the result.
Returns:
3-D tensor of image with shape `[target_height, target_width, channels]`
Raises:
ValueError: If the shape of `image` is incompatible with the `offset_*` or
`target_*` arguments
"""
_Check3DImage(image)
height, width, _ = _ImageDimensions(image)
if offset_width < 0:
raise ValueError('offset_width must be >= 0.')
if offset_height < 0:
raise ValueError('offset_height must be >= 0.')
if width < (target_width + offset_width):
raise ValueError('width must be >= target + offset.')
if height < (target_height + offset_height):
raise ValueError('height must be >= target + offset.')
cropped = array_ops.slice(image, [offset_height, offset_width, 0],
[target_height, target_width, -1])
return cropped
def resize_image_with_crop_or_pad(image, target_height, target_width):
"""Crops and/or pads an image to a target width and height.
Resizes an image to a target width and height by either centrally
cropping the image or padding it evenly with zeros.
If `width` or `height` is greater than the specified `target_width` or
`target_height` respectively, this op centrally crops along that dimension.
If `width` or `height` is smaller than the specified `target_width` or
`target_height` respectively, this op centrally pads with 0 along that
dimension.
Args:
image: 3-D tensor of shape [height, width, channels]
target_height: Target height.
target_width: Target width.
Raises:
ValueError: if `target_height` or `target_width` are zero or negative.
Returns:
Cropped and/or padded image of shape
`[target_height, target_width, channels]`
"""
_Check3DImage(image)
original_height, original_width, _ = _ImageDimensions(image)
if target_width <= 0:
raise ValueError('target_width must be > 0.')
if target_height <= 0:
raise ValueError('target_height must be > 0.')
offset_crop_width = 0
offset_pad_width = 0
if target_width < original_width:
offset_crop_width = int((original_width - target_width) / 2)
elif target_width > original_width:
offset_pad_width = int((target_width - original_width) / 2)
offset_crop_height = 0
offset_pad_height = 0
if target_height < original_height:
offset_crop_height = int((original_height - target_height) / 2)
elif target_height > original_height:
offset_pad_height = int((target_height - original_height) / 2)
# Maybe crop if needed.
cropped = crop_to_bounding_box(image, offset_crop_height, offset_crop_width,
min(target_height, original_height),
min(target_width, original_width))
# Maybe pad if needed.
resized = pad_to_bounding_box(cropped, offset_pad_height, offset_pad_width,
target_height, target_width)
if resized.get_shape().ndims is None:
raise ValueError('resized contains no shape.')
if not resized.get_shape()[0].is_compatible_with(target_height):
raise ValueError('resized height is not correct.')
if not resized.get_shape()[1].is_compatible_with(target_width):
raise ValueError('resized width is not correct.')
return resized
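# A minimal usage sketch (not from the original source; `decoded` and the
# 227x227 target size are illustrative assumptions):
#
#   framed = resize_image_with_crop_or_pad(decoded, 227, 227)
#   # Larger-than-target dimensions are centrally cropped, smaller ones are
#   # centrally zero-padded, so `framed` is always [227, 227, channels].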
class ResizeMethod(object):
BILINEAR = 0
NEAREST_NEIGHBOR = 1
BICUBIC = 2
AREA = 3
def resize_images(images, new_height, new_width, method=ResizeMethod.BILINEAR):
"""Resize `images` to `new_width`, `new_height` using the specified `method`.
Resized images will be distorted if their original aspect ratio is not
the same as `new_width`, `new_height`. To avoid distortions see
[resize_image_with_crop_or_pad](#resize_image_with_crop_or_pad).
`method` can be one of:
* <b>ResizeMethod.BILINEAR</b>: [Bilinear interpolation.]
(https://en.wikipedia.org/wiki/Bilinear_interpolation)
* <b>ResizeMethod.NEAREST_NEIGHBOR</b>: [Nearest neighbor interpolation.]
(https://en.wikipedia.org/wiki/Nearest-neighbor_interpolation)
* <b>ResizeMethod.BICUBIC</b>: [Bicubic interpolation.]
(https://en.wikipedia.org/wiki/Bicubic_interpolation)
* <b>ResizeMethod.AREA</b>: Area interpolation.
Args:
images: 4-D Tensor of shape `[batch, height, width, channels]` or
3-D Tensor of shape `[height, width, channels]`.
new_height: integer.
new_width: integer.
method: ResizeMethod. Defaults to `ResizeMethod.BILINEAR`.
Raises:
ValueError: if the shape of `images` is incompatible with the
shape arguments to this function
ValueError: if an unsupported resize method is specified.
Returns:
If `images` was 4-D, a 4-D float Tensor of shape
`[batch, new_height, new_width, channels]`.
If `images` was 3-D, a 3-D float Tensor of shape
`[new_height, new_width, channels]`.
"""
if images.get_shape().ndims is None:
raise ValueError('\'images\' contains no shape.')
# TODO(shlens): Migrate this functionality to the underlying Op's.
is_batch = True
if len(images.get_shape()) == 3:
is_batch = False
images = array_ops.expand_dims(images, 0)
_, height, width, depth = _ImageDimensions(images)
if width == new_width and height == new_height:
return images
if method == ResizeMethod.BILINEAR:
images = gen_image_ops.resize_bilinear(images, [new_height, new_width])
elif method == ResizeMethod.NEAREST_NEIGHBOR:
images = gen_image_ops.resize_nearest_neighbor(images, [new_height,
new_width])
elif method == ResizeMethod.BICUBIC:
images = gen_image_ops.resize_bicubic(images, [new_height, new_width])
elif method == ResizeMethod.AREA:
images = gen_image_ops.resize_area(images, [new_height, new_width])
else:
raise ValueError('Resize method is not implemented.')
if not is_batch:
images = array_ops.reshape(images, [new_height, new_width, depth])
return images
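# Illustrative call (assumes `images` is a 4-D [batch, height, width,
# channels] tensor; the 64x64 size and the method choice are examples only):
#
#   thumbs = resize_images(images, 64, 64, method=ResizeMethod.AREA)
#   # AREA generally anti-aliases better than BILINEAR when downsampling.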
def per_image_whitening(image):
"""Linearly scales `image` to have zero mean and unit norm.
This op computes `(x - mean) / adjusted_stddev`, where `mean` is the average
of all values in image, and
`adjusted_stddev = max(stddev, 1.0/srqt(image.NumElements()))`.
`stddev` is the standard deviation of all values in `image`. It is capped
away from zero to protect against division by 0 when handling uniform images.
Note that this implementation is limited:
* It only whitens based on the statistics of an individual image.
* It does not take into account the covariance structure.
Args:
image: 3-D tensor of shape `[height, width, channels]`.
Returns:
The whitened image with same shape as `image`.
Raises:
ValueError: if the shape of 'image' is incompatible with this function.
"""
_Check3DImage(image)
height, width, depth = _ImageDimensions(image)
num_pixels = height * width * depth
image = math_ops.cast(image, dtype=types.float32)
image_mean = math_ops.reduce_mean(image)
variance = (math_ops.reduce_mean(math_ops.square(image)) -
math_ops.square(image_mean))
stddev = math_ops.sqrt(variance)
# Apply a minimum normalization that protects us against uniform images.
min_stddev = constant_op.constant(1.0 / math.sqrt(num_pixels))
pixel_value_scale = math_ops.maximum(stddev, min_stddev)
pixel_value_offset = image_mean
image = math_ops.sub(image, pixel_value_offset)
image = math_ops.div(image, pixel_value_scale)
return image
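# Worked example of the stddev floor above, as a comment (numbers are
# illustrative): for a 32x32x3 image, num_pixels = 3072, so
# min_stddev = 1/sqrt(3072) ~= 0.018; a perfectly uniform image is divided
# by 0.018 instead of by zero.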
def random_brightness(image, max_delta, seed=None):
"""Adjust the brightness of images by a random factor.
Equivalent to `adjust_brightness()` using a `delta` randomly picked in the
interval `[-max_delta, max_delta)`.
Note that `delta` is picked as a float. Because for integer type images,
the brightness adjusted result is rounded before casting, integer images may
have modifications in the range `[-max_delta,max_delta]`.
Args:
image: 3-D tensor of shape `[height, width, channels]`.
max_delta: float, must be non-negative.
seed: A Python integer. Used to create a random seed. See
[`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed)
for behavior.
Returns:
3-D tensor of images of shape `[height, width, channels]`
Raises:
ValueError: if max_delta is negative.
"""
_Check3DImage(image)
if max_delta < 0:
raise ValueError('max_delta must be non-negative.')
delta = random_ops.random_uniform([], -max_delta, max_delta, seed=seed)
return adjust_brightness(image, delta)
def random_contrast(image, lower, upper, seed=None):
"""Adjust the contrase of an image by a random factor.
Equivalent to `adjust_constrast()` but uses a `contrast_factor` randomly
picked in the interval `[lower, upper]`.
Args:
image: 3-D tensor of shape `[height, width, channels]`.
lower: float. Lower bound for the random contrast factor.
upper: float. Upper bound for the random contrast factor.
seed: A Python integer. Used to create a random seed. See
[`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed)
for behavior.
Returns:
3-D tensor of shape `[height, width, channels]`.
Raises:
ValueError: if `upper <= lower` or if `lower < 0`.
"""
_Check3DImage(image)
if upper <= lower:
raise ValueError('upper must be > lower.')
if lower < 0:
raise ValueError('lower must be non-negative.')
  # Generate a float in [lower, upper]
contrast_factor = random_ops.random_uniform([], lower, upper, seed=seed)
return adjust_contrast(image, contrast_factor)
def adjust_brightness(image, delta, min_value=None, max_value=None):
"""Adjust the brightness of RGB or Grayscale images.
The value `delta` is added to all components of the tensor `image`. `image`
and `delta` are cast to `float` before adding, and the resulting values are
clamped to `[min_value, max_value]`. Finally, the result is cast back to
`images.dtype`.
If `min_value` or `max_value` are not given, they are set to the minimum and
maximum allowed values for `image.dtype` respectively.
Args:
image: A tensor.
delta: A scalar. Amount to add to the pixel values.
min_value: Minimum value for output.
max_value: Maximum value for output.
Returns:
A tensor of the same shape and type as `image`.
"""
if min_value is None:
min_value = image.dtype.min
if max_value is None:
max_value = image.dtype.max
with ops.op_scope([image, delta, min_value, max_value], None,
'adjust_brightness') as name:
adjusted = math_ops.add(
math_ops.cast(image, types.float32),
math_ops.cast(delta, types.float32),
name=name)
if image.dtype.is_integer:
rounded = math_ops.round(adjusted)
else:
rounded = adjusted
clipped = clip_ops.clip_by_value(rounded, float(min_value),
float(max_value))
output = math_ops.cast(clipped, image.dtype)
return output
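# Comment-only sketch of the clamping semantics (values are illustrative):
# for a uint8 image, adjust_brightness(image, 30) adds 30.0 in float space,
# rounds, clips to [0, 255], and casts back, so a pixel at 240 saturates at
# 255 rather than wrapping around.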
def adjust_contrast(images, contrast_factor, min_value=None, max_value=None):
"""Adjust contrast of RGB or grayscale images.
`images` is a tensor of at least 3 dimensions. The last 3 dimensions are
interpreted as `[height, width, channels]`. The other dimensions only
represent a collection of images, such as `[batch, height, width, channels].`
Contrast is adjusted independently for each channel of each image.
For each channel, this Op first computes the mean of the image pixels in the
channel and then adjusts each component `x` of each pixel to
`(x - mean) * contrast_factor + mean`.
The adjusted values are then clipped to fit in the `[min_value, max_value]`
interval. If `min_value` or `max_value` is not given, it is replaced with the
minimum and maximum values for the data type of `images` respectively.
The contrast-adjusted image is always computed as `float`, and it is
cast back to its original type after clipping.
Args:
images: Images to adjust. At least 3-D.
contrast_factor: A float multiplier for adjusting contrast.
min_value: Minimum value for clipping the adjusted pixels.
max_value: Maximum value for clipping the adjusted pixels.
Returns:
    The contrast-adjusted image or images.
Raises:
ValueError: if the arguments are invalid.
"""
_CheckAtLeast3DImage(images)
# If these are None, the min/max should be a nop, but still prevent overflows
# from the cast back to images.dtype at the end of adjust_contrast.
if min_value is None:
min_value = images.dtype.min
if max_value is None:
max_value = images.dtype.max
with ops.op_scope(
[images, contrast_factor, min_value,
max_value], None, 'adjust_contrast') as name:
adjusted = gen_image_ops.adjust_contrast(images,
contrast_factor=contrast_factor,
min_value=min_value,
max_value=max_value,
name=name)
if images.dtype.is_integer:
return math_ops.cast(math_ops.round(adjusted), images.dtype)
else:
return math_ops.cast(adjusted, images.dtype)
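# The per-channel math performed by the underlying op, as a comment sketch:
#   adjusted = (x - channel_mean) * contrast_factor + channel_mean
# followed by clipping to [min_value, max_value] and, for integer images,
# rounding before the cast back to the input dtype.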
ops.RegisterShape('AdjustContrast')(
common_shapes.unchanged_shape_with_rank_at_least(3))
@ops.RegisterShape('ResizeBilinear')
@ops.RegisterShape('ResizeNearestNeighbor')
@ops.RegisterShape('ResizeBicubic')
@ops.RegisterShape('ResizeArea')
def _ResizeShape(op):
"""Shape function for the resize_bilinear and resize_nearest_neighbor ops."""
input_shape = op.inputs[0].get_shape().with_rank(4)
size = tensor_util.ConstantValue(op.inputs[1])
if size is not None:
height = size[0]
width = size[1]
else:
height = None
width = None
return [tensor_shape.TensorShape(
[input_shape[0], height, width, input_shape[3]])]
@ops.RegisterShape('DecodeJpeg')
@ops.RegisterShape('DecodePng')
def _ImageDecodeShape(op):
"""Shape function for image decoding ops."""
unused_input_shape = op.inputs[0].get_shape().merge_with(
tensor_shape.scalar())
channels = op.get_attr('channels') or None
return [tensor_shape.TensorShape([None, None, channels])]
@ops.RegisterShape('EncodeJpeg')
@ops.RegisterShape('EncodePng')
def _ImageEncodeShape(op):
"""Shape function for image encoding ops."""
unused_input_shape = op.inputs[0].get_shape().with_rank(3)
return [tensor_shape.scalar()]
@ops.RegisterShape('RandomCrop')
def _RandomCropShape(op):
"""Shape function for the random_crop op."""
input_shape = op.inputs[0].get_shape().with_rank(3)
unused_size_shape = op.inputs[1].get_shape().merge_with(
tensor_shape.vector(2))
size = tensor_util.ConstantValue(op.inputs[1])
if size is not None:
height = size[0]
width = size[1]
else:
height = None
width = None
channels = input_shape[2]
return [tensor_shape.TensorShape([height, width, channels])]
def random_crop(image, size, seed=None, name=None):
"""Randomly crops `image` to size `[target_height, target_width]`.
The offset of the output within `image` is uniformly random. `image` always
fully contains the result.
Args:
image: 3-D tensor of shape `[height, width, channels]`
size: 1-D tensor with two elements, specifying target `[height, width]`
seed: A Python integer. Used to create a random seed. See
[`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed)
for behavior.
name: A name for this operation (optional).
Returns:
A cropped 3-D tensor of shape `[target_height, target_width, channels]`.
"""
seed1, seed2 = random_seed.get_seed(seed)
return gen_image_ops.random_crop(image, size, seed=seed1, seed2=seed2,
name=name)
|
Carlosaarodrigues/orpsoc
|
refs/heads/master
|
orpsoc/utils.py
|
1
|
import subprocess
class Launcher:
def __init__(self, cmd, args=[], shell=False, cwd=None, stderr=None, errormsg=None):
self.cmd = cmd
self.args = args
self.shell = shell
self.cwd = cwd
self.stderr = stderr
self.errormsg = errormsg
def run(self):
try:
            subprocess.check_call([self.cmd] + self.args,
                                  cwd=self.cwd,
                                  shell=self.shell,
                                  stderr=self.stderr,
                                  stdin=subprocess.PIPE)
except OSError:
raise RuntimeError("Error: Command " + self.cmd + " not found. Make sure it is in $PATH")
except subprocess.CalledProcessError:
if self.stderr is None:
self.stderr = "stderr"
if self.errormsg:
raise RuntimeError(self.errormsg)
else:
raise RuntimeError("Error: " + self.cmd + ' '.join(self.args) + " returned errors. See " + self.stderr + " for details")
def __str__(self):
return self.cmd + ' ' + ' '.join(self.args)
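# Hypothetical usage (the command, arguments, and path are illustrative only):
#
#   Launcher('make', ['all'], cwd='/path/to/build',
#            errormsg='Build failed, see stderr for details').run()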
def launch(cmd, args=[], shell=False, cwd=None, stderr=None):
try:
        subprocess.check_call([cmd] + args,
                              cwd=cwd,
                              shell=shell,
                              stderr=stderr)
except OSError:
print("Error: Command " + cmd + " not found. Make sure it is in $PATH")
exit(1)
except subprocess.CalledProcessError:
if stderr is None:
stderr = "stderr"
print("Error: " + cmd + ' '.join(args) + " returned errors. See " + stderr + " for details")
exit(1)
|
sigma-geosistemas/django-river
|
refs/heads/master
|
river/management/__init__.py
|
98
|
__author__ = 'cenk'
|
d0ugal/readthedocs.org
|
refs/heads/master
|
readthedocs/builds/migrations/0015_add_privacy.py
|
5
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Version.privacy_level'
db.add_column('builds_version', 'privacy_level',
self.gf('django.db.models.fields.CharField')(default='public', max_length=20),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Version.privacy_level'
db.delete_column('builds_version', 'privacy_level')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 10, 13, 23, 55, 6, 898344)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 10, 13, 23, 55, 6, 898075)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'builds.build': {
'Meta': {'ordering': "['-date']", 'object_name': 'Build'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'error': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'output': ('django.db.models.fields.TextField', [], {}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'builds'", 'to': "orm['projects.Project']"}),
'setup': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'setup_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'default': "'finished'", 'max_length': '55'}),
'success': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'html'", 'max_length': '55'}),
'version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'builds'", 'null': 'True', 'to': "orm['builds.Version']"})
},
'builds.version': {
'Meta': {'ordering': "['-verbose_name']", 'unique_together': "[('project', 'slug')]", 'object_name': 'Version'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'built': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'privacy_level': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '20'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'versions'", 'to': "orm['projects.Project']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uploaded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'verbose_name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'builds.versionalias': {
'Meta': {'object_name': 'VersionAlias'},
'from_slug': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'largest': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'aliases'", 'to': "orm['projects.Project']"}),
'to_slug': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'projects.project': {
'Meta': {'ordering': "('slug',)", 'object_name': 'Project'},
'analytics_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'conf_py_file': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'copyright': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'crate_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'default_branch': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_version': ('django.db.models.fields.CharField', [], {'default': "'latest'", 'max_length': '255'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'django_packages_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'documentation_type': ('django.db.models.fields.CharField', [], {'default': "'sphinx'", 'max_length': '20'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'privacy_level': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '20'}),
'project_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'related_projects': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['projects.Project']", 'null': 'True', 'through': "orm['projects.ProjectRelationship']", 'blank': 'True'}),
'repo': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'repo_type': ('django.db.models.fields.CharField', [], {'default': "'git'", 'max_length': '10'}),
'requirements_file': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'skip': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'suffix': ('django.db.models.fields.CharField', [], {'default': "'.rst'", 'max_length': '10'}),
'theme': ('django.db.models.fields.CharField', [], {'default': "'default'", 'max_length': '20'}),
'use_system_packages': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'use_virtualenv': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'projects'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'version_privacy_level': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '20'})
},
'projects.projectrelationship': {
'Meta': {'object_name': 'ProjectRelationship'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'superprojects'", 'to': "orm['projects.Project']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subprojects'", 'to': "orm['projects.Project']"})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['builds']
|
enolfc/oauthenticator
|
refs/heads/master
|
oauthenticator/tests/test_globus.py
|
1
|
from pytest import fixture, mark, raises
from tornado import web, gen
from unittest.mock import Mock
from globus_sdk import ConfidentialAppAuthClient
from ..globus import GlobusOAuthenticator, GlobusLogoutHandler
from .mocks import setup_oauth_mock, no_code_test, mock_handler
def user_model(username):
"""Return a user model"""
return {
'login': username,
}
@fixture
def mock_globus_sdk(monkeypatch):
"""Mock the globus_sdk request for 'oauth2_exchange_code_for_tokens', and
mock some of the items within the returned 'Tokens' class. """
class Tokens:
by_resource_server = {
'transfer.api.globus.org': {'access_token': 'TRANSFER_TOKEN'},
'auth.globus.org': {'access_token': 'AUTH_TOKEN'}
}
id_token = {'preferred_username': 'wash@globusid.org'}
def decode_id_token(self, client):
return self.id_token
tokens = Tokens()
monkeypatch.setattr(
ConfidentialAppAuthClient,
'oauth2_exchange_code_for_tokens',
lambda self, code: tokens
)
return tokens
@fixture
def globus_client(client):
setup_oauth_mock(
client,
host=['auth.globus.org'],
access_token_path='/v2/oauth2/token',
user_path='/userinfo',
token_type='bearer',
)
return client
@mark.gen_test
def test_globus(globus_client, mock_globus_sdk):
authenticator = GlobusOAuthenticator()
handler = globus_client.handler_for_user(user_model('wash'))
data = yield authenticator.authenticate(handler)
assert data['name'] == 'wash'
tokens = list(data['auth_state']['tokens'].keys())
assert tokens == ['transfer.api.globus.org']
@mark.gen_test
def test_allow_refresh_tokens(globus_client, mock_globus_sdk, monkeypatch):
authenticator = GlobusOAuthenticator()
# Sanity check, this field should be set to True
assert authenticator.allow_refresh_tokens is True
authenticator.allow_refresh_tokens = False
monkeypatch.setattr(
ConfidentialAppAuthClient,
'oauth2_start_flow',
Mock()
)
handler = globus_client.handler_for_user(user_model('wash'))
yield authenticator.authenticate(handler)
ConfidentialAppAuthClient.oauth2_start_flow.assert_called_with(
authenticator.get_callback_url(None),
requested_scopes=' '.join(authenticator.scope),
refresh_tokens=False
)
@mark.gen_test
def test_restricted_domain(globus_client, mock_globus_sdk):
mock_globus_sdk.id_token = {'preferred_username': 'wash@serenity.com'}
authenticator = GlobusOAuthenticator()
authenticator.identity_provider = 'alliance.gov'
handler = globus_client.handler_for_user(user_model('wash'))
with raises(web.HTTPError) as exc:
yield authenticator.authenticate(handler)
assert exc.value.status_code == 403
@mark.gen_test
def test_namespaced_domain(globus_client, mock_globus_sdk):
mock_globus_sdk.id_token = {'preferred_username':
'wash@legitshipping.com@serenity.com'}
authenticator = GlobusOAuthenticator()
# Allow any idp
authenticator.identity_provider = ''
handler = globus_client.handler_for_user(user_model('wash'))
data = yield authenticator.authenticate(handler)
assert data['name'] == 'wash'
@mark.gen_test
def test_token_exclusion(globus_client, mock_globus_sdk):
authenticator = GlobusOAuthenticator()
authenticator.exclude_tokens = [
'transfer.api.globus.org',
'auth.globus.org'
]
handler = globus_client.handler_for_user(user_model('wash'))
data = yield authenticator.authenticate(handler)
assert data['name'] == 'wash'
assert list(data['auth_state']['tokens'].keys()) == []
def test_revoke_tokens(monkeypatch):
monkeypatch.setattr(
ConfidentialAppAuthClient,
'oauth2_revoke_token',
Mock()
)
authenticator = GlobusOAuthenticator()
service = {'transfer.api.globus.org': {'access_token': 'foo',
'refresh_token': 'bar'}}
authenticator.revoke_service_tokens(service)
assert ConfidentialAppAuthClient.oauth2_revoke_token.called
@mark.gen_test
def test_custom_logout(monkeypatch):
custom_logout_url = 'https://universityofindependence.edu/logout'
authenticator = GlobusOAuthenticator()
logout_handler = mock_handler(GlobusLogoutHandler,
authenticator=authenticator)
monkeypatch.setattr(
web.RequestHandler,
'redirect',
Mock()
)
logout_handler.clear_login_cookie = Mock()
logout_handler.get_current_user = Mock()
authenticator.logout_redirect_url = custom_logout_url
yield logout_handler.get()
logout_handler.redirect.assert_called_once_with(custom_logout_url)
assert logout_handler.clear_login_cookie.called
@mark.gen_test
def test_logout_revokes_tokens(monkeypatch):
class User:
@gen.coroutine
def get_auth_state(self):
return {'tokens': {}}
save_auth_state = Mock()
name = 'Wash'
user = User()
authenticator = GlobusOAuthenticator()
logout_handler = mock_handler(GlobusLogoutHandler,
authenticator=authenticator)
monkeypatch.setattr(
web.RequestHandler,
'redirect',
Mock()
)
logout_handler.clear_login_cookie = Mock()
authenticator.revoke_service_tokens = Mock()
authenticator.revoke_tokens_on_logout = True
yield logout_handler.clear_tokens(user)
assert authenticator.revoke_service_tokens.called
assert user.save_auth_state.called
|
frugalware/pacman-g2
|
refs/heads/master
|
pactest/tests/remove021.py
|
1
|
self.description = "Remove a package, with a modified file marked for backup (--nosave)"
p1 = pmpkg("dummy")
p1.files = ["etc/dummy.conf*"]
p1.backup = ["etc/dummy.conf"]
self.addpkg2db("local", p1)
self.args = "-Rn dummy"
self.addrule("!PKG_EXIST=dummy")
self.addrule("!FILE_EXIST=etc/dummy.conf")
self.addrule("!FILE_PACSAVE=etc/dummy.conf")
|
justnom/ansible-modules-core
|
refs/heads/devel
|
cloud/amazon/iam_cert.py
|
102
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: iam_cert
short_description: Manage server certificates for use on ELBs and CloudFront
description:
- Allows for the management of server certificates
version_added: "2.0"
options:
name:
description:
- Name of certificate to add, update or remove.
required: true
aliases: []
new_name:
description:
- When present, this will update the name of the cert with the value passed here.
required: false
aliases: []
new_path:
description:
- When present, this will update the path of the cert with the value passed here.
required: false
aliases: []
state:
description:
      - Whether to create or delete the certificate. When present is specified, it will attempt an update if new_path or new_name is specified.
required: true
default: null
choices: [ "present", "absent" ]
aliases: []
path:
description:
- When creating or updating, specify the desired path of the certificate
required: false
default: "/"
aliases: []
cert_chain:
description:
- The path to the CA certificate chain in PEM encoded format.
required: false
default: null
aliases: []
cert:
description:
- The path to the certificate body in PEM encoded format.
required: false
aliases: []
key:
description:
- The path to the private key of the certificate in PEM encoded format.
dup_ok:
description:
      - By default the module will not upload a certificate that is already uploaded into AWS. If set to True, it will upload the certificate as long as the name is unique.
required: false
default: False
aliases: []
aws_secret_key:
description:
- AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_secret_key', 'secret_key' ]
aws_access_key:
description:
- AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_access_key', 'access_key' ]
requirements: [ "boto" ]
author: Jonathan I. Davila
extends_documentation_fragment: aws
'''
EXAMPLES = '''
# Basic server certificate upload
tasks:
  - name: Upload Certificate
iam_cert:
name: very_ssl
state: present
cert: somecert.pem
key: privcertkey
cert_chain: myverytrustedchain
'''
import json
import sys
try:
import boto
import boto.iam
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def boto_exception(err):
'''generic error message handler'''
if hasattr(err, 'error_message'):
error = err.error_message
elif hasattr(err, 'message'):
error = err.message
else:
error = '%s: %s' % (Exception, err)
return error
def cert_meta(iam, name):
    # Fetch the certificate once rather than issuing five identical API calls.
    result = iam.get_server_certificate(name).get_server_certificate_result
    server_cert = result.server_certificate
    metadata = server_cert.server_certificate_metadata
    opath = metadata.path
    ocert = server_cert.certificate_body
    ocert_id = metadata.server_certificate_id
    upload_date = metadata.upload_date
    exp = metadata.expiration
    return opath, ocert, ocert_id, upload_date, exp
def dup_check(module, iam, name, new_name, cert, orig_cert_names, orig_cert_bodies, dup_ok):
update=False
if any(ct in orig_cert_names for ct in [name, new_name]):
for i_name in [name, new_name]:
if i_name is None:
continue
if cert is not None:
                try:
                    c_index = orig_cert_names.index(i_name)
                except ValueError:
                    # list.index raises ValueError, not NameError, on a miss.
                    continue
else:
if orig_cert_bodies[c_index] == cert:
update=True
break
elif orig_cert_bodies[c_index] != cert:
                        module.fail_json(changed=False, msg='A cert with the name %s already exists and'
                                                            ' has a different certificate body associated'
                                                            ' with it. Certificates cannot have the same name' % i_name)
else:
update=True
break
elif cert in orig_cert_bodies and not dup_ok:
for crt_name, crt_body in zip(orig_cert_names, orig_cert_bodies):
if crt_body == cert:
module.fail_json(changed=False, msg='This certificate already'
' exists under the name %s' % crt_name)
return update
def cert_action(module, iam, name, cpath, new_name, new_path, state,
cert, key, chain, orig_cert_names, orig_cert_bodies, dup_ok):
if state == 'present':
update = dup_check(module, iam, name, new_name, cert, orig_cert_names,
orig_cert_bodies, dup_ok)
if update:
opath, ocert, ocert_id, upload_date, exp = cert_meta(iam, name)
changed=True
if new_name and new_path:
iam.update_server_cert(name, new_cert_name=new_name, new_path=new_path)
module.exit_json(changed=changed, original_name=name, new_name=new_name,
original_path=opath, new_path=new_path, cert_body=ocert,
upload_date=upload_date, expiration_date=exp)
elif new_name and not new_path:
iam.update_server_cert(name, new_cert_name=new_name)
module.exit_json(changed=changed, original_name=name, new_name=new_name,
cert_path=opath, cert_body=ocert,
upload_date=upload_date, expiration_date=exp)
elif not new_name and new_path:
iam.update_server_cert(name, new_path=new_path)
module.exit_json(changed=changed, name=new_name,
original_path=opath, new_path=new_path, cert_body=ocert,
upload_date=upload_date, expiration_date=exp)
else:
changed=False
module.exit_json(changed=changed, name=name, cert_path=opath, cert_body=ocert,
upload_date=upload_date, expiration_date=exp,
msg='No new path or name specified. No changes made')
else:
changed=True
iam.upload_server_cert(name, cert, key, cert_chain=chain, path=cpath)
opath, ocert, ocert_id, upload_date, exp = cert_meta(iam, name)
module.exit_json(changed=changed, name=name, cert_path=opath, cert_body=ocert,
upload_date=upload_date, expiration_date=exp)
elif state == 'absent':
if name in orig_cert_names:
changed=True
iam.delete_server_cert(name)
module.exit_json(changed=changed, deleted_cert=name)
else:
changed=False
            module.exit_json(changed=changed, msg='Certificate with the name %s already absent' % name)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state=dict(
default=None, required=True, choices=['present', 'absent']),
name=dict(default=None, required=False),
cert=dict(default=None, required=False),
key=dict(default=None, required=False),
cert_chain=dict(default=None, required=False),
new_name=dict(default=None, required=False),
path=dict(default='/', required=False),
new_path=dict(default=None, required=False),
dup_ok=dict(default=False, required=False, choices=[False, True])
)
)
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive=[],
)
if not HAS_BOTO:
module.fail_json(msg="Boto is required for this module")
ec2_url, aws_access_key, aws_secret_key, region = get_ec2_creds(module)
try:
iam = boto.iam.connection.IAMConnection(
aws_access_key_id=aws_access_key,
aws_secret_access_key=aws_secret_key,
)
except boto.exception.NoAuthHandlerFound, e:
module.fail_json(msg=str(e))
state = module.params.get('state')
name = module.params.get('name')
path = module.params.get('path')
new_name = module.params.get('new_name')
new_path = module.params.get('new_path')
cert_chain = module.params.get('cert_chain')
dup_ok = module.params.get('dup_ok')
if state == 'present':
cert = open(module.params.get('cert'), 'r').read().rstrip()
key = open(module.params.get('key'), 'r').read().rstrip()
if cert_chain is not None:
cert_chain = open(module.params.get('cert_chain'), 'r').read()
else:
        # Reset cert_chain too, since cert_action receives it directly.
        key = cert = cert_chain = None
orig_certs = [ctb['server_certificate_name'] for ctb in \
iam.get_all_server_certs().\
list_server_certificates_result.\
server_certificate_metadata_list]
orig_bodies = [iam.get_server_certificate(thing).\
get_server_certificate_result.\
certificate_body \
for thing in orig_certs]
if new_name == name:
new_name = None
if new_path == path:
new_path = None
changed = False
try:
cert_action(module, iam, name, path, new_name, new_path, state,
cert, key, cert_chain, orig_certs, orig_bodies, dup_ok)
except boto.exception.BotoServerError, err:
module.fail_json(changed=changed, msg=str(err), debug=[cert,key])
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
|
zhengbomo/python_practice
|
refs/heads/master
|
project/PaipaiDai/Database.py
|
1
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
import MySQLdb
import warnings
# Ignore warnings
warnings.filterwarnings("ignore")
class ErrorType:
PageList = 1
User = 2
Detail = 3
class Database(object):
def __init__(self):
try:
self.conn = MySQLdb.connect(host="127.0.0.1", user="root", passwd="111111", db="PaipaiDai", charset="utf8")
self.__create_table()
except Exception, e:
print "数据库连接失败", e
def __create_table(self):
cursor = self.conn.cursor()
        # Listing table
# cursor.execute("CREATE TABLE IF NOT EXISTS test (id int(11) NOT NULL AUTO_INCREMENT, "\
# "url Text, "\
# "PRIMARY KEY (id)"\
# ");")
# self.conn.commit()
        # Detail table
# cursor.execute('')
        # Users table
cursor.execute('Create table if not EXISTS users (id int not null AUTO_INCREMENT, '
'userid text,'
'outcredit text,'
'incredit text,'
'gender text,'
'age int,'
'jobtype text,'
                       'studyvalid int,'  # education verification
                       'hukouvalid int,'  # household registration (hukou) verification
                       'renbankcredit int,'  # PBoC credit report verification
'registtime text,'
'PRIMARY KEY (id)'
')')
        # Loan listing overview table
cursor.execute('Create table if not EXISTS loans (id int not null AUTO_INCREMENT, '
'loan_id varchar(16),'
'amount real,'
'lilv real,'
'title text,'
'user_url text,'
                       'rankcode text,'  # Magic Mirror credit grade
                       'detail_url text,'  # loan detail page URL
                       'limit_time int,'  # loan term
                       'rank int,'  # numeric credit rank
'PRIMARY KEY (id),'
'INDEX (loan_id)'
')')
        # Review records table
cursor.execute('Create table if not EXISTS shenhes (id int not null AUTO_INCREMENT, '
'username varchar(32),'
'title text,'
'PRIMARY KEY (id),'
'INDEX (username)'
')')
        # Loan detail (borrower info) table
cursor.execute('Create table if not EXISTS userinfo (id int not null AUTO_INCREMENT, '
'username varchar(32),'
'user_url text,'
'rankcode varchar(32),'
'rank integer,'
'wenhua text,'
'gouche varchar(32),'
'age integer,'
'zhuzhai text,'
'gender nvarchar(8),'
'jiehun nvarchar(8),'
'total_borrow_amount real,'
'total_daishou real,'
'total_daihuan real,'
'total_huanqing integer,'
'total_yuqi_16 integer,'
'total_yuqi_15 integer,'
'regist_time real,'
'first_borrow_time real,'
'yuqi_days real,'
'tiqian_days real,'
'PRIMARY KEY (id),'
'INDEX (username)'
')')
        # My bids
cursor.execute('Create table if not EXISTS my_loan (id int not null AUTO_INCREMENT, '
'url text,'
'buy_date real,'
'amount real,'
'loan_id text,'
'finished integer,'
'lilv real,'
'PRIMARY KEY (id)'
')')
        # My daily interest-rate task
cursor.execute('Create table if not EXISTS lilv_task (id int not null AUTO_INCREMENT, '
'create_date real,'
                       'normal_lilv real,'  # overall interest rate
                       'finished_lilv real,'  # rate over finished loans
                       'unfinished_lilv real,'  # rate over outstanding loans
                       'total_buy real,'  # total amount invested
'PRIMARY KEY (id)'
')')
self.conn.commit()
cursor.close()
pass
def insert_loans(self, loans):
cursor = self.conn.cursor()
for loan in loans:
cursor.execute('select count(*) from loans where loan_id = %s', (loan['loan_id'],))
result = cursor.fetchone()
if result[0] == 0:
params = [loan['loan_id'], loan['amount'], loan['lilv'], loan['title'], loan['user_url'],
loan['rankcode'], loan['detail_url'], loan['limit_time'], loan['rank']]
cursor.execute('insert into loans(loan_id, amount, lilv, title, user_url, rankcode, detail_url, limit_time, rank) values'
'(%s, %s, %s, %s, %s, %s, %s, %s, %s)', params)
self.conn.commit()
cursor.close()
    # Get loan detail URLs from the loan listing table
def get_loan_urls_from_loans(self, count):
cu = self.conn.cursor()
if count > 0:
sql = """
SELECT detail_url FROM loans l
where not exists (
select * from userinfo u
where u.user_url = l.user_url and u.yuqi_days is null
)
limit %s, %s
"""
cu.execute(sql, [0, count])
else:
sql = """
SELECT detail_url FROM loans l
where not exists (
select * from userinfo u
where u.user_url = l.user_url and u.yuqi_days is null
)
"""
cu.execute(sql, [])
result = cu.fetchall()
cu.close()
return result
def get_empty_lilv_user_url(self):
cursor = self.conn.cursor()
cursor.execute('select user_url from userinfo where weight_lilv is null limit 0, 1')
result = cursor.fetchone()
if result:
return result[0]
else:
return None
    # Get the URL of a loan whose bidders have not been analyzed yet
def get_un_analyzed_bid_user_loan_url(self):
cursor = self.conn.cursor()
cursor.execute('select detail_url from loans where has_analyze_for_bid_users is null or '
'has_analyze_for_bid_users = 0 limit 0, 1')
result = cursor.fetchone()
if result:
return result[0]
else:
return None
    # Mark whether a loan URL has had its bidders analyzed
def set_un_analyzed_bid_user_loan_url(self, url, flag=True):
cursor = self.conn.cursor()
cursor.execute('update loans set has_analyze_for_bid_users=%s where detail_url=%s', [flag, url])
self.conn.commit()
cursor.close()
def insert_user_urls(self, urls):
cursor = self.conn.cursor()
for url in urls:
cursor.execute('select count(*) from userinfo where user_url = %s', [url])
result = cursor.fetchone()
if result[0] == 0:
cursor.execute('insert into userinfo(user_url) values(%s)', [url])
self.conn.commit()
cursor.close()
def insert_user_with_weight_lilv(self, url, lilv):
cursor = self.conn.cursor()
cursor.execute('select count(*) from userinfo where user_url = %s', [url])
result = cursor.fetchone()
if result[0] == 0:
cursor.execute('insert into userinfo(user_url, weight_lilv) values(%s, %s)', [url, lilv])
else:
cursor.execute('update userinfo set weight_lilv=%s where user_url=%s', [lilv, url])
self.conn.commit()
cursor.close()
def insert_user_info(self, info):
username = info['username']
user_url = info['user_url']
yuqi_days = info['yuqi_days']
tiqian_days = info['tiqian_days']
if username and user_url:
cursor = self.conn.cursor()
cursor.execute('select count(*) from userinfo where username = %s', [username])
result = cursor.fetchone()
if result[0] == 0:
cursor.execute('insert into userinfo(username, user_url) values(%s, %s)', [username, user_url])
tongji_info = info['tongji_info']
params = [tongji_info.get('total_borrow_amount'), tongji_info.get('total_huanqing'), tongji_info.get(
'total_daishou'), tongji_info.get('total_yuqi_16'), tongji_info.get('total_yuqi_15'), tongji_info.get(
'total_daihuan'), tongji_info.get('regist_time'), tongji_info.get('first_borrow_time'), username]
cursor.execute('update userinfo set total_borrow_amount=%s, total_huanqing=%s, total_daishou=%s, '
'total_yuqi_16=%s, total_yuqi_15=%s, total_daihuan=%s, regist_time=%s, '
'first_borrow_time=%s where username=%s', params)
cursor.execute('update userinfo set yuqi_days=%s, tiqian_days=%s where username=%s', [yuqi_days,
tiqian_days, username])
borrower_info = info['borrower_info']
wenhua = borrower_info.get(u'文化程度')
gouche = borrower_info.get(u'是否购车')
age = borrower_info.get(u'年龄')
if age:
age = int(age)
zhuzhai = borrower_info.get(u'住宅状况')
gender = borrower_info.get(u'性别')
jiehun = borrower_info.get(u'婚姻情况')
params = [wenhua, gouche, zhuzhai, age, gender, jiehun, username]
cursor.execute('update userinfo set wenhua=%s, gouche=%s, zhuzhai=%s, age=%s, gender=%s, jiehun=%s where '
'username=%s', params)
shenhe_infos = info['shenhe_infos']
for shenhe_info in shenhe_infos:
params = [username, shenhe_info]
cursor.execute('select count(*) from shenhes where username = %s and title = %s', params)
result = cursor.fetchone()
if result[0] == 0:
cursor.execute('insert into shenhes(username, title) values'
'(%s, %s)', params)
self.conn.commit()
cursor.close()
def insert_error(self, url, error_type):
pass
def get_errors(self, error_type):
pass
def get_uncrawl_detail(self):
pass
def get_empty_user_urls(self, count=0):
cu = self.conn.cursor()
if count > 0:
sql = """
select user_url from userinfo
where wenhua is null
limit %s, %s
"""
cu.execute(sql, [0, count])
else:
sql = """
select user_url from userinfo
where wenhua is null
"""
cu.execute(sql, [])
result = cu.fetchall()
cu.close()
return result
def get_most_tiqian_cd_user_urls(self, count=10):
cu = self.conn.cursor()
if count > 0:
sql = """
select user_url, tiqian_days from userinfo
where rankcode='C' or rankcode='D'
order by tiqian_days
limit %s, %s
"""
cu.execute(sql, [0, count])
else:
sql = """
select user_url, tiqian_days from userinfo
where rankcode='C' or rankcode='D'
order by tiqian_days
"""
cu.execute(sql, [])
result = cu.fetchall()
cu.close()
return result
    # My bid list
def insert_my_loan(self, url, date, amount, loan_id):
cursor = self.conn.cursor()
cursor.execute('select count(*) from my_loan where url=%s', [url])
result = cursor.fetchone()
if result[0] == 0:
cursor.execute('insert into my_loan(url, buy_date, amount, loan_id) values(%s, %s, %s, %s)', [url, date,
amount, loan_id])
self.conn.commit()
cursor.close()
    # My bid list (batch insert)
def insert_my_loans(self, my_loans):
cursor = self.conn.cursor()
for loan in my_loans:
url = loan['url']
date = loan['date']
amount = loan['amount']
loan_id = loan['loan_id']
cursor.execute('select count(*) from my_loan where url=%s', [url])
result = cursor.fetchone()
if result[0]:
cursor.execute('select sum(amount) from my_loan where url=%s and buy_date!=%s', [url, date])
result = cursor.fetchone()
if result[0]:
amount += result[0]
cursor.execute('update my_loan set amount=%s where url = %s', [amount, url])
else:
cursor.execute('update my_loan set amount=%s where url = %s', [amount, url])
else:
cursor.execute('insert into my_loan(url, buy_date, amount, loan_id) values(%s, %s, %s, %s)', [url, date,
amount, loan_id])
self.conn.commit()
cursor.close()
def get_my_loan(self, index, count):
cu = self.conn.cursor()
if count > 0:
sql = """
select * from my_loan
where finished is null or lilv is null or finished = 0
order by buy_date
limit %s, %s
"""
cu.execute(sql, [index, count])
else:
sql = """
select * from my_loan
where finished is null or lilv is null or finished = 0
order by buy_date
"""
cu.execute(sql, [])
result = cu.fetchall()
cu.close()
return result
def update_my_loan(self, url, finished, lilv):
cursor = self.conn.cursor()
if finished:
finished = 1
else:
finished = 0
cursor.execute('update my_loan set finished = %s, lilv=%s where url = %s', [finished, lilv, url])
self.conn.commit()
cursor.close()
    # My daily interest-rate task
def lilv_task_exist(self, date):
cursor = self.conn.cursor()
cursor.execute('select count(*) from lilv_task where create_date = %s', [date])
result = cursor.fetchone()
cursor.close()
if result[0]:
return True
else:
return False
    # My daily task
def insert_lilv_task(self, date, normal_lilv, finished_lilv, unfinished_lilv):
cursor = self.conn.cursor()
cursor.execute('insert into lilv_task(create_date, normal_lilv, finished_lilv, unfinished_lilv) values(%s, '
'%s, %s, %s)', [date, normal_lilv, finished_lilv, unfinished_lilv])
self.conn.commit()
cursor.close()
    # Update my daily task
def update_lilv_task(self):
sql = """
insert into lilv_task (create_date, create_date2, normal_lilv, finished_lilv, unfinished_lilv, total_buy, current_buy)
SELECT
UNIX_TIMESTAMP(CURDATE()) as create_date,
CURDATE() as create_date2,
(select avg(lilv) from my_loan
where `finished` is not null
order by lilv) as normal_lilv,
(select avg(lilv) from my_loan
where `finished` = 1
order by lilv) as finished_lilv,
(select avg(lilv) from my_loan
where `finished` = 0
order by lilv) as unfinished_lilv,
(select sum(`amount`) from my_loan) as total_buy,
(select sum(amount) from my_loan where finished = 0) as current_buy
WHERE NOT EXISTS(
SELECT *
FROM lilv_task
WHERE create_date = UNIX_TIMESTAMP(CURDATE()))
"""
cursor = self.conn.cursor()
cursor.execute(sql)
self.conn.commit()
cursor.close()
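# Minimal usage sketch (assumes the hard-coded MySQL credentials above are
# valid; the count argument is illustrative):
#
#   db = Database()
#   for (detail_url,) in db.get_loan_urls_from_loans(10):
#       print detail_url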
|
kevinr/750book-web
|
refs/heads/master
|
_750bookweb/manage.py
|
26
|
#!/usr/bin/env python
import sys
from os.path import abspath, dirname, join
try:
import pinax
except ImportError:
sys.stderr.write("Error: Can't import Pinax. Make sure you are in a virtual environment that has\nPinax installed or create one with pinax-boot.py.\n")
sys.exit(1)
from django.conf import settings
from django.core.management import setup_environ, execute_from_command_line
try:
import settings as settings_mod # Assumed to be in the same directory.
except ImportError:
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
# setup the environment before we start accessing things in the settings.
setup_environ(settings_mod)
sys.path.insert(0, join(settings.PROJECT_ROOT, "apps"))
if __name__ == "__main__":
execute_from_command_line()
|
makinacorpus/odoo
|
refs/heads/8.0
|
addons/l10n_ar/__openerp__.py
|
260
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 Cubic ERP - Teradata SAC (<http://cubicerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Argentina Localization Chart Account',
'version': '1.0',
'description': """
Argentinian accounting chart and tax localization.
==================================================
Argentinian chart of accounts and taxes according to current regulations.
""",
'author': ['Cubic ERP'],
'website': 'http://cubicERP.com',
'category': 'Localization/Account Charts',
'depends': ['account_chart'],
'data':[
'account_tax_code.xml',
'l10n_ar_chart.xml',
'account_tax.xml',
'l10n_ar_wizard.xml',
],
'demo': [],
'active': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
yongtang/tensorflow
|
refs/heads/master
|
tensorflow/python/keras/optimizer_v2/nadam.py
|
5
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Nadam optimizer implementation."""
# pylint: disable=g-classes-have-attributes
from tensorflow.python.framework import ops
from tensorflow.python.keras import backend_config
from tensorflow.python.keras.optimizer_v2 import learning_rate_schedule
from tensorflow.python.keras.optimizer_v2 import optimizer_v2
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.optimizers.Nadam')
class Nadam(optimizer_v2.OptimizerV2):
r"""Optimizer that implements the NAdam algorithm.
Much like Adam is essentially RMSprop with momentum, Nadam is Adam with
Nesterov momentum.
Args:
learning_rate: A Tensor or a floating point value. The learning rate.
beta_1: A float value or a constant float tensor. The exponential decay
rate for the 1st moment estimates.
beta_2: A float value or a constant float tensor. The exponential decay
rate for the exponentially weighted infinity norm.
epsilon: A small constant for numerical stability.
name: Optional name for the operations created when applying gradients.
Defaults to `"Nadam"`.
**kwargs: Keyword arguments. Allowed to be one of
`"clipnorm"` or `"clipvalue"`.
`"clipnorm"` (float) clips gradients by norm; `"clipvalue"` (float) clips
gradients by value.
Usage Example:
>>> opt = tf.keras.optimizers.Nadam(learning_rate=0.2)
>>> var1 = tf.Variable(10.0)
>>> loss = lambda: (var1 ** 2) / 2.0
>>> step_count = opt.minimize(loss, [var1]).numpy()
>>> "{:.1f}".format(var1.numpy())
  '9.8'
Reference:
- [Dozat, 2015](http://cs229.stanford.edu/proj2015/054_report.pdf).
"""
_HAS_AGGREGATE_GRAD = True
def __init__(self,
learning_rate=0.001,
beta_1=0.9,
beta_2=0.999,
epsilon=1e-7,
name='Nadam',
**kwargs):
# Backwards compatibility with keras NAdam optimizer.
kwargs['decay'] = kwargs.pop('schedule_decay', 0.004)
learning_rate = kwargs.get('lr', learning_rate)
if isinstance(learning_rate, learning_rate_schedule.LearningRateSchedule):
raise ValueError('The Nadam optimizer does not support '
'tf.keras.optimizers.LearningRateSchedules as the '
'learning rate.')
super(Nadam, self).__init__(name, **kwargs)
self._set_hyper('learning_rate', kwargs.get('lr', learning_rate))
self._set_hyper('decay', self._initial_decay)
self._set_hyper('beta_1', beta_1)
self._set_hyper('beta_2', beta_2)
self.epsilon = epsilon or backend_config.epsilon()
self._m_cache = None
def _create_slots(self, var_list):
var_dtype = var_list[0].dtype.base_dtype
if self._m_cache is None:
self._m_cache = self.add_weight(
'momentum_cache',
shape=[],
dtype=var_dtype,
initializer='ones',
trainable=False,
aggregation=tf_variables.VariableAggregation.ONLY_FIRST_REPLICA)
self._weights.append(self._m_cache)
# Separate for-loops to respect the ordering of slot variables from v1.
for var in var_list:
# Create slots for the first moments.
self.add_slot(var, 'm')
for var in var_list:
# Create slots for the second moments.
self.add_slot(var, 'v')
def _prepare_local(self, var_device, var_dtype, apply_state):
lr_t = array_ops.identity(self._get_hyper('learning_rate', var_dtype))
beta_1_t = array_ops.identity(self._get_hyper('beta_1', var_dtype))
beta_2_t = array_ops.identity(self._get_hyper('beta_2', var_dtype))
local_step = math_ops.cast(self.iterations + 1, var_dtype)
next_step = math_ops.cast(self.iterations + 2, var_dtype)
decay_base = math_ops.cast(0.96, var_dtype)
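    # Momentum schedule from Dozat (2015): step t uses
    # mu_t = beta_1 * (1 - 0.5 * 0.96**(decay * t)), and the running product of
    # the mu_t values (the "momentum cache") supplies the bias corrections for
    # the first-moment estimate at the current and the next step.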
m_t = beta_1_t * (1. - 0.5 * (
math_ops.pow(decay_base, self._initial_decay * local_step)))
m_t_1 = beta_1_t * (1. - 0.5 * (
math_ops.pow(decay_base, self._initial_decay * next_step)))
m_schedule_new = math_ops.cast(self._m_cache_read, var_dtype) * m_t
if var_dtype is self._m_cache.dtype:
m_schedule_new = array_ops.identity(state_ops.assign(
self._m_cache, m_schedule_new, use_locking=self._use_locking))
m_schedule_next = m_schedule_new * m_t_1
apply_state[(var_device, var_dtype)] = dict(
lr_t=lr_t,
neg_lr_t=-lr_t, # pylint: disable=invalid-unary-operand-type
epsilon=ops.convert_to_tensor_v2_with_dispatch(self.epsilon, var_dtype),
beta_1_t=beta_1_t,
beta_2_t=beta_2_t,
m_t=m_t,
m_t_1=m_t_1,
one_minus_beta_1_t=1 - beta_1_t,
one_minus_beta_2_t=1 - beta_2_t,
one_minus_m_t=1. - m_t,
one_minus_m_schedule_new=1. - m_schedule_new,
one_minus_m_schedule_next=1. - m_schedule_next,
v_t_prime_denominator=1. - math_ops.pow(beta_2_t, local_step),
)
def _prepare(self, var_list):
# Get the value of the momentum cache before starting to apply gradients.
self._m_cache_read = array_ops.identity(self._m_cache)
return super(Nadam, self)._prepare(var_list)
def _resource_apply_dense(self, grad, var, apply_state=None):
var_device, var_dtype = var.device, var.dtype.base_dtype
coefficients = ((apply_state or {}).get((var_device, var_dtype))
or self._fallback_apply_state(var_device, var_dtype))
m = self.get_slot(var, 'm')
v = self.get_slot(var, 'v')
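    # Nadam step (Dozat, 2015): the update blends the bias-corrected lookahead
    # gradient g' with the bias-corrected next-step momentum m', i.e.
    #   var <- var - lr * ((1 - mu_t) * g' + mu_{t+1} * m') / (sqrt(v') + eps)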
g_prime = grad / coefficients['one_minus_m_schedule_new']
m_t = (coefficients['beta_1_t'] * m +
coefficients['one_minus_beta_1_t'] * grad)
m_t = state_ops.assign(m, m_t, use_locking=self._use_locking)
m_t_prime = m_t / coefficients['one_minus_m_schedule_next']
v_t = (coefficients['beta_2_t'] * v +
coefficients['one_minus_beta_2_t'] * math_ops.square(grad))
v_t = state_ops.assign(v, v_t, use_locking=self._use_locking)
v_t_prime = v_t / coefficients['v_t_prime_denominator']
m_t_bar = (coefficients['one_minus_m_t'] * g_prime +
coefficients['m_t_1'] * m_t_prime)
var_t = var - coefficients['lr_t'] * m_t_bar / (
math_ops.sqrt(v_t_prime) + coefficients['epsilon'])
return state_ops.assign(var, var_t, use_locking=self._use_locking).op
def _resource_apply_sparse(self, grad, var, indices, apply_state=None):
var_device, var_dtype = var.device, var.dtype.base_dtype
coefficients = ((apply_state or {}).get((var_device, var_dtype))
or self._fallback_apply_state(var_device, var_dtype))
m = self.get_slot(var, 'm')
v = self.get_slot(var, 'v')
g_prime = grad / coefficients['one_minus_m_schedule_new']
# m_t = beta1 * m + (1 - beta1) * g_t
m_scaled_g_values = grad * coefficients['one_minus_beta_1_t']
m_t = state_ops.assign(m, m * coefficients['beta_1_t'],
use_locking=self._use_locking)
with ops.control_dependencies([m_t]):
m_t = self._resource_scatter_add(m, indices, m_scaled_g_values)
m_t_slice = array_ops.gather(m_t, indices)
m_t_prime = m_t_slice / coefficients['one_minus_m_schedule_next']
m_t_bar = (coefficients['one_minus_m_t'] * g_prime +
coefficients['m_t_1'] * m_t_prime)
# v_t = beta2 * v + (1 - beta2) * (g_t * g_t)
v_scaled_g_values = (grad * grad) * coefficients['one_minus_beta_2_t']
v_t = state_ops.assign(v, v * coefficients['beta_2_t'],
use_locking=self._use_locking)
with ops.control_dependencies([v_t]):
v_t = self._resource_scatter_add(v, indices, v_scaled_g_values)
v_t_slice = array_ops.gather(v_t, indices)
v_t_prime = v_t_slice / coefficients['v_t_prime_denominator']
v_prime_sqrt_plus_eps = math_ops.sqrt(v_t_prime) + coefficients['epsilon']
var_update = self._resource_scatter_add(
var, indices,
coefficients['neg_lr_t'] * m_t_bar / v_prime_sqrt_plus_eps)
return control_flow_ops.group(*[var_update, m_t_bar, v_t])
def get_config(self):
config = super(Nadam, self).get_config()
config.update({
'learning_rate': self._serialize_hyperparameter('learning_rate'),
'decay': self._initial_decay,
'beta_1': self._serialize_hyperparameter('beta_1'),
'beta_2': self._serialize_hyperparameter('beta_2'),
'epsilon': self.epsilon,
})
return config
|
jsoref/django
|
refs/heads/master
|
tests/validation/test_validators.py
|
320
|
from __future__ import unicode_literals
from . import ValidationTestCase
from .models import ModelToValidate
class TestModelsWithValidators(ValidationTestCase):
def test_custom_validator_passes_for_correct_value(self):
mtv = ModelToValidate(number=10, name='Some Name', f_with_custom_validator=42)
self.assertIsNone(mtv.full_clean())
def test_custom_validator_raises_error_for_incorrect_value(self):
mtv = ModelToValidate(number=10, name='Some Name', f_with_custom_validator=12)
self.assertFailsValidation(mtv.full_clean, ['f_with_custom_validator'])
self.assertFieldFailsValidationWithMessage(
mtv.full_clean,
'f_with_custom_validator',
['This is not the answer to life, universe and everything!']
)
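# For reference, a custom validator satisfying these tests could look like the
# following sketch (the actual validator lives in this test app's models
# module; the function name here is illustrative):
#
#   from django.core.exceptions import ValidationError
#
#   def validate_answer_to_universe(value):
#       if value != 42:
#           raise ValidationError(
#               'This is not the answer to life, universe and everything!')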
|
klausman/scion
|
refs/heads/master
|
python/test/lib/topology_test.py
|
1
|
# Copyright 2015 ETH Zurich
# Copyright 2018 ETH Zurich, Anapaya Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`lib_topology_test` --- SCION topology tests
=================================================
"""
# Stdlib
from collections import defaultdict
from unittest.mock import call, patch
# External packages
import nose
import nose.tools as ntools
# SCION
from lib.errors import SCIONKeyError
from lib.topology import (
Element,
InterfaceElement,
Topology
)
from test.testcommon import assert_these_calls, create_mock
class TestElementInit(object):
"""
Unit tests for lib.topology.Element.__init__
"""
def test_basic(self):
inst = Element()
ntools.assert_equal(inst.public, None)
ntools.assert_is_none(inst.name)
@patch("lib.topology.haddr_parse_interface", autospec=True)
def test_public(self, parse):
public = {'IPv4': {'Public': {'Addr': 'addr', 'L4Port': 'port'}}}
inst = Element(public)
parse.assert_called_with("addr")
ntools.eq_(inst.public[0], parse.return_value)
ntools.eq_(inst.public[1], 'port')
@patch("lib.topology.haddr_parse_interface", autospec=True)
def test_bind(self, parse):
        bind = {'IPv4': {'Bind': {'Addr': 'bind_addr', 'L4Port': 'port'},
                         'Public': {'Addr': 'pub_addr', 'L4Port': 'port'}}}
        inst = Element(bind)
        parse.assert_has_calls([call('pub_addr'), call('bind_addr')])
ntools.eq_(inst.bind[0], parse.return_value)
ntools.eq_(inst.public[1], 'port')
def test_name(self):
name = create_mock(["__str__"])
name.__str__.return_value = "hostname"
# Call
inst = Element(name=name)
# Tests
ntools.assert_equal(inst.name, "hostname")
class TestInterfaceElementInit(object):
"""
Unit tests for lib.topology.InterfaceElement.__init__
"""
@patch("lib.topology.haddr_parse_interface", autospec=True)
@patch("lib.topology.ISD_AS", autospec=True)
@patch("lib.topology.RouterAddrElement.__init__", autospec=True)
def test_full(self, super_init, isd_as, parse):
intf_dict = {
'Overlay': 'UDP/IPv4',
'PublicOverlay': {
'Addr': 'addr',
'OverlayPort': 6
},
'RemoteOverlay': {
'Addr': 'toaddr',
'OverlayPort': 5
},
'Bandwidth': 1001,
'ISD_AS': '3-ff00:0:301',
'LinkTo': 'PARENT',
'MTU': 4242
}
if_id = 1
addrs = {'IPv4': {'PublicOverlay': {'Addr': 'addr', 'OverlayPort': 6}}}
# Call
inst = InterfaceElement(if_id, intf_dict, 'name')
# Tests
super_init.assert_called_once_with(inst, addrs, 'name')
ntools.eq_(inst.if_id, 1)
ntools.eq_(inst.isd_as, isd_as.return_value)
ntools.eq_(inst.link_type, "parent")
ntools.eq_(inst.bandwidth, 1001)
ntools.eq_(inst.mtu, 4242)
ntools.eq_(inst.overlay, "UDP/IPv4")
parse.assert_called_once_with("toaddr")
ntools.eq_(inst.remote, (parse.return_value, 5))
@patch("lib.topology.haddr_parse_interface", autospec=True)
@patch("lib.topology.ISD_AS", autospec=True)
@patch("lib.topology.RouterAddrElement.__init__", autospec=True)
def test_stripped(self, super_init, isd_as, parse):
intf_dict = {
'Bandwidth': 1001,
'ISD_AS': '3-ff00:0:301',
'LinkTo': 'PARENT',
'MTU': 4242
}
if_id = 1
# Call
inst = InterfaceElement(if_id, intf_dict, 'name')
# Tests
super_init.assert_called_once_with(inst, None, 'name')
ntools.eq_(inst.if_id, 1)
ntools.eq_(inst.isd_as, isd_as.return_value)
ntools.eq_(inst.link_type, "parent")
ntools.eq_(inst.bandwidth, 1001)
ntools.eq_(inst.mtu, 4242)
ntools.eq_(inst.overlay, None)
assert parse.call_count == 0
ntools.eq_(inst.remote, None)
class TestTopologyParseDict(object):
"""
Unit tests for lib.topology.Topology.parse_dict
"""
@patch("lib.topology.ISD_AS", autospec=True)
def test(self, isd_as):
topo_dict = {'Core': True, 'ISD_AS': '1-ff00:0:312', 'MTU': 440, 'Overlay': 'UDP/IPv4'}
inst = Topology()
inst._parse_srv_dicts = create_mock()
inst._parse_router_dicts = create_mock()
inst._parse_zk_dicts = create_mock()
# Call
inst.parse_dict(topo_dict)
# Tests
ntools.eq_(inst.is_core_as, True)
ntools.eq_(inst.isd_as, isd_as.return_value)
ntools.eq_(inst.mtu, 440)
inst._parse_srv_dicts.assert_called_once_with(topo_dict)
inst._parse_router_dicts.assert_called_once_with(topo_dict)
inst._parse_zk_dicts.assert_called_once_with(topo_dict)
class TestTopologyParseSrvDicts(object):
"""
    Unit tests for lib.topology.Topology._parse_srv_dicts
"""
@patch("lib.topology.ServerElement", autospec=True)
def test(self, server):
topo_dict = {
'BeaconService': {"bs1": "bs1 val"},
'CertificateService': {"cs1": "cs1 val"},
'PathService': {"ps1": "ps1 val", "ps2": "ps2 val"},
'SibraService': {"sb1": "sb1 val"},
'SIG': {"sig1": "sig1 val"},
'DiscoveryService': {"ds1": "ds1 val"},
}
inst = Topology()
server.side_effect = lambda v, k: "%s-%s" % (k, v)
# Call
inst._parse_srv_dicts(topo_dict)
# Tests
assert_these_calls(server, [
call("bs1 val", "bs1"),
call("cs1 val", "cs1"),
call("ps1 val", "ps1"),
call("ps2 val", "ps2"),
call("sb1 val", "sb1"),
call("sig1 val", "sig1"),
call("ds1 val", "ds1"),
], any_order=True)
ntools.eq_(inst.beacon_servers, ["bs1-bs1 val"])
ntools.eq_(inst.certificate_servers, ["cs1-cs1 val"])
ntools.eq_(sorted(inst.path_servers),
sorted(["ps1-ps1 val", "ps2-ps2 val"]))
ntools.eq_(inst.discovery_servers, ["ds1-ds1 val"])
class TestTopologyParseRouterDicts(object):
"""
    Unit tests for lib.topology.Topology._parse_router_dicts
"""
@patch("lib.topology.RouterElement", autospec=True)
def test(self, router):
def _mk_router(type_):
m = create_mock(["interfaces"])
m.interfaces = {0: create_mock(["link_type"])}
m.interfaces[0].link_type = type_
routers[type_].append(m)
return m
routers = defaultdict(list)
router_dict = {"br-parent": "parent"}
inst = Topology()
router.side_effect = lambda v, k: _mk_router(v)
# Call
inst._parse_router_dicts({"BorderRouters": router_dict})
# Tests
ntools.assert_count_equal(inst.border_routers, routers["parent"])
class TestTopologyParseZkDicts(object):
"""
    Unit tests for lib.topology.Topology._parse_zk_dicts
"""
@patch("lib.topology.haddr_parse_interface", autospec=True)
def test(self, parse):
zk_dict = {
'zk0': {'Addr': 'zkv4', 'L4Port': 2181},
'zk1': {'Addr': 'zkv6', 'L4Port': 2182},
}
inst = Topology()
parse.side_effect = lambda x: x
# Call
inst._parse_zk_dicts({"ZookeeperService": zk_dict})
# Tests
ntools.assert_count_equal(inst.zookeepers,
["[zkv4]:2181", "[zkv6]:2182"])
class TestTopologyGetAllInterfaces(object):
"""
    Unit tests for lib.topology.Topology.get_all_interfaces
"""
def test(self):
topology = Topology()
topology.parent_interfaces = [0, 1]
topology.child_interfaces = [2]
topology.peer_interfaces = [3, 4, 5]
topology.core_interfaces = [6, 7]
ntools.eq_(topology.get_all_interfaces(), list(range(8)))
class TestTopologyGetOwnConfig(object):
"""
Unit tests for lib.topology.Topology.get_own_config
"""
def test_basic(self):
inst = Topology()
for i in range(4):
bs = create_mock(["name"])
bs.name = "bs%d" % i
inst.beacon_servers.append(bs)
# Call
ntools.eq_(inst.get_own_config("bs", "bs3"),
inst.beacon_servers[3])
def test_unknown_type(self):
inst = Topology()
# Call
ntools.assert_raises(SCIONKeyError, inst.get_own_config, "asdf", 1)
def test_unknown_server(self):
inst = Topology()
# Call
ntools.assert_raises(SCIONKeyError, inst.get_own_config, "bs", "name")
if __name__ == "__main__":
nose.run(defaultTest=__name__)
|
tersmitten/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/vultr/vultr_server.py
|
10
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2017, René Moser <mail@renemoser.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: vultr_server
short_description: Manages virtual servers on Vultr.
description:
- Deploy, start, stop, update, restart, reinstall servers.
version_added: "2.5"
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the server.
required: true
aliases: [ label ]
hostname:
description:
- Hostname to assign to this server.
os:
description:
- The operating system.
- Required if the server does not yet exist and is not restoring from a snapshot.
snapshot:
version_added: "2.8"
description:
- Name of snapshot to restore server from.
firewall_group:
description:
- The firewall group to assign this server to.
plan:
description:
- Plan to use for the server.
- Required if the server does not yet exist.
force:
description:
      - Force stop/start the server if required to apply changes.
- Otherwise a running server will not be changed.
type: bool
notify_activate:
description:
- Whether to send an activation email when the server is ready or not.
- Only considered on creation.
type: bool
private_network_enabled:
description:
- Whether to enable private networking or not.
type: bool
auto_backup_enabled:
description:
- Whether to enable automatic backups or not.
type: bool
ipv6_enabled:
description:
- Whether to enable IPv6 or not.
type: bool
tag:
description:
- Tag for the server.
user_data:
description:
- User data to be passed to the server.
startup_script:
description:
- Name of the startup script to execute on boot.
- Only considered while creating the server.
ssh_keys:
description:
- List of SSH keys passed to the server on creation.
aliases: [ ssh_key ]
reserved_ip_v4:
description:
- IP address of the floating IP to use as the main IP of this server.
- Only considered on creation.
region:
description:
- Region the server is deployed into.
- Required if the server does not yet exist.
state:
description:
- State of the server.
default: present
choices: [ present, absent, restarted, reinstalled, started, stopped ]
extends_documentation_fragment: vultr
'''
EXAMPLES = '''
- name: create server
local_action:
module: vultr_server
name: "{{ vultr_server_name }}"
os: CentOS 7 x64
plan: 1024 MB RAM,25 GB SSD,1.00 TB BW
ssh_keys:
- my_key
- your_key
region: Amsterdam
state: present
- name: ensure a server is present and started
local_action:
module: vultr_server
name: "{{ vultr_server_name }}"
os: CentOS 7 x64
plan: 1024 MB RAM,25 GB SSD,1.00 TB BW
ssh_key: my_key
region: Amsterdam
state: started
- name: ensure a server is present and stopped
local_action:
module: vultr_server
name: "{{ vultr_server_name }}"
os: CentOS 7 x64
plan: 1024 MB RAM,25 GB SSD,1.00 TB BW
region: Amsterdam
state: stopped
- name: ensure an existing server is stopped
local_action:
module: vultr_server
name: "{{ vultr_server_name }}"
state: stopped
- name: ensure an existing server is started
local_action:
module: vultr_server
name: "{{ vultr_server_name }}"
state: started
- name: ensure a server is absent
local_action:
module: vultr_server
name: "{{ vultr_server_name }}"
state: absent
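# A hypothetical example, not from the original docs: restore a new server
# from an existing snapshot (the snapshot name below is an assumption).
- name: ensure a server restored from a snapshot is present
  local_action:
    module: vultr_server
    name: "{{ vultr_server_name }}"
    snapshot: my_snapshot
    plan: 1024 MB RAM,25 GB SSD,1.00 TB BW
    region: Amsterdam
    state: present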
'''
RETURN = '''
---
vultr_api:
  description: Response from Vultr API with a few additions/modifications
returned: success
type: complex
contains:
api_account:
description: Account used in the ini file to select the key
returned: success
type: str
sample: default
api_timeout:
description: Timeout used for the API requests
returned: success
type: int
sample: 60
api_retries:
description: Amount of max retries for the API requests
returned: success
type: int
sample: 5
api_endpoint:
description: Endpoint used for the API requests
returned: success
type: str
sample: "https://api.vultr.com"
vultr_server:
  description: Response from Vultr API with a few additions/modifications
returned: success
type: complex
contains:
id:
description: ID of the server
returned: success
type: str
sample: 10194376
name:
description: Name (label) of the server
returned: success
type: str
sample: "ansible-test-vm"
plan:
description: Plan used for the server
returned: success
type: str
sample: "1024 MB RAM,25 GB SSD,1.00 TB BW"
allowed_bandwidth_gb:
description: Allowed bandwidth to use in GB
returned: success
type: int
sample: 1000
auto_backup_enabled:
description: Whether automatic backups are enabled
returned: success
type: bool
sample: false
cost_per_month:
description: Cost per month for the server
returned: success
type: float
sample: 5.00
current_bandwidth_gb:
description: Current bandwidth used for the server
returned: success
type: int
sample: 0
date_created:
description: Date when the server was created
returned: success
type: str
sample: "2017-08-26 12:47:48"
default_password:
      description: Password to log in as root on the server
returned: success
type: str
sample: "!p3EWYJm$qDWYaFr"
disk:
description: Information about the disk
returned: success
type: str
sample: "Virtual 25 GB"
v4_gateway:
description: IPv4 gateway
returned: success
type: str
sample: "45.32.232.1"
internal_ip:
description: Internal IP
returned: success
type: str
sample: ""
kvm_url:
description: URL to the VNC
returned: success
type: str
sample: "https://my.vultr.com/subs/vps/novnc/api.php?data=xyz"
region:
description: Region the server was deployed into
returned: success
type: str
sample: "Amsterdam"
v4_main_ip:
description: Main IPv4
returned: success
type: str
sample: "45.32.233.154"
v4_netmask:
description: Netmask IPv4
returned: success
type: str
sample: "255.255.254.0"
os:
description: Operating system used for the server
returned: success
type: str
sample: "CentOS 6 x64"
firewall_group:
      description: Firewall group the server is assigned to
returned: success and available
type: str
sample: "CentOS 6 x64"
pending_charges:
description: Pending charges
returned: success
type: float
sample: 0.01
power_status:
description: Power status of the server
returned: success
type: str
sample: "running"
ram:
description: Information about the RAM size
returned: success
type: str
sample: "1024 MB"
server_state:
      description: State of the server
returned: success
type: str
sample: "ok"
status:
      description: Deployment status of the server
returned: success
type: str
sample: "active"
tag:
description: TBD
returned: success
type: str
sample: ""
v6_main_ip:
description: Main IPv6
returned: success
type: str
sample: ""
v6_network:
description: Network IPv6
returned: success
type: str
sample: ""
v6_network_size:
description: Network size IPv6
returned: success
type: str
sample: ""
v6_networks:
description: Networks IPv6
returned: success
type: list
sample: []
vcpu_count:
description: Virtual CPU count
returned: success
type: int
sample: 1
'''
import time
import base64
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text, to_bytes
from ansible.module_utils.vultr import (
Vultr,
vultr_argument_spec,
)
class AnsibleVultrServer(Vultr):
def __init__(self, module):
super(AnsibleVultrServer, self).__init__(module, "vultr_server")
self.server = None
self.returns = {
'SUBID': dict(key='id'),
'label': dict(key='name'),
'date_created': dict(),
'allowed_bandwidth_gb': dict(convert_to='int'),
'auto_backups': dict(key='auto_backup_enabled', convert_to='bool'),
'current_bandwidth_gb': dict(),
'kvm_url': dict(),
'default_password': dict(),
'internal_ip': dict(),
'disk': dict(),
'cost_per_month': dict(convert_to='float'),
'location': dict(key='region'),
'main_ip': dict(key='v4_main_ip'),
'network_v4': dict(key='v4_network'),
'gateway_v4': dict(key='v4_gateway'),
'os': dict(),
'pending_charges': dict(convert_to='float'),
'power_status': dict(),
'ram': dict(),
'plan': dict(),
'server_state': dict(),
'status': dict(),
'firewall_group': dict(),
'tag': dict(),
'v6_main_ip': dict(),
'v6_network': dict(),
'v6_network_size': dict(),
'v6_networks': dict(),
'vcpu_count': dict(convert_to='int'),
}
self.server_power_state = None
def get_startup_script(self):
return self.query_resource_by_key(
key='name',
value=self.module.params.get('startup_script'),
resource='startupscript',
)
def get_os(self):
if self.module.params.get('snapshot'):
os_name = 'Snapshot'
else:
os_name = self.module.params.get('os')
return self.query_resource_by_key(
key='name',
value=os_name,
resource='os',
use_cache=True
)
def get_snapshot(self):
return self.query_resource_by_key(
key='description',
value=self.module.params.get('snapshot'),
resource='snapshot',
use_cache=True
)
def get_ssh_keys(self):
ssh_key_names = self.module.params.get('ssh_keys')
if not ssh_key_names:
return []
ssh_keys = []
for ssh_key_name in ssh_key_names:
ssh_key = self.query_resource_by_key(
key='name',
value=ssh_key_name,
resource='sshkey',
use_cache=True
)
if ssh_key:
ssh_keys.append(ssh_key)
return ssh_keys
def get_region(self):
return self.query_resource_by_key(
key='name',
value=self.module.params.get('region'),
resource='regions',
use_cache=True
)
def get_plan(self):
return self.query_resource_by_key(
key='name',
value=self.module.params.get('plan'),
resource='plans',
use_cache=True
)
def get_firewall_group(self):
return self.query_resource_by_key(
key='description',
value=self.module.params.get('firewall_group'),
resource='firewall',
query_by='group_list'
)
def get_user_data(self):
user_data = self.module.params.get('user_data')
if user_data is not None:
user_data = to_text(base64.b64encode(to_bytes(user_data)))
return user_data
def get_server_user_data(self, server):
if not server or not server.get('SUBID'):
return None
user_data = self.api_query(path="/v1/server/get_user_data?SUBID=%s" % server.get('SUBID'))
return user_data.get('userdata')
def get_server(self, refresh=False):
if self.server is None or refresh:
self.server = None
server_list = self.api_query(path="/v1/server/list")
if server_list:
for server_id, server_data in server_list.items():
if server_data.get('label') == self.module.params.get('name'):
self.server = server_data
plan = self.query_resource_by_key(
key='VPSPLANID',
value=server_data['VPSPLANID'],
resource='plans',
use_cache=True
)
self.server['plan'] = plan.get('name')
os = self.query_resource_by_key(
key='OSID',
value=int(server_data['OSID']),
resource='os',
use_cache=True
)
self.server['os'] = os.get('name')
fwg_id = server_data.get('FIREWALLGROUPID')
fw = self.query_resource_by_key(
key='FIREWALLGROUPID',
value=server_data.get('FIREWALLGROUPID') if fwg_id and fwg_id != "0" else None,
resource='firewall',
query_by='group_list',
use_cache=True
)
self.server['firewall_group'] = fw.get('description')
return self.server
def present_server(self, start_server=True):
server = self.get_server()
if not server:
server = self._create_server(server=server)
else:
server = self._update_server(server=server, start_server=start_server)
return server
def _create_server(self, server=None):
required_params = [
'os',
'plan',
'region',
]
snapshot_restore = self.module.params.get('snapshot') is not None
if snapshot_restore:
required_params.remove('os')
self.module.fail_on_missing_params(required_params=required_params)
self.result['changed'] = True
if not self.module.check_mode:
data = {
'DCID': self.get_region().get('DCID'),
'VPSPLANID': self.get_plan().get('VPSPLANID'),
'FIREWALLGROUPID': self.get_firewall_group().get('FIREWALLGROUPID'),
'OSID': self.get_os().get('OSID'),
'SNAPSHOTID': self.get_snapshot().get('SNAPSHOTID'),
'label': self.module.params.get('name'),
'hostname': self.module.params.get('hostname'),
'SSHKEYID': ','.join([ssh_key['SSHKEYID'] for ssh_key in self.get_ssh_keys()]),
'enable_ipv6': self.get_yes_or_no('ipv6_enabled'),
'enable_private_network': self.get_yes_or_no('private_network_enabled'),
'auto_backups': self.get_yes_or_no('auto_backup_enabled'),
'notify_activate': self.get_yes_or_no('notify_activate'),
'tag': self.module.params.get('tag'),
'reserved_ip_v4': self.module.params.get('reserved_ip_v4'),
'user_data': self.get_user_data(),
'SCRIPTID': self.get_startup_script().get('SCRIPTID'),
}
self.api_query(
path="/v1/server/create",
method="POST",
data=data
)
server = self._wait_for_state(key='status', state='active')
server = self._wait_for_state(state='running', timeout=3600 if snapshot_restore else 60)
return server
def _update_auto_backups_setting(self, server, start_server):
auto_backup_enabled_changed = self.switch_enable_disable(server, 'auto_backup_enabled', 'auto_backups')
if auto_backup_enabled_changed:
if auto_backup_enabled_changed == "enable" and server['auto_backups'] == 'disable':
self.module.warn("Backups are disabled. Once disabled, backups can only be enabled again by customer support")
else:
server, warned = self._handle_power_status_for_update(server, start_server)
if not warned:
self.result['changed'] = True
self.result['diff']['before']['auto_backup_enabled'] = server.get('auto_backups')
self.result['diff']['after']['auto_backup_enabled'] = self.get_yes_or_no('auto_backup_enabled')
if not self.module.check_mode:
data = {
'SUBID': server['SUBID']
}
self.api_query(
path="/v1/server/backup_%s" % auto_backup_enabled_changed,
method="POST",
data=data
)
return server
def _update_ipv6_setting(self, server, start_server):
ipv6_enabled_changed = self.switch_enable_disable(server, 'ipv6_enabled', 'v6_main_ip')
if ipv6_enabled_changed:
if ipv6_enabled_changed == "disable":
self.module.warn("The Vultr API does not allow to disable IPv6")
else:
server, warned = self._handle_power_status_for_update(server, start_server)
if not warned:
self.result['changed'] = True
self.result['diff']['before']['ipv6_enabled'] = False
self.result['diff']['after']['ipv6_enabled'] = True
if not self.module.check_mode:
data = {
'SUBID': server['SUBID']
}
self.api_query(
path="/v1/server/ipv6_%s" % ipv6_enabled_changed,
method="POST",
data=data
)
server = self._wait_for_state(key='v6_main_ip')
return server
def _update_private_network_setting(self, server, start_server):
private_network_enabled_changed = self.switch_enable_disable(server, 'private_network_enabled', 'internal_ip')
if private_network_enabled_changed:
if private_network_enabled_changed == "disable":
self.module.warn("The Vultr API does not allow to disable private network")
else:
server, warned = self._handle_power_status_for_update(server, start_server)
if not warned:
self.result['changed'] = True
self.result['diff']['before']['private_network_enabled'] = False
self.result['diff']['after']['private_network_enabled'] = True
if not self.module.check_mode:
data = {
'SUBID': server['SUBID']
}
self.api_query(
path="/v1/server/private_network_%s" % private_network_enabled_changed,
method="POST",
data=data
)
return server
def _update_plan_setting(self, server, start_server):
plan = self.get_plan()
        plan_changed = bool(plan and plan['VPSPLANID'] != server.get('VPSPLANID'))
if plan_changed:
server, warned = self._handle_power_status_for_update(server, start_server)
if not warned:
self.result['changed'] = True
self.result['diff']['before']['plan'] = server.get('plan')
self.result['diff']['after']['plan'] = plan['name']
if not self.module.check_mode:
data = {
'SUBID': server['SUBID'],
'VPSPLANID': plan['VPSPLANID'],
}
self.api_query(
path="/v1/server/upgrade_plan",
method="POST",
data=data
)
return server
def _handle_power_status_for_update(self, server, start_server):
# Remember the power state before we handle any action
if self.server_power_state is None:
self.server_power_state = server['power_status']
# A stopped server can be updated
if self.server_power_state == "stopped":
return server, False
# A running server must be forced to update unless the wanted state is stopped
elif self.module.params.get('force') or not start_server:
warned = False
if not self.module.check_mode:
# Some update APIs would restart the VM, we handle the restart manually
# by stopping the server and start it at the end of the changes
server = self.stop_server(skip_results=True)
# Warn the user that a running server won't get changed
else:
warned = True
self.module.warn("Some changes won't be applied to running instances. " +
"Use force=true to allow the instance %s to be stopped/started." % server['label'])
return server, warned
def _update_server(self, server=None, start_server=True):
# Wait for server to unlock if restoring
        if (server.get('os') or '').strip() == 'Snapshot':
            server = self._wait_for_state(key='server_state', state='ok', timeout=3600)
# Update auto backups settings, stops server
server = self._update_auto_backups_setting(server=server, start_server=start_server)
# Update IPv6 settings, stops server
server = self._update_ipv6_setting(server=server, start_server=start_server)
# Update private network settings, stops server
server = self._update_private_network_setting(server=server, start_server=start_server)
# Update plan settings, stops server
server = self._update_plan_setting(server=server, start_server=start_server)
# User data
user_data = self.get_user_data()
server_user_data = self.get_server_user_data(server=server)
if user_data is not None and user_data != server_user_data:
self.result['changed'] = True
self.result['diff']['before']['user_data'] = server_user_data
self.result['diff']['after']['user_data'] = user_data
if not self.module.check_mode:
data = {
'SUBID': server['SUBID'],
'userdata': user_data,
}
self.api_query(
path="/v1/server/set_user_data",
method="POST",
data=data
)
# Tags
tag = self.module.params.get('tag')
if tag is not None and tag != server.get('tag'):
self.result['changed'] = True
self.result['diff']['before']['tag'] = server.get('tag')
self.result['diff']['after']['tag'] = tag
if not self.module.check_mode:
data = {
'SUBID': server['SUBID'],
'tag': tag,
}
self.api_query(
path="/v1/server/tag_set",
method="POST",
data=data
)
# Firewall group
firewall_group = self.get_firewall_group()
if firewall_group and firewall_group.get('description') != server.get('firewall_group'):
self.result['changed'] = True
self.result['diff']['before']['firewall_group'] = server.get('firewall_group')
self.result['diff']['after']['firewall_group'] = firewall_group.get('description')
if not self.module.check_mode:
data = {
'SUBID': server['SUBID'],
'FIREWALLGROUPID': firewall_group.get('FIREWALLGROUPID'),
}
self.api_query(
path="/v1/server/firewall_group_set",
method="POST",
data=data
)
# Start server again if it was running before the changes
if not self.module.check_mode:
if self.server_power_state in ['starting', 'running'] and start_server:
server = self.start_server(skip_results=True)
server = self._wait_for_state(key='status', state='active')
return server
def absent_server(self):
server = self.get_server()
if server:
self.result['changed'] = True
self.result['diff']['before']['id'] = server['SUBID']
self.result['diff']['after']['id'] = ""
if not self.module.check_mode:
data = {
'SUBID': server['SUBID']
}
self.api_query(
path="/v1/server/destroy",
method="POST",
data=data
)
for s in range(0, 60):
                    if server is None:
break
time.sleep(2)
server = self.get_server(refresh=True)
else:
self.fail_json(msg="Wait for server '%s' to get deleted timed out" % server['label'])
return server
def restart_server(self):
self.result['changed'] = True
server = self.get_server()
if server:
if not self.module.check_mode:
data = {
'SUBID': server['SUBID']
}
self.api_query(
path="/v1/server/reboot",
method="POST",
data=data
)
server = self._wait_for_state(state='running')
return server
def reinstall_server(self):
self.result['changed'] = True
server = self.get_server()
if server:
if not self.module.check_mode:
data = {
'SUBID': server['SUBID']
}
self.api_query(
path="/v1/server/reinstall",
method="POST",
data=data
)
server = self._wait_for_state(state='running')
return server
def _wait_for_state(self, key='power_status', state=None, timeout=60):
time.sleep(1)
server = self.get_server(refresh=True)
for s in range(0, timeout):
            # Check for truthiness when the wanted state is None
if state is None and server.get(key):
break
elif server.get(key) == state:
break
time.sleep(2)
server = self.get_server(refresh=True)
        # Timed out (the for-else branch runs only if the loop never hit break)
else:
if state is None:
msg = "Wait for '%s' timed out" % key
else:
msg = "Wait for '%s' to get into state '%s' timed out" % (key, state)
self.fail_json(msg=msg)
return server
def start_server(self, skip_results=False):
server = self.get_server()
if server:
if server['power_status'] == 'starting':
server = self._wait_for_state(state='running')
elif server['power_status'] != 'running':
if not skip_results:
self.result['changed'] = True
self.result['diff']['before']['power_status'] = server['power_status']
self.result['diff']['after']['power_status'] = "running"
if not self.module.check_mode:
data = {
'SUBID': server['SUBID']
}
self.api_query(
path="/v1/server/start",
method="POST",
data=data
)
server = self._wait_for_state(state='running')
return server
def stop_server(self, skip_results=False):
server = self.get_server()
if server and server['power_status'] != "stopped":
if not skip_results:
self.result['changed'] = True
self.result['diff']['before']['power_status'] = server['power_status']
self.result['diff']['after']['power_status'] = "stopped"
if not self.module.check_mode:
data = {
'SUBID': server['SUBID'],
}
self.api_query(
path="/v1/server/halt",
method="POST",
data=data
)
server = self._wait_for_state(state='stopped')
return server
def main():
argument_spec = vultr_argument_spec()
argument_spec.update(dict(
name=dict(required=True, aliases=['label']),
hostname=dict(),
os=dict(),
snapshot=dict(),
plan=dict(),
force=dict(type='bool', default=False),
notify_activate=dict(type='bool', default=False),
private_network_enabled=dict(type='bool'),
auto_backup_enabled=dict(type='bool'),
ipv6_enabled=dict(type='bool'),
tag=dict(),
reserved_ip_v4=dict(),
firewall_group=dict(),
startup_script=dict(),
user_data=dict(),
ssh_keys=dict(type='list', aliases=['ssh_key']),
region=dict(),
state=dict(choices=['present', 'absent', 'restarted', 'reinstalled', 'started', 'stopped'], default='present'),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
vultr_server = AnsibleVultrServer(module)
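    # Dispatch on the requested state: "started", "stopped" and "restarted"
    # first ensure the server exists, while "reinstalled" only reinstalls an
    # already existing server.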
if module.params.get('state') == "absent":
server = vultr_server.absent_server()
else:
if module.params.get('state') == "started":
server = vultr_server.present_server()
server = vultr_server.start_server()
elif module.params.get('state') == "stopped":
server = vultr_server.present_server(start_server=False)
server = vultr_server.stop_server()
elif module.params.get('state') == "restarted":
server = vultr_server.present_server()
server = vultr_server.restart_server()
elif module.params.get('state') == "reinstalled":
server = vultr_server.reinstall_server()
else:
server = vultr_server.present_server()
result = vultr_server.get_result(server)
module.exit_json(**result)
if __name__ == '__main__':
main()
|
jasonseminara/OpenSourceFinal
|
refs/heads/master
|
myvenv/lib/python3.5/site-packages/django/contrib/postgres/signals.py
|
548
|
from psycopg2 import ProgrammingError
from psycopg2.extras import register_hstore
from django.utils import six
def register_hstore_handler(connection, **kwargs):
if connection.vendor != 'postgresql':
return
try:
if six.PY2:
register_hstore(connection.connection, globally=True, unicode=True)
else:
register_hstore(connection.connection, globally=True)
except ProgrammingError:
# Hstore is not available on the database.
#
# If someone tries to create an hstore field it will error there.
# This is necessary as someone may be using PSQL without extensions
# installed but be using other features of contrib.postgres.
#
# This is also needed in order to create the connection in order to
# install the hstore extension.
pass
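# A sketch of how this handler is typically connected, assuming the usual
# contrib.postgres wiring (the actual hookup lives in
# django.contrib.postgres.apps; shown here only for context):
#
#   from django.db.backends.signals import connection_created
#   connection_created.connect(register_hstore_handler)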
|
catcosmo/froide
|
refs/heads/master
|
froide/foirequest/south_migrations/0032_auto__add_field_foirequest_resolution.py
|
6
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from froide.helper.auth_migration_util import USER_DB_NAME
APP_MODEL, APP_MODEL_NAME = 'account.User', 'account.user'
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'FoiRequest.resolution'
db.add_column(u'foirequest_foirequest', 'resolution',
self.gf('django.db.models.fields.CharField')(default='', max_length=50, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'FoiRequest.resolution'
db.delete_column(u'foirequest_foirequest', 'resolution')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
APP_MODEL_NAME: {
'Meta': {'object_name': 'User', 'db_table': "'%s'" % USER_DB_NAME},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'foirequest.deferredmessage': {
'Meta': {'ordering': "('timestamp',)", 'object_name': 'DeferredMessage'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mail': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'recipient': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'request': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiRequest']", 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
u'foirequest.foiattachment': {
'Meta': {'ordering': "('name',)", 'object_name': 'FoiAttachment'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'belongs_to': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiMessage']", 'null': 'True'}),
'can_approve': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'converted': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'original_set'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['foirequest.FoiAttachment']"}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255'}),
'filetype': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'format': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_converted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_redacted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'redacted': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'unredacted_set'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['foirequest.FoiAttachment']"}),
'size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'foirequest.foievent': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'FoiEvent'},
'context_json': ('django.db.models.fields.TextField', [], {}),
'event_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'public_body': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.PublicBody']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'request': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiRequest']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s']" % APP_MODEL, 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
u'foirequest.foimessage': {
'Meta': {'ordering': "('timestamp',)", 'object_name': 'FoiMessage'},
'content_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_escalation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_postal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_response': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'not_publishable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'original': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'plaintext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'plaintext_redacted': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'recipient': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'recipient_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'recipient_public_body': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'received_messages'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['publicbody.PublicBody']"}),
'redacted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'request': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiRequest']"}),
'sender_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'sender_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'sender_public_body': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'send_messages'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['publicbody.PublicBody']"}),
'sender_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s']" % APP_MODEL, 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'sent': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'subject_redacted': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'})
},
u'foirequest.foirequest': {
'Meta': {'ordering': "('last_message',)", 'object_name': 'FoiRequest'},
'checked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'costs': ('django.db.models.fields.FloatField', [], {'default': '0.0'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'due_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'first_message': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_foi': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.Jurisdiction']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'last_message': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'law': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.FoiLaw']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'public_body': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.PublicBody']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'refusal_reason': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'resolution': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'resolved_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'same_as': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiRequest']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'same_as_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'secret_address': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s']" % APP_MODEL, 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'visibility': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'})
},
u'foirequest.publicbodysuggestion': {
'Meta': {'ordering': "('timestamp',)", 'object_name': 'PublicBodySuggestion'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public_body': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.PublicBody']"}),
'reason': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'request': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiRequest']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['%s']" % APP_MODEL, 'null': 'True', 'on_delete': 'models.SET_NULL'})
},
u'foirequest.taggedfoirequest': {
'Meta': {'object_name': 'TaggedFoiRequest'},
'content_object': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['foirequest.FoiRequest']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'foirequest_taggedfoirequest_items'", 'to': u"orm['taggit.Tag']"})
},
u'publicbody.foilaw': {
'Meta': {'object_name': 'FoiLaw'},
'combined': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['publicbody.FoiLaw']", 'symmetrical': 'False', 'blank': 'True'}),
'created': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.Jurisdiction']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'letter_end': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'letter_start': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'long_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'max_response_time': ('django.db.models.fields.IntegerField', [], {'default': '30', 'null': 'True', 'blank': 'True'}),
'max_response_time_unit': ('django.db.models.fields.CharField', [], {'default': "'day'", 'max_length': '32', 'blank': 'True'}),
'mediator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'mediating_laws'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': u"orm['publicbody.PublicBody']", 'blank': 'True', 'null': 'True'}),
'meta': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'priority': ('django.db.models.fields.SmallIntegerField', [], {'default': '3'}),
'refusal_reasons': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'request_note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['sites.Site']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'updated': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
u'publicbody.jurisdiction': {
'Meta': {'object_name': 'Jurisdiction'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'rank': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
u'publicbody.publicbody': {
'Meta': {'ordering': "('name',)", 'object_name': 'PublicBody'},
'_created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'public_body_creators'", 'on_delete': 'models.SET_NULL', 'default': '1', 'to': u"orm['%s']" % APP_MODEL, 'blank': 'True', 'null': 'True'}),
'_updated_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'public_body_updaters'", 'on_delete': 'models.SET_NULL', 'default': '1', 'to': u"orm['%s']" % APP_MODEL, 'blank': 'True', 'null': 'True'}),
'address': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'classification': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'classification_slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'contact': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'depth': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.Jurisdiction']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'laws': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['publicbody.FoiLaw']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'number_of_requests': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'other_names': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'children'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': u"orm['publicbody.PublicBody']", 'blank': 'True', 'null': 'True'}),
'request_note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'root': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'descendants'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': u"orm['publicbody.PublicBody']", 'blank': 'True', 'null': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': u"orm['sites.Site']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'topic': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['publicbody.PublicBodyTopic']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'website_dump': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'publicbody.publicbodytopic': {
'Meta': {'object_name': 'PublicBodyTopic'},
'count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'rank': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
u'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
}
}
complete_apps = ['foirequest']
|
ipmobiletech/owncloud-ios
|
refs/heads/develop
|
automation_Test/loginTest.py
|
14
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from appium import webdriver
import unittest
import constants as const
import loginView
import filesView
import settingsView
import actions
class loginTest(unittest.TestCase):
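    """UI login tests for the ownCloud iOS client, driven through Appium."""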
def setUp(self):
# set up appium
self.driver = actions.getWebDriver()
#self.driver.implicitly_wait(60)
def tearDown(self):
self.driver.quit()
def test_ui_login_ok(self):
server_url = const.K_URL_1
user = const.K_USER_1
password = const.K_PASSWORD_1
ssl = const.K_SELF_SIGNED_1
actions.doFirstLoginWith(self,server_url,user,password,ssl)
actions.assert_is_in_files_view(self)
#import ipdb; ipdb.set_trace()
def test_ui_login_incorrect_password(self):
server_url = const.K_URL_1
user = const.K_USER_1
password = const.K_PASSWORD_WRONG_1
ssl = const.K_SELF_SIGNED_1
actions.doFirstLoginWith(self,server_url,user,password,ssl)
actions.assert_is_not_in_files_view(self)
self.assertEqual(self.driver.find_elements_by_class_name(loginView.user_password_field_class)[loginView.user_password_field_index].get_attribute("name"), loginView.user_password_field_name)
#import ipdb; ipdb.set_trace()
def test_ui_multiaccount(self):
driver = self.driver
server_url = const.K_URL_1
user = const.K_USER_1
password = const.K_PASSWORD_1
ssl = const.K_SELF_SIGNED_1
actions.doFirstLoginWith(self,server_url,user,password,ssl)
actions.assert_is_in_files_view(self)
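        # Open the settings screen and add a second account on top of the first login.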
        settingsButton = driver.find_element_by_xpath(filesView.settingsButton_xpath)
        self.assertEqual(settingsButton.get_attribute("name"), filesView.settingsButton_name)
        settingsButton.click()
addNewAccountButton = driver.find_element_by_xpath(settingsView.addNewAccountButton_xpath)
self.assertEqual(addNewAccountButton.get_attribute("name"), settingsView.addNewAccountButton_name)
addNewAccountButton.click()
actions.doLoginWith(self,const.K_URL_2,const.K_USER_2,const.K_PASSWORD_2,const.K_SELF_SIGNED_2)
actions.assert_is_in_files_view(self)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(loginTest)
unittest.TextTestRunner(verbosity=2).run(suite)
|
raccoongang/edx-platform
|
refs/heads/ginkgo-rg
|
common/djangoapps/student/urls.py
|
14
|
"""
URLs for student app
"""
from django.conf import settings
from django.conf.urls import patterns, url
from student.views import LogoutView
urlpatterns = (
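    # The first element is the view-function prefix consumed by patterns(*urlpatterns) below.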
'student.views',
url(r'^logout$', LogoutView.as_view(), name='logout'),
# TODO: standardize login
# login endpoint used by cms.
url(r'^login_post$', 'login_user', name='login_post'),
# login endpoints used by lms.
url(r'^login_ajax$', 'login_user', name="login"),
url(r'^login_ajax/(?P<error>[^/]*)$', 'login_user'),
url(r'^email_confirm/(?P<key>[^/]*)$', 'confirm_email_change'),
url(r'^create_account$', 'create_account', name='create_account'),
url(r'^activate/(?P<key>[^/]*)$', 'activate_account', name="activate"),
url(r'^accounts/disable_account_ajax$', 'disable_account_ajax', name="disable_account_ajax"),
url(r'^accounts/manage_user_standing', 'manage_user_standing', name='manage_user_standing'),
url(r'^change_setting$', 'change_setting', name='change_setting'),
url(r'^change_email_settings$', 'change_email_settings', name='change_email_settings'),
# password reset in student.views (see below for password reset django views)
url(r'^password_reset/$', 'password_reset', name='password_reset'),
url(
r'^password_reset_confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm_wrapper',
name='password_reset_confirm',
),
)
# enable automatic login
if settings.FEATURES.get('AUTOMATIC_AUTH_FOR_TESTING'):
urlpatterns += (
url(r'^auto_auth$', 'auto_auth'),
)
# add all student.views url patterns
urlpatterns = patterns(*urlpatterns)
# password reset django views (see above for password reset student.views)
urlpatterns += patterns(
'django.contrib.auth.views',
# TODO: Replace with Mako-ized views
url(
r'^password_reset_complete/$',
'password_reset_complete',
name='password_reset_complete',
),
)
|
sklam/numba
|
refs/heads/master
|
numba/cuda/tests/cudapy/test_alignment.py
|
1
|
import numpy as np
from numba import from_dtype, cuda
from numba.cuda.testing import skip_on_cudasim, CUDATestCase
import unittest
class TestAlignment(CUDATestCase):
def test_record_alignment(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')], align=True)
rec = from_dtype(rec_dtype)
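        # The kernel copies the float64 field 'b' into the int32 field 'a' of each record.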
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
a_recarray = np.recarray(3, dtype=rec_dtype)
for i in range(a_recarray.size):
a_rec = a_recarray[i]
a_rec.a = 0
a_rec.b = (i + 1) * 123
foo[1, 3](a_recarray)
self.assertTrue(np.all(a_recarray.a == a_recarray.b))
@skip_on_cudasim('Simulator does not check alignment')
def test_record_alignment_error(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')])
rec = from_dtype(rec_dtype)
with self.assertRaises(Exception) as raises:
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
self.assertTrue('type float64 is not aligned' in str(raises.exception))
if __name__ == '__main__':
unittest.main()
|
myarjunar/QGIS
|
refs/heads/master
|
python/plugins/processing/algs/grass7/ext/i_pansharpen.py
|
2
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
i_pansharpen.py
---------------
Date : March 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from __future__ import absolute_import
__author__ = 'Médéric Ribreux'
__date__ = 'March 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from processing.core.parameters import getParameterFromString
def processCommand(alg):
# Temporary remove outputs:
outputs = [alg.getOutputFromName('{}output'.format(f)) for f in ['red', 'green', 'blue']]
for out in outputs:
alg.removeOutputFromName(out.name)
# create a false output
base = getParameterFromString('ParameterString|output|Name for output basename raster map(s)|None|False|False')
base.value = alg.getTempFilename()
alg.addParameter(base)
alg.processCommand()
# Re-add outputs
for output in outputs:
alg.addOutput(output)
def processOutputs(alg):
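    # Export each pansharpened channel from the GRASS mapset to its requested output file via r.out.gdal.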
base = alg.getParameterValue('output')
for channel in ['red', 'green', 'blue']:
command = 'r.out.gdal -c -t -f --overwrite createopt="TFW=YES,COMPRESS=LZW" input={} output=\"{}\"'.format(
'{}_{}'.format(base, channel),
alg.getOutputValue('{}output'.format(channel))
)
alg.commands.append(command)
alg.outputCommands.append(command)
|
lmesz/compose
|
refs/heads/master
|
tests/unit/config_test.py
|
3
|
from __future__ import print_function
import os
import shutil
import tempfile
from operator import itemgetter
from .. import mock
from .. import unittest
from compose.config import config
from compose.config.errors import ConfigurationError
def make_service_dict(name, service_dict, working_dir, filename=None):
"""
Test helper function to construct a ServiceLoader
"""
return config.ServiceLoader(
working_dir=working_dir,
filename=filename,
service_name=name,
service_dict=service_dict).make_service_dict()
def service_sort(services):
return sorted(services, key=itemgetter('name'))
class ConfigTest(unittest.TestCase):
def test_load(self):
service_dicts = config.load(
config.ConfigDetails(
{
'foo': {'image': 'busybox'},
'bar': {'image': 'busybox', 'environment': ['FOO=1']},
},
'tests/fixtures/extends',
'common.yml'
)
)
self.assertEqual(
service_sort(service_dicts),
service_sort([
{
'name': 'bar',
'image': 'busybox',
'environment': {'FOO': '1'},
},
{
'name': 'foo',
'image': 'busybox',
}
])
)
def test_load_throws_error_when_not_dict(self):
with self.assertRaises(ConfigurationError):
config.load(
config.ConfigDetails(
{'web': 'busybox:latest'},
'working_dir',
'filename.yml'
)
)
    def test_config_invalid_service_names(self):
        # The context manager goes inside the loop; wrapping the whole loop in
        # a single assertRaises would stop at the first raised error and leave
        # the remaining names untested.
        for invalid_name in ['?not?allowed', ' ', '', '!', '/', '\xe2']:
            with self.assertRaises(ConfigurationError):
                config.load(
                    config.ConfigDetails(
                        {invalid_name: {'image': 'busybox'}},
                        'working_dir',
                        'filename.yml'
                    )
                )
def test_config_integer_service_name_raise_validation_error(self):
expected_error_msg = "Service name: 1 needs to be a string, eg '1'"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
{1: {'image': 'busybox'}},
'working_dir',
'filename.yml'
)
)
def test_config_valid_service_names(self):
for valid_name in ['_', '-', '.__.', '_what-up.', 'what_.up----', 'whatup']:
config.load(
config.ConfigDetails(
{valid_name: {'image': 'busybox'}},
'tests/fixtures/extends',
'common.yml'
)
)
    def test_config_invalid_ports_format_validation(self):
        expected_error_msg = "Service 'web' configuration key 'ports' contains an invalid type"
        # Assert inside the loop so every invalid value is exercised, not just
        # the first one.
        for invalid_ports in [{"1": "8000"}, False, 0, "8000", 8000, ["8000", "8000"]]:
            with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
                config.load(
                    config.ConfigDetails(
                        {'web': {'image': 'busybox', 'ports': invalid_ports}},
                        'working_dir',
                        'filename.yml'
                    )
                )
def test_config_valid_ports_format_validation(self):
valid_ports = [["8000", "9000"], ["8000/8050"], ["8000"], [8000], ["49153-49154:3002-3003"]]
for ports in valid_ports:
config.load(
config.ConfigDetails(
{'web': {'image': 'busybox', 'ports': ports}},
'working_dir',
'filename.yml'
)
)
def test_config_hint(self):
expected_error_msg = "(did you mean 'privileged'?)"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
{
'foo': {'image': 'busybox', 'privilige': 'something'},
},
'tests/fixtures/extends',
'filename.yml'
)
)
def test_invalid_config_build_and_image_specified(self):
expected_error_msg = "Service 'foo' has both an image and build path specified."
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
{
'foo': {'image': 'busybox', 'build': '.'},
},
'tests/fixtures/extends',
'filename.yml'
)
)
def test_invalid_config_type_should_be_an_array(self):
expected_error_msg = "Service 'foo' configuration key 'links' contains an invalid type, it should be an array"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
{
'foo': {'image': 'busybox', 'links': 'an_link'},
},
'tests/fixtures/extends',
'filename.yml'
)
)
def test_invalid_config_not_a_dictionary(self):
expected_error_msg = "Top level object needs to be a dictionary."
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
['foo', 'lol'],
'tests/fixtures/extends',
'filename.yml'
)
)
def test_invalid_config_not_unique_items(self):
expected_error_msg = "has non-unique elements"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
{
'web': {'build': '.', 'devices': ['/dev/foo:/dev/foo', '/dev/foo:/dev/foo']}
},
'tests/fixtures/extends',
'filename.yml'
)
)
def test_invalid_list_of_strings_format(self):
expected_error_msg = "'command' contains an invalid type, valid types are string or list of strings"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
{
'web': {'build': '.', 'command': [1]}
},
'tests/fixtures/extends',
'filename.yml'
)
)
def test_config_image_and_dockerfile_raise_validation_error(self):
expected_error_msg = "Service 'web' has both an image and alternate Dockerfile."
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
{'web': {'image': 'busybox', 'dockerfile': 'Dockerfile.alt'}},
'working_dir',
'filename.yml'
)
)
def test_config_extra_hosts_string_raises_validation_error(self):
expected_error_msg = "Service 'web' configuration key 'extra_hosts' contains an invalid type"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
{'web': {
'image': 'busybox',
'extra_hosts': 'somehost:162.242.195.82'
}},
'working_dir',
'filename.yml'
)
)
def test_config_extra_hosts_list_of_dicts_validation_error(self):
expected_error_msg = "Service 'web' configuration key 'extra_hosts' contains an invalid type"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
{'web': {
'image': 'busybox',
'extra_hosts': [
{'somehost': '162.242.195.82'},
{'otherhost': '50.31.209.229'}
]
}},
'working_dir',
'filename.yml'
)
)
def test_valid_config_which_allows_two_type_definitions(self):
expose_values = [["8000"], [8000]]
for expose in expose_values:
service = config.load(
config.ConfigDetails(
{'web': {
'image': 'busybox',
'expose': expose
}},
'working_dir',
'filename.yml'
)
)
self.assertEqual(service[0]['expose'], expose)
class InterpolationTest(unittest.TestCase):
@mock.patch.dict(os.environ)
def test_config_file_with_environment_variable(self):
os.environ.update(
IMAGE="busybox",
HOST_PORT="80",
LABEL_VALUE="myvalue",
)
service_dicts = config.load(
config.find('tests/fixtures/environment-interpolation', None),
)
self.assertEqual(service_dicts, [
{
'name': 'web',
'image': 'busybox',
'ports': ['80:8000'],
'labels': {'mylabel': 'myvalue'},
'hostname': 'host-',
'command': '${ESCAPED}',
}
])
@mock.patch.dict(os.environ)
def test_unset_variable_produces_warning(self):
os.environ.pop('FOO', None)
os.environ.pop('BAR', None)
config_details = config.ConfigDetails(
config={
'web': {
'image': '${FOO}',
'command': '${BAR}',
'container_name': '${BAR}',
},
},
working_dir='.',
filename=None,
)
with mock.patch('compose.config.interpolation.log') as log:
config.load(config_details)
self.assertEqual(2, log.warn.call_count)
warnings = sorted(args[0][0] for args in log.warn.call_args_list)
self.assertIn('BAR', warnings[0])
self.assertIn('FOO', warnings[1])
@mock.patch.dict(os.environ)
def test_invalid_interpolation(self):
with self.assertRaises(config.ConfigurationError) as cm:
config.load(
config.ConfigDetails(
{'web': {'image': '${'}},
'working_dir',
'filename.yml'
)
)
self.assertIn('Invalid', cm.exception.msg)
self.assertIn('for "image" option', cm.exception.msg)
self.assertIn('in service "web"', cm.exception.msg)
self.assertIn('"${"', cm.exception.msg)
@mock.patch.dict(os.environ)
def test_volume_binding_with_environment_variable(self):
os.environ['VOLUME_PATH'] = '/host/path'
d = config.load(
config.ConfigDetails(
config={'foo': {'build': '.', 'volumes': ['${VOLUME_PATH}:/container/path']}},
working_dir='.',
filename=None,
)
)[0]
self.assertEqual(d['volumes'], ['/host/path:/container/path'])
@mock.patch.dict(os.environ)
def test_volume_binding_with_home(self):
os.environ['HOME'] = '/home/user'
d = make_service_dict('foo', {'build': '.', 'volumes': ['~:/container/path']}, working_dir='.')
self.assertEqual(d['volumes'], ['/home/user:/container/path'])
@mock.patch.dict(os.environ)
def test_volume_binding_with_local_dir_name_raises_warning(self):
def make_dict(**config):
config['build'] = '.'
make_service_dict('foo', config, working_dir='.')
with mock.patch('compose.config.config.log.warn') as warn:
make_dict(volumes=['/container/path'])
self.assertEqual(0, warn.call_count)
make_dict(volumes=['/data:/container/path'])
self.assertEqual(0, warn.call_count)
make_dict(volumes=['.:/container/path'])
self.assertEqual(0, warn.call_count)
make_dict(volumes=['..:/container/path'])
self.assertEqual(0, warn.call_count)
make_dict(volumes=['./data:/container/path'])
self.assertEqual(0, warn.call_count)
make_dict(volumes=['../data:/container/path'])
self.assertEqual(0, warn.call_count)
make_dict(volumes=['.profile:/container/path'])
self.assertEqual(0, warn.call_count)
make_dict(volumes=['~:/container/path'])
self.assertEqual(0, warn.call_count)
make_dict(volumes=['~/data:/container/path'])
self.assertEqual(0, warn.call_count)
make_dict(volumes=['~tmp:/container/path'])
self.assertEqual(0, warn.call_count)
make_dict(volumes=['data:/container/path'], volume_driver='mydriver')
self.assertEqual(0, warn.call_count)
make_dict(volumes=['data:/container/path'])
self.assertEqual(1, warn.call_count)
warning = warn.call_args[0][0]
self.assertIn('"data:/container/path"', warning)
self.assertIn('"./data:/container/path"', warning)
def test_named_volume_with_driver_does_not_expand(self):
d = make_service_dict('foo', {
'build': '.',
'volumes': ['namedvolume:/data'],
'volume_driver': 'foodriver',
}, working_dir='.')
self.assertEqual(d['volumes'], ['namedvolume:/data'])
@mock.patch.dict(os.environ)
def test_home_directory_with_driver_does_not_expand(self):
os.environ['NAME'] = 'surprise!'
d = make_service_dict('foo', {
'build': '.',
'volumes': ['~:/data'],
'volume_driver': 'foodriver',
}, working_dir='.')
self.assertEqual(d['volumes'], ['~:/data'])
class MergePathMappingTest(object):
def config_name(self):
return ""
def test_empty(self):
service_dict = config.merge_service_dicts({}, {})
self.assertNotIn(self.config_name(), service_dict)
def test_no_override(self):
service_dict = config.merge_service_dicts(
{self.config_name(): ['/foo:/code', '/data']},
{},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/foo:/code', '/data']))
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{self.config_name(): ['/bar:/code']},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/bar:/code']))
def test_override_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name(): ['/foo:/code', '/data']},
{self.config_name(): ['/bar:/code']},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/bar:/code', '/data']))
def test_add_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name(): ['/foo:/code', '/data']},
{self.config_name(): ['/bar:/code', '/quux:/data']},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/bar:/code', '/quux:/data']))
def test_remove_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name(): ['/foo:/code', '/quux:/data']},
{self.config_name(): ['/bar:/code', '/data']},
)
self.assertEqual(set(service_dict[self.config_name()]), set(['/bar:/code', '/data']))
class MergeVolumesTest(unittest.TestCase, MergePathMappingTest):
def config_name(self):
return 'volumes'
class MergeDevicesTest(unittest.TestCase, MergePathMappingTest):
def config_name(self):
return 'devices'
class BuildOrImageMergeTest(unittest.TestCase):
def test_merge_build_or_image_no_override(self):
self.assertEqual(
config.merge_service_dicts({'build': '.'}, {}),
{'build': '.'},
)
self.assertEqual(
config.merge_service_dicts({'image': 'redis'}, {}),
{'image': 'redis'},
)
def test_merge_build_or_image_override_with_same(self):
self.assertEqual(
config.merge_service_dicts({'build': '.'}, {'build': './web'}),
{'build': './web'},
)
self.assertEqual(
config.merge_service_dicts({'image': 'redis'}, {'image': 'postgres'}),
{'image': 'postgres'},
)
def test_merge_build_or_image_override_with_other(self):
self.assertEqual(
config.merge_service_dicts({'build': '.'}, {'image': 'redis'}),
{'image': 'redis'}
)
self.assertEqual(
config.merge_service_dicts({'image': 'redis'}, {'build': '.'}),
{'build': '.'}
)
class MergeListsTest(unittest.TestCase):
def test_empty(self):
service_dict = config.merge_service_dicts({}, {})
self.assertNotIn('ports', service_dict)
def test_no_override(self):
service_dict = config.merge_service_dicts(
{'ports': ['10:8000', '9000']},
{},
)
self.assertEqual(set(service_dict['ports']), set(['10:8000', '9000']))
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{'ports': ['10:8000', '9000']},
)
self.assertEqual(set(service_dict['ports']), set(['10:8000', '9000']))
def test_add_item(self):
service_dict = config.merge_service_dicts(
{'ports': ['10:8000', '9000']},
{'ports': ['20:8000']},
)
self.assertEqual(set(service_dict['ports']), set(['10:8000', '9000', '20:8000']))
class MergeStringsOrListsTest(unittest.TestCase):
def test_no_override(self):
service_dict = config.merge_service_dicts(
{'dns': '8.8.8.8'},
{},
)
self.assertEqual(set(service_dict['dns']), set(['8.8.8.8']))
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{'dns': '8.8.8.8'},
)
self.assertEqual(set(service_dict['dns']), set(['8.8.8.8']))
def test_add_string(self):
service_dict = config.merge_service_dicts(
{'dns': ['8.8.8.8']},
{'dns': '9.9.9.9'},
)
self.assertEqual(set(service_dict['dns']), set(['8.8.8.8', '9.9.9.9']))
def test_add_list(self):
service_dict = config.merge_service_dicts(
{'dns': '8.8.8.8'},
{'dns': ['9.9.9.9']},
)
self.assertEqual(set(service_dict['dns']), set(['8.8.8.8', '9.9.9.9']))
class MergeLabelsTest(unittest.TestCase):
def test_empty(self):
service_dict = config.merge_service_dicts({}, {})
self.assertNotIn('labels', service_dict)
def test_no_override(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar']}, 'tests/'),
make_service_dict('foo', {'build': '.'}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '1', 'bar': ''})
def test_no_base(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'build': '.'}, 'tests/'),
make_service_dict('foo', {'build': '.', 'labels': ['foo=2']}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '2'})
def test_override_explicit_value(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar']}, 'tests/'),
make_service_dict('foo', {'build': '.', 'labels': ['foo=2']}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '2', 'bar': ''})
def test_add_explicit_value(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar']}, 'tests/'),
make_service_dict('foo', {'build': '.', 'labels': ['bar=2']}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '1', 'bar': '2'})
def test_remove_explicit_value(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar=2']}, 'tests/'),
make_service_dict('foo', {'build': '.', 'labels': ['bar']}, 'tests/'),
)
self.assertEqual(service_dict['labels'], {'foo': '1', 'bar': ''})
class MemoryOptionsTest(unittest.TestCase):
def test_validation_fails_with_just_memswap_limit(self):
"""
When you set a 'memswap_limit' it is invalid config unless you also set
a mem_limit
"""
expected_error_msg = (
"Invalid 'memswap_limit' configuration for 'foo' service: when "
"defining 'memswap_limit' you must set 'mem_limit' as well"
)
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
{
'foo': {'image': 'busybox', 'memswap_limit': 2000000},
},
'tests/fixtures/extends',
'filename.yml'
)
)
def test_validation_with_correct_memswap_values(self):
service_dict = config.load(
config.ConfigDetails(
{'foo': {'image': 'busybox', 'mem_limit': 1000000, 'memswap_limit': 2000000}},
'tests/fixtures/extends',
'common.yml'
)
)
self.assertEqual(service_dict[0]['memswap_limit'], 2000000)
def test_memswap_can_be_a_string(self):
service_dict = config.load(
config.ConfigDetails(
{'foo': {'image': 'busybox', 'mem_limit': "1G", 'memswap_limit': "512M"}},
'tests/fixtures/extends',
'common.yml'
)
)
self.assertEqual(service_dict[0]['memswap_limit'], "512M")
class EnvTest(unittest.TestCase):
def test_parse_environment_as_list(self):
environment = [
'NORMAL=F1',
'CONTAINS_EQUALS=F=2',
'TRAILING_EQUALS=',
]
self.assertEqual(
config.parse_environment(environment),
{'NORMAL': 'F1', 'CONTAINS_EQUALS': 'F=2', 'TRAILING_EQUALS': ''},
)
def test_parse_environment_as_dict(self):
environment = {
'NORMAL': 'F1',
'CONTAINS_EQUALS': 'F=2',
'TRAILING_EQUALS': None,
}
self.assertEqual(config.parse_environment(environment), environment)
def test_parse_environment_invalid(self):
with self.assertRaises(ConfigurationError):
config.parse_environment('a=b')
def test_parse_environment_empty(self):
self.assertEqual(config.parse_environment(None), {})
@mock.patch.dict(os.environ)
def test_resolve_environment(self):
os.environ['FILE_DEF'] = 'E1'
os.environ['FILE_DEF_EMPTY'] = 'E2'
os.environ['ENV_DEF'] = 'E3'
service_dict = make_service_dict(
'foo', {
'build': '.',
'environment': {
'FILE_DEF': 'F1',
'FILE_DEF_EMPTY': '',
'ENV_DEF': None,
'NO_DEF': None
},
},
'tests/'
)
self.assertEqual(
service_dict['environment'],
{'FILE_DEF': 'F1', 'FILE_DEF_EMPTY': '', 'ENV_DEF': 'E3', 'NO_DEF': ''},
)
def test_env_from_file(self):
service_dict = make_service_dict(
'foo',
{'build': '.', 'env_file': 'one.env'},
'tests/fixtures/env',
)
self.assertEqual(
service_dict['environment'],
{'ONE': '2', 'TWO': '1', 'THREE': '3', 'FOO': 'bar'},
)
def test_env_from_multiple_files(self):
service_dict = make_service_dict(
'foo',
{'build': '.', 'env_file': ['one.env', 'two.env']},
'tests/fixtures/env',
)
self.assertEqual(
service_dict['environment'],
{'ONE': '2', 'TWO': '1', 'THREE': '3', 'FOO': 'baz', 'DOO': 'dah'},
)
def test_env_nonexistent_file(self):
options = {'env_file': 'nonexistent.env'}
self.assertRaises(
ConfigurationError,
lambda: make_service_dict('foo', options, 'tests/fixtures/env'),
)
@mock.patch.dict(os.environ)
def test_resolve_environment_from_file(self):
os.environ['FILE_DEF'] = 'E1'
os.environ['FILE_DEF_EMPTY'] = 'E2'
os.environ['ENV_DEF'] = 'E3'
service_dict = make_service_dict(
'foo',
{'build': '.', 'env_file': 'resolve.env'},
'tests/fixtures/env',
)
self.assertEqual(
service_dict['environment'],
{'FILE_DEF': 'F1', 'FILE_DEF_EMPTY': '', 'ENV_DEF': 'E3', 'NO_DEF': ''},
)
@mock.patch.dict(os.environ)
def test_resolve_path(self):
os.environ['HOSTENV'] = '/tmp'
os.environ['CONTAINERENV'] = '/host/tmp'
service_dict = config.load(
config.ConfigDetails(
config={'foo': {'build': '.', 'volumes': ['$HOSTENV:$CONTAINERENV']}},
working_dir="tests/fixtures/env",
filename=None,
)
)[0]
self.assertEqual(set(service_dict['volumes']), set(['/tmp:/host/tmp']))
service_dict = config.load(
config.ConfigDetails(
config={'foo': {'build': '.', 'volumes': ['/opt${HOSTENV}:/opt${CONTAINERENV}']}},
working_dir="tests/fixtures/env",
filename=None,
)
)[0]
self.assertEqual(set(service_dict['volumes']), set(['/opt/tmp:/opt/host/tmp']))
def load_from_filename(filename):
return config.load(config.find('.', filename))
class ExtendsTest(unittest.TestCase):
def test_extends(self):
service_dicts = load_from_filename('tests/fixtures/extends/docker-compose.yml')
self.assertEqual(service_sort(service_dicts), service_sort([
{
'name': 'mydb',
'image': 'busybox',
'command': 'top',
},
{
'name': 'myweb',
'image': 'busybox',
'command': 'top',
'links': ['mydb:db'],
'environment': {
"FOO": "1",
"BAR": "2",
"BAZ": "2",
},
}
]))
def test_nested(self):
service_dicts = load_from_filename('tests/fixtures/extends/nested.yml')
self.assertEqual(service_dicts, [
{
'name': 'myweb',
'image': 'busybox',
'command': '/bin/true',
'environment': {
"FOO": "2",
"BAR": "2",
},
},
])
def test_self_referencing_file(self):
"""
We specify a 'file' key that is the filename we're already in.
"""
service_dicts = load_from_filename('tests/fixtures/extends/specify-file-as-self.yml')
self.assertEqual(service_sort(service_dicts), service_sort([
{
'environment':
{
'YEP': '1', 'BAR': '1', 'BAZ': '3'
},
'image': 'busybox',
'name': 'myweb'
},
{
'environment':
{'YEP': '1'},
'image': 'busybox',
'name': 'otherweb'
},
{
'environment':
{'YEP': '1', 'BAZ': '3'},
'image': 'busybox',
'name': 'web'
}
]))
def test_circular(self):
try:
load_from_filename('tests/fixtures/extends/circle-1.yml')
raise Exception("Expected config.CircularReference to be raised")
except config.CircularReference as e:
self.assertEqual(
[(os.path.basename(filename), service_name) for (filename, service_name) in e.trail],
[
('circle-1.yml', 'web'),
('circle-2.yml', 'web'),
('circle-1.yml', 'web'),
],
)
def test_extends_validation_empty_dictionary(self):
with self.assertRaisesRegexp(ConfigurationError, 'service'):
config.load(
config.ConfigDetails(
{
'web': {'image': 'busybox', 'extends': {}},
},
'tests/fixtures/extends',
'filename.yml'
)
)
def test_extends_validation_missing_service_key(self):
with self.assertRaisesRegexp(ConfigurationError, "'service' is a required property"):
config.load(
config.ConfigDetails(
{
'web': {'image': 'busybox', 'extends': {'file': 'common.yml'}},
},
'tests/fixtures/extends',
'filename.yml'
)
)
def test_extends_validation_invalid_key(self):
expected_error_msg = "Unsupported config option for 'web' service: 'rogue_key'"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
{
'web': {
'image': 'busybox',
'extends': {
'file': 'common.yml',
'service': 'web',
'rogue_key': 'is not allowed'
}
},
},
'tests/fixtures/extends',
'filename.yml'
)
)
def test_extends_validation_sub_property_key(self):
expected_error_msg = "Service 'web' configuration key 'extends' 'file' contains an invalid type"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
config.load(
config.ConfigDetails(
{
'web': {
'image': 'busybox',
'extends': {
'file': 1,
'service': 'web',
}
},
},
'tests/fixtures/extends',
'filename.yml'
)
)
def test_extends_validation_no_file_key_no_filename_set(self):
dictionary = {'extends': {'service': 'web'}}
def load_config():
return make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')
self.assertRaisesRegexp(ConfigurationError, 'file', load_config)
def test_extends_validation_valid_config(self):
service = config.load(
config.ConfigDetails(
{
'web': {'image': 'busybox', 'extends': {'service': 'web', 'file': 'common.yml'}},
},
'tests/fixtures/extends',
'common.yml'
)
)
self.assertEquals(len(service), 1)
self.assertIsInstance(service[0], dict)
self.assertEquals(service[0]['command'], "/bin/true")
def test_extended_service_with_invalid_config(self):
expected_error_msg = "Service 'myweb' has neither an image nor a build path specified"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
load_from_filename('tests/fixtures/extends/service-with-invalid-schema.yml')
def test_extended_service_with_valid_config(self):
service = load_from_filename('tests/fixtures/extends/service-with-valid-composite-extends.yml')
self.assertEquals(service[0]['command'], "top")
def test_extends_file_defaults_to_self(self):
"""
Test not specifying a file in our extends options that the
config is valid and correctly extends from itself.
"""
service_dicts = load_from_filename('tests/fixtures/extends/no-file-specified.yml')
self.assertEqual(service_sort(service_dicts), service_sort([
{
'name': 'myweb',
'image': 'busybox',
'environment': {
"BAR": "1",
"BAZ": "3",
}
},
{
'name': 'web',
'image': 'busybox',
'environment': {
"BAZ": "3",
}
}
]))
def test_invalid_links_in_extended_service(self):
expected_error_msg = "services with 'links' cannot be extended"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
load_from_filename('tests/fixtures/extends/invalid-links.yml')
def test_invalid_volumes_from_in_extended_service(self):
expected_error_msg = "services with 'volumes_from' cannot be extended"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
load_from_filename('tests/fixtures/extends/invalid-volumes.yml')
def test_invalid_net_in_extended_service(self):
expected_error_msg = "services with 'net: container' cannot be extended"
with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
load_from_filename('tests/fixtures/extends/invalid-net.yml')
@mock.patch.dict(os.environ)
def test_valid_interpolation_in_extended_service(self):
os.environ.update(
HOSTNAME_VALUE="penguin",
)
expected_interpolated_value = "host-penguin"
service_dicts = load_from_filename('tests/fixtures/extends/valid-interpolation.yml')
for service in service_dicts:
            self.assertEqual(service['hostname'], expected_interpolated_value)
def test_volume_path(self):
dicts = load_from_filename('tests/fixtures/volume-path/docker-compose.yml')
paths = [
'%s:/foo' % os.path.abspath('tests/fixtures/volume-path/common/foo'),
'%s:/bar' % os.path.abspath('tests/fixtures/volume-path/bar'),
]
self.assertEqual(set(dicts[0]['volumes']), set(paths))
def test_parent_build_path_dne(self):
child = load_from_filename('tests/fixtures/extends/nonexistent-path-child.yml')
self.assertEqual(child, [
{
'name': 'dnechild',
'image': 'busybox',
'command': '/bin/true',
'environment': {
"FOO": "1",
"BAR": "2",
},
},
])
def test_load_throws_error_when_base_service_does_not_exist(self):
err_msg = r'''Cannot extend service 'foo' in .*: Service not found'''
with self.assertRaisesRegexp(ConfigurationError, err_msg):
load_from_filename('tests/fixtures/extends/nonexistent-service.yml')
def test_partial_service_config_in_extends_is_still_valid(self):
dicts = load_from_filename('tests/fixtures/extends/valid-common-config.yml')
self.assertEqual(dicts[0]['environment'], {'FOO': '1'})
class BuildPathTest(unittest.TestCase):
def setUp(self):
self.abs_context_path = os.path.join(os.getcwd(), 'tests/fixtures/build-ctx')
def test_nonexistent_path(self):
with self.assertRaises(ConfigurationError):
config.load(
config.ConfigDetails(
{
'foo': {'build': 'nonexistent.path'},
},
'working_dir',
'filename.yml'
)
)
def test_relative_path(self):
relative_build_path = '../build-ctx/'
service_dict = make_service_dict(
'relpath',
{'build': relative_build_path},
working_dir='tests/fixtures/build-path'
)
self.assertEquals(service_dict['build'], self.abs_context_path)
def test_absolute_path(self):
service_dict = make_service_dict(
'abspath',
{'build': self.abs_context_path},
working_dir='tests/fixtures/build-path'
)
self.assertEquals(service_dict['build'], self.abs_context_path)
def test_from_file(self):
service_dict = load_from_filename('tests/fixtures/build-path/docker-compose.yml')
self.assertEquals(service_dict, [{'name': 'foo', 'build': self.abs_context_path}])
class GetConfigPathTestCase(unittest.TestCase):
files = [
'docker-compose.yml',
'docker-compose.yaml',
'fig.yml',
'fig.yaml',
]
def test_get_config_path_default_file_in_basedir(self):
files = self.files
self.assertEqual('docker-compose.yml', get_config_filename_for_files(files[0:]))
self.assertEqual('docker-compose.yaml', get_config_filename_for_files(files[1:]))
self.assertEqual('fig.yml', get_config_filename_for_files(files[2:]))
self.assertEqual('fig.yaml', get_config_filename_for_files(files[3:]))
with self.assertRaises(config.ComposeFileNotFound):
get_config_filename_for_files([])
def test_get_config_path_default_file_in_parent_dir(self):
"""Test with files placed in the subdir"""
files = self.files
def get_config_in_subdir(files):
return get_config_filename_for_files(files, subdir=True)
self.assertEqual('docker-compose.yml', get_config_in_subdir(files[0:]))
self.assertEqual('docker-compose.yaml', get_config_in_subdir(files[1:]))
self.assertEqual('fig.yml', get_config_in_subdir(files[2:]))
self.assertEqual('fig.yaml', get_config_in_subdir(files[3:]))
with self.assertRaises(config.ComposeFileNotFound):
get_config_in_subdir([])
def get_config_filename_for_files(filenames, subdir=None):
def make_files(dirname, filenames):
for fname in filenames:
with open(os.path.join(dirname, fname), 'w') as f:
f.write('')
project_dir = tempfile.mkdtemp()
try:
make_files(project_dir, filenames)
if subdir:
base_dir = tempfile.mkdtemp(dir=project_dir)
else:
base_dir = project_dir
return os.path.basename(config.get_config_path(base_dir))
finally:
shutil.rmtree(project_dir)
|
reyrodrigues/EU-SMS
|
refs/heads/master
|
temba/public/tests.py
|
1
|
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from temba.orgs.models import Org
from smartmin.tests import SmartminTest, _CRUDLTest
from .models import *
from .views import VideoCRUDL
class PublicTest(SmartminTest):
def setUp(self):
self.superuser = User.objects.create_superuser(username="super", email="super@user.com", password="super")
self.user = self.create_user("tito")
def test_index(self):
        home_url = reverse('public.public_index')
response = self.client.get(home_url, follow=True)
self.assertEquals(response.request['PATH_INFO'], '/')
# try to create a lead from the homepage
lead_create_url = reverse('public.lead_create')
post_data = dict()
response = self.client.post(lead_create_url, post_data, follow=True)
self.assertEquals(response.request['PATH_INFO'], '/')
        self.assertTrue(response.context['errors'])
self.assertEquals(response.context['error_msg'], 'This field is required.')
post_data['email'] = 'wrong_email_format'
response = self.client.post(lead_create_url, post_data, follow=True)
self.assertEquals(response.request['PATH_INFO'], '/')
self.assertTrue(response.context['errors'])
self.assertEquals(response.context['error_msg'], 'Enter a valid email address.')
post_data['email'] = 'immortal@temba.com'
response = self.client.post(lead_create_url, post_data, follow=True)
self.assertEquals(response.request['PATH_INFO'], reverse('orgs.org_signup'))
def test_privacy(self):
response = self.client.get(reverse('public.public_privacy'))
self.assertContains(response, "Privacy")
def test_welcome(self):
welcome_url = reverse('public.public_welcome')
response = self.client.get(welcome_url, follow=True)
self.assertTrue('next' in response.request['QUERY_STRING'])
self.assertEquals(response.request['PATH_INFO'], reverse('users.user_login'))
self.login(self.user)
response = self.client.get(welcome_url, follow=True)
self.assertEquals(response.request['PATH_INFO'], reverse('public.public_welcome'))
def test_leads(self):
create_url = reverse('public.lead_create')
post_data = dict()
post_data['email'] = 'eugene@temba.com'
response = self.client.post(create_url, post_data, follow=True)
self.assertEquals(len(Lead.objects.all()), 1)
# create mailing list with the same email again, we actually allow dupes now
post_data['email'] = 'eugene@temba.com'
response = self.client.post(create_url, post_data, follow=True)
self.assertEquals(len(Lead.objects.all()), 2)
# invalid email
post_data['email'] = 'asdfasdf'
response = self.client.post(create_url, post_data, follow=True)
self.assertEquals(response.request['PATH_INFO'], '/')
self.assertEquals(len(Lead.objects.all()), 2)
def test_demo_coupon(self):
coupon_url = reverse('demo.generate_coupon')
response = self.client.get(coupon_url, follow=True)
self.assertEquals(response.request['PATH_INFO'], coupon_url)
self.assertIn('coupon', response.content)
def test_demo_status(self):
status_url = reverse('demo.order_status')
response = self.client.get(status_url, follow=True)
self.assertEquals(response.request['PATH_INFO'], status_url)
self.assertIn('Invalid', response.content)
response = self.client.get("%s?text=somethinginvalid" % status_url)
self.assertEquals(response.request['PATH_INFO'], status_url)
self.assertIn('Invalid', response.content)
response = self.client.get("%s?text=cu001" % status_url)
self.assertEquals(response.request['PATH_INFO'], status_url)
self.assertIn('Shipped', response.content)
response = self.client.get("%s?text=cu002" % status_url)
self.assertEquals(response.request['PATH_INFO'], status_url)
self.assertIn('Pending', response.content)
response = self.client.get("%s?text=cu003" % status_url)
self.assertEquals(response.request['PATH_INFO'], status_url)
self.assertIn('Cancelled', response.content)
    def test_templatetags(self):
        from .templatetags.public import gear_link_classes
        # The original assertions passed gear_link_classes(...) as the msg
        # argument of two-arg assertTrue, so they could never fail; compare
        # the computed class string explicitly instead.
        link = dict()
        link['posterize'] = True
        self.assertEqual("posterize", gear_link_classes(link))
        link['js_class'] = 'alright'
        self.assertEqual("posterize alright", gear_link_classes(link))
        link['style'] = "pull-right"
        self.assertEqual("posterize alright pull-right", gear_link_classes(link, True))
        link['modal'] = True
        self.assertEqual("posterize alright pull-right gear-modal", gear_link_classes(link, True))
        link['delete'] = True
        self.assertEqual("posterize alright pull-right gear-modal gear-delete", gear_link_classes(link, True))
def test_sitemaps(self):
sitemap_url = reverse('public.sitemaps')
# number of fixed items (i.e. not videos, differs between configurations)
response = self.client.get(sitemap_url)
# but first item is always home page
self.assertEquals(response.context['urlset'][0], {'priority': '0.5',
'item': 'public.public_index',
'lastmod': None,
'changefreq': 'daily',
'location': u'http://example.com/'})
num_fixed_items = len(response.context['urlset'])
# adding a video will dynamically add a new item
Video.objects.create(name="Item14", summary="Unicorn", description="Video of unicorns", vimeo_id="1234",
order=0, created_by=self.superuser, modified_by=self.superuser)
response = self.client.get(sitemap_url)
self.assertEqual(len(response.context['urlset']), num_fixed_items + 1)
class VideoCRUDLTest(_CRUDLTest):
def setUp(self):
super(VideoCRUDLTest, self).setUp()
self.crudl = VideoCRUDL
self.user = User.objects.create_superuser('admin', 'a@b.com', 'admin')
def getCreatePostData(self):
return dict(name="Video One", description="My description", summary="My Summary", vimeo_id="1234", order=0)
def getUpdatePostData(self):
return dict(name="Video Updated", description="My description", summary="My Summary", vimeo_id="1234", order=0)
|
sti-lyneos/shop
|
refs/heads/master
|
softwarecenter/region.py
|
3
|
# Copyright (C) 2012 Canonical
#
# Authors:
# Michael Vogt
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import dbus
from dbus.mainloop.glib import DBusGMainLoop
DBusGMainLoop(set_as_default=True)
import locale
import logging
import os
from gettext import dgettext
from gettext import gettext as _
LOG = logging.getLogger(__name__)
# warning displayed if region does not match
REGION_WARNING_STRING = _("Sorry, this software is not available in your "
"region.")
# the prefix tag for regions that the software is most useful in
REGIONTAG = "iso3166::"
# blacklist this region
REGION_BLACKLIST_TAG = "blacklist-iso3166::"
# or whitelist it
REGION_WHITELIST_TAG = "whitelist-iso3166::"
def get_region_name(countrycode):
""" return translated region name from countrycode using iso3166 """
from lxml import etree
# find translated name
if countrycode:
for iso in ["iso_3166", "iso_3166_2"]:
path = os.path.join("/usr/share/xml/iso-codes/", iso + ".xml")
if os.path.exists(path):
root = etree.parse(path)
xpath = ".//%s_entry[@alpha_2_code='%s']" % (iso, countrycode)
match = root.find(xpath)
if match is not None:
name = match.attrib.get("common_name")
if not name:
name = match.attrib["name"]
return dgettext(iso, name)
return ""
# the first parameter of SetRequirements
class AccuracyLevel:
NONE = 0
COUNTRY = 1
REGION = 2
LOCALITY = 3
POSTALCODE = 4
STREET = 5
DETAILED = 6
class AllowedResources:
NONE = 0
NETWORK = 1 << 0
CELL = 1 << 1
GPS = 1 << 2
ALL = (1 << 10) - 1
class RegionDiscover(object):
def get_region(self):
""" return a dict with at least "county" and "countrycode" as
keys - they may be empty if no region is found
"""
res = {}
# try geoclue first
try:
res = self._get_region_geoclue()
if res:
return res
except Exception as e:
LOG.warn("failed to use geoclue: '%s'" % e)
# fallback
res = self._get_region_dumb()
return res
def _get_region_dumb(self):
""" return dict estimate about the current countrycode/country """
res = {'countrycode': '',
'country': '',
}
try:
# use LC_MONETARY as the best guess
loc = locale.getlocale(locale.LC_MONETARY)[0]
except Exception as e:
LOG.warn("Failed to get locale: '%s'" % e)
return res
if not loc:
return res
countrycode = loc.split("_")[1]
res["countrycode"] = countrycode
res["country"] = get_region_name(countrycode)
return res
def _get_region_geoclue(self):
""" return the dict with at least countrycode,country from a geoclue
provider
"""
bus = dbus.SessionBus()
master = bus.get_object(
'org.freedesktop.Geoclue.Master',
'/org/freedesktop/Geoclue/Master')
client = bus.get_object(
'org.freedesktop.Geoclue.Master', master.Create())
client.SetRequirements(AccuracyLevel.COUNTRY, # (i) accuracy_level
0, # (i) time
False, # (b) require_updates
AllowedResources.ALL) # (i) allowed_resources
# this is crucial
client.AddressStart()
# now get the data
time, address_res, accuracy = client.GetAddress()
return address_res
my_region = None
def get_region_cached():
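    """Return the region dict, computing it at most once per process."""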
global my_region
if my_region is None:
rd = RegionDiscover()
my_region = rd.get_region()
return my_region
|
habnabit/pip
|
refs/heads/develop
|
pip/utils/ui.py
|
316
|
from __future__ import absolute_import
from __future__ import division
import itertools
import sys
from signal import signal, SIGINT, default_int_handler
from pip.compat import WINDOWS
from pip.utils import format_size
from pip.utils.logging import get_indentation
from pip._vendor import six
from pip._vendor.progress.bar import Bar, IncrementalBar
from pip._vendor.progress.helpers import WritelnMixin
from pip._vendor.progress.spinner import Spinner
try:
from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
colorama = None
def _select_progress_class(preferred, fallback):
encoding = getattr(preferred.file, "encoding", None)
# If we don't know what encoding this file is in, then we'll just assume
# that it doesn't support unicode and use the ASCII bar.
if not encoding:
return fallback
# Collect all of the possible characters we want to use with the preferred
# bar.
characters = [
getattr(preferred, "empty_fill", six.text_type()),
getattr(preferred, "fill", six.text_type()),
]
characters += list(getattr(preferred, "phases", []))
# Try to decode the characters we're using for the bar using the encoding
# of the given file, if this works then we'll assume that we can use the
# fancier bar and if not we'll fall back to the plaintext bar.
try:
six.text_type().join(characters).encode(encoding)
except UnicodeEncodeError:
return fallback
else:
return preferred
_BaseBar = _select_progress_class(IncrementalBar, Bar)
class InterruptibleMixin(object):
"""
Helper to ensure that self.finish() gets called on keyboard interrupt.
This allows downloads to be interrupted without leaving temporary state
(like hidden cursors) behind.
This class is similar to the progress library's existing SigIntMixin
helper, but as of version 1.2, that helper has the following problems:
1. It calls sys.exit().
2. It discards the existing SIGINT handler completely.
3. It leaves its own handler in place even after an uninterrupted finish,
which will have unexpected delayed effects if the user triggers an
unrelated keyboard interrupt some time after a progress-displaying
download has already completed, for example.
"""
def __init__(self, *args, **kwargs):
"""
Save the original SIGINT handler for later.
"""
super(InterruptibleMixin, self).__init__(*args, **kwargs)
self.original_handler = signal(SIGINT, self.handle_sigint)
# If signal() returns None, the previous handler was not installed from
# Python, and we cannot restore it. This probably should not happen,
# but if it does, we must restore something sensible instead, at least.
# The least bad option should be Python's default SIGINT handler, which
# just raises KeyboardInterrupt.
if self.original_handler is None:
self.original_handler = default_int_handler
def finish(self):
"""
Restore the original SIGINT handler after finishing.
This should happen regardless of whether the progress display finishes
normally, or gets interrupted.
"""
super(InterruptibleMixin, self).finish()
signal(SIGINT, self.original_handler)
def handle_sigint(self, signum, frame):
"""
Call self.finish() before delegating to the original SIGINT handler.
This handler should only be in place while the progress display is
active.
"""
self.finish()
self.original_handler(signum, frame)
class DownloadProgressMixin(object):
def __init__(self, *args, **kwargs):
super(DownloadProgressMixin, self).__init__(*args, **kwargs)
self.message = (" " * (get_indentation() + 2)) + self.message
@property
def downloaded(self):
return format_size(self.index)
@property
def download_speed(self):
# Avoid zero division errors...
if self.avg == 0.0:
return "..."
return format_size(1 / self.avg) + "/s"
@property
def pretty_eta(self):
if self.eta:
return "eta %s" % self.eta_td
return ""
def iter(self, it, n=1):
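        # Advance the progress display once for each item consumed, then finish.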
for x in it:
yield x
self.next(n)
self.finish()
class WindowsMixin(object):
def __init__(self, *args, **kwargs):
# The Windows terminal does not support the hide/show cursor ANSI codes
# even with colorama. So we'll ensure that hide_cursor is False on
# Windows.
        # This call needs to go before the super() call, so that hide_cursor
# is set in time. The base progress bar class writes the "hide cursor"
# code to the terminal in its init, so if we don't set this soon
# enough, we get a "hide" with no corresponding "show"...
if WINDOWS and self.hide_cursor:
self.hide_cursor = False
super(WindowsMixin, self).__init__(*args, **kwargs)
# Check if we are running on Windows and we have the colorama module,
# if we do then wrap our file with it.
if WINDOWS and colorama:
self.file = colorama.AnsiToWin32(self.file)
# The progress code expects to be able to call self.file.isatty()
# but the colorama.AnsiToWin32() object doesn't have that, so we'll
# add it.
self.file.isatty = lambda: self.file.wrapped.isatty()
# The progress code expects to be able to call self.file.flush()
# but the colorama.AnsiToWin32() object doesn't have that, so we'll
# add it.
self.file.flush = lambda: self.file.wrapped.flush()
class DownloadProgressBar(WindowsMixin, InterruptibleMixin,
DownloadProgressMixin, _BaseBar):
file = sys.stdout
message = "%(percent)d%%"
suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
DownloadProgressMixin, WritelnMixin, Spinner):
file = sys.stdout
suffix = "%(downloaded)s %(download_speed)s"
def next_phase(self):
if not hasattr(self, "_phaser"):
self._phaser = itertools.cycle(self.phases)
return next(self._phaser)
def update(self):
message = self.message % self
phase = self.next_phase()
suffix = self.suffix % self
line = ''.join([
message,
" " if message else "",
phase,
" " if suffix else "",
suffix,
])
self.writeln(line)
|
mshafiq9/django
|
refs/heads/master
|
tests/admin_inlines/test_templates.py
|
285
|
from __future__ import unicode_literals
from django.template.loader import render_to_string
from django.test import SimpleTestCase
class TestTemplates(SimpleTestCase):
def test_javascript_escaping(self):
context = {
'inline_admin_formset': {
'formset': {'prefix': 'my-prefix'},
'opts': {'verbose_name': 'verbose name\\'},
},
}
output = render_to_string('admin/edit_inline/stacked.html', context)
self.assertIn('prefix: "my\\u002Dprefix",', output)
self.assertIn('addText: "Add another Verbose name\\u005C"', output)
output = render_to_string('admin/edit_inline/tabular.html', context)
self.assertIn('prefix: "my\\u002Dprefix",', output)
self.assertIn('addText: "Add another Verbose name\\u005C"', output)
|
vicky2135/lucious
|
refs/heads/master
|
lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/utf8prober.py
|
2918
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .mbcssm import UTF8SMModel
ONE_CHAR_PROB = 0.5
class UTF8Prober(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(UTF8SMModel)
self.reset()
def reset(self):
CharSetProber.reset(self)
self._mCodingSM.reset()
self._mNumOfMBChar = 0
def get_charset_name(self):
return "utf-8"
def feed(self, aBuf):
for c in aBuf:
codingState = self._mCodingSM.next_state(c)
if codingState == constants.eError:
self._mState = constants.eNotMe
break
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
break
elif codingState == constants.eStart:
if self._mCodingSM.get_current_charlen() >= 2:
self._mNumOfMBChar += 1
if self.get_state() == constants.eDetecting:
if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
self._mState = constants.eFoundIt
return self.get_state()
def get_confidence(self):
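        # Each multi-byte sequence seen halves the estimated probability that
        # the input is *not* UTF-8, so confidence rises quickly with evidence.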
unlike = 0.99
if self._mNumOfMBChar < 6:
for i in range(0, self._mNumOfMBChar):
unlike = unlike * ONE_CHAR_PROB
return 1.0 - unlike
else:
return unlike
|
wangmiao1981/PredictionIO
|
refs/heads/develop
|
examples/experimental/scala-parallel-recommendation-entitymap/data/send_query.py
|
48
|
"""
Send sample query to prediction engine
"""
import predictionio
engine_client = predictionio.EngineClient(url="http://localhost:8000")
print engine_client.send_query({"user": "uid1", "num": 4})
|
nju520/PyMySQL
|
refs/heads/master
|
pymysql/tests/__init__.py
|
8
|
from pymysql.tests.test_issues import *
from pymysql.tests.test_basic import *
from pymysql.tests.test_nextset import *
from pymysql.tests.test_DictCursor import *
from pymysql.tests.test_connection import *
from pymysql.tests.test_SSCursor import *
from pymysql.tests.test_load_local import *
from pymysql.tests.thirdparty import *
if __name__ == "__main__":
import unittest2
unittest2.main()
|
robovm/robovm-studio
|
refs/heads/master
|
python/testData/completion/noParensForDecorator.after.py
|
83
|
def my_decorator(f):
return f
@my_decorator<caret>
|
luoxufeiyan/python
|
refs/heads/master
|
ailurus1991/0011/main.py
|
40
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'jinyang'
def readFile(filePath):
file = open(filePath, 'r')
words = []
for word in file.readlines():
# print word.strip('\n')
words.append(word.strip('\n'))
return words
def check(testWord):
    realWords = readFile('filtered_words.txt')
    # scan the whole list before concluding the word is not filtered
    for i in realWords:
        if i == testWord:
            print 'Freedom'
            return
    print 'Human Rights'
if __name__ == '__main__':
check('sss')
|
bamos/parsec-benchmark
|
refs/heads/master
|
pkgs/libs/glib/src/gobject/gobject.py
|
85
|
import gdb
import glib
import gdb.backtrace
import gdb.command.backtrace
# This is not quite right, as local vars may override symname
def read_global_var (symname):
return gdb.selected_frame().read_var(symname)
def g_type_to_name (gtype):
def lookup_fundamental_type (typenode):
if typenode == 0:
return None
val = read_global_var ("static_fundamental_type_nodes")
if val == None:
return None
return val[typenode >> 2].address()
gtype = long(gtype)
typenode = gtype - gtype % 4
if typenode > (255 << 2):
typenode = gdb.Value(typenode).cast (gdb.lookup_type("TypeNode").pointer())
else:
typenode = lookup_fundamental_type (typenode)
if typenode != None:
return glib.g_quark_to_string (typenode["qname"])
return None
def is_g_type_instance (val):
def is_g_type_instance_helper (type):
if str(type) == "GTypeInstance":
return True
while type.code == gdb.TYPE_CODE_TYPEDEF:
type = type.target()
if type.code != gdb.TYPE_CODE_STRUCT:
return False
fields = type.fields()
if len (fields) < 1:
return False
first_field = fields[0]
return is_g_type_instance_helper(first_field.type)
type = val.type
if type.code != gdb.TYPE_CODE_PTR:
return False
type = type.target()
return is_g_type_instance_helper (type)
def g_type_name_from_instance (instance):
if long(instance) != 0:
try:
inst = instance.cast (gdb.lookup_type("GTypeInstance").pointer())
klass = inst["g_class"]
gtype = klass["g_type"]
name = g_type_to_name (gtype)
return name
except RuntimeError:
pass
return None
class GTypePrettyPrinter:
"Prints a GType instance pointer"
def __init__ (self, val):
self.val = val
def to_string (self):
name = g_type_name_from_instance (self.val)
if name:
return ("0x%x [%s]")% (long(self.val), name)
return ("0x%x") % (long(self.val))
def pretty_printer_lookup (val):
if is_g_type_instance (val):
return GTypePrettyPrinter (val)
return None
def get_signal_name (id):
if id == None:
return None
id = long(id)
if id == 0:
return None
val = read_global_var ("g_signal_nodes")
max_s = read_global_var ("g_n_signal_nodes")
max_s = long(max_s)
if id < max_s:
return val[id]["name"].string()
return None
class GFrameWrapper:
def __init__ (self, frame):
        self.frame = frame
def name (self):
name = self.frame.name()
if name and name.startswith("IA__"):
return name[4:]
return name
def __getattr__ (self, name):
return getattr (self.frame, name)
# Monkey patch FrameWrapper to avoid IA__ in symbol names
old__init__ = gdb.command.backtrace.FrameWrapper.__init__
def monkey_patched_init(self, frame):
name = frame.name()
if name and name.startswith("IA__"):
frame = GFrameWrapper(frame)
old__init__(self,frame)
gdb.command.backtrace.FrameWrapper.__init__ = monkey_patched_init
class DummyFrame:
def __init__ (self, frame):
self.frame = frame
def name (self):
return "signal-emission-dummy"
def describe (self, stream, full):
stream.write (" <...>\n")
def __getattr__ (self, name):
return getattr (self.frame, name)
class SignalFrame:
def __init__ (self, frames):
self.frame = frames[-1]
        self.frames = frames
def name (self):
return "signal-emission"
def read_var (self, frame, name, array = None):
try:
v = frame.read_var (name)
if v == None or v.is_optimized_out:
return None
if array != None:
array.append (v)
return v
except ValueError:
return None
def read_object (self, frame, name, array = None):
try:
v = frame.read_var (name)
if v == None or v.is_optimized_out:
return None
v = v.cast (gdb.lookup_type("GObject").pointer())
# Ensure this is a somewhat correct object pointer
if v != None and g_type_name_from_instance (v):
if array != None:
array.append (v)
return v
return None
except ValueError:
return None
def append (self, array, obj):
if obj != None:
array.append (obj)
def or_join_array (self, array):
if len(array) == 0:
return "???"
v = {}
for i in range(len(array)):
v[str(array[i])] = 1
array = v.keys()
s = array[0]
for i in range(1, len(array)):
s = s + " or %s"%array[i]
return s
def describe (self, stream, full):
instances = []
signals = []
for frame in self.frames:
name = frame.name()
if name == "signal_emit_unlocked_R":
self.read_object (frame, "instance", instances)
node = self.read_var (frame, "node")
if node:
signal = node["name"].string()
detail = self.read_var (frame, "detail")
detail = glib.g_quark_to_string (detail)
if detail != None:
signal = signal + ":" + detail
self.append (signals, signal)
if name == "g_signal_emitv":
instance_and_params = self.read_var (frame, "instance_and_params")
if instance_and_params:
                    instance = instance_and_params[0]["v_pointer"].cast (gdb.lookup_type("GObject").pointer())
self.append (instances, instance)
id = self.read_var (frame, "signal_id")
signal = get_signal_name (id)
if signal:
detail = self.read_var (frame, "detail")
detail = glib.g_quark_to_string (detail)
if detail != None:
signal = signal + ":" + detail
self.append (signals, signal)
if name == "g_signal_emit_valist" or name == "g_signal_emit":
self.read_object (frame, "instance", instances)
id = self.read_var (frame, "signal_id")
signal = get_signal_name (id)
if signal:
detail = self.read_var (frame, "detail")
detail = glib.g_quark_to_string (detail)
if detail != None:
signal = signal + ":" + detail
self.append (signals, signal)
if name == "g_signal_emit_by_name":
self.read_object (frame, "instance", instances)
self.read_var (frame, "detailed_signal", signals)
break
instance = self.or_join_array (instances)
signal = self.or_join_array (signals)
stream.write (" <emit signal %s on instance %s>\n" % (signal, instance))
def __getattr__ (self, name):
return getattr (self.frame, name)
class GFrameFilter:
def __init__ (self, iter):
self.queue = []
self.iter = iter
def __iter__ (self):
return self
def fill (self):
while len(self.queue) <= 6:
try:
f = self.iter.next ()
self.queue.append (f)
except StopIteration:
return
def find_signal_emission (self):
for i in range (min (len(self.queue), 3)):
if self.queue[i].name() == "signal_emit_unlocked_R":
return i
return -1
def next (self):
# Ensure we have enough frames for a full signal emission
self.fill()
# Are we at the end?
if len(self.queue) == 0:
raise StopIteration
emission = self.find_signal_emission ()
if emission > 0:
start = emission
while True:
if start == 0:
break
prev_name = self.queue[start-1].name()
                if prev_name.find("_marshal_") >= 0 or prev_name == "g_closure_invoke":
start = start - 1
else:
break
end = emission + 1
while end < len(self.queue):
if self.queue[end].name() in ["g_signal_emitv",
"g_signal_emit_valist",
"g_signal_emit",
"g_signal_emit_by_name"]:
end = end + 1
else:
break
signal_frames = self.queue[start:end]
new_frames = []
for i in range(len(signal_frames)-1):
new_frames.append(DummyFrame(signal_frames[i]))
new_frames.append(SignalFrame(signal_frames))
self.queue[start:end] = new_frames
return self.queue.pop(0)
def register (obj):
if obj == None:
obj = gdb
gdb.backtrace.push_frame_filter (GFrameFilter)
obj.pretty_printers.append(pretty_printer_lookup)
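# Hedged usage note: a gdb auto-load script would typically call
# register(gdb.current_objfile()) so the pretty-printer and the signal-aware
# backtrace filter get installed; the exact entry point is an assumption here.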
|
lexus42/40223242test
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/token.py
|
743
|
"""Token constants (from "token.h")."""
__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']
# This file is automatically generated; please don't muck it up!
#
# To update the symbols in this file, 'cd' to the top directory of
# the python source tree after building the interpreter and run:
#
# ./python Lib/token.py
#--start constants--
ENDMARKER = 0
NAME = 1
NUMBER = 2
STRING = 3
NEWLINE = 4
INDENT = 5
DEDENT = 6
LPAR = 7
RPAR = 8
LSQB = 9
RSQB = 10
COLON = 11
COMMA = 12
SEMI = 13
PLUS = 14
MINUS = 15
STAR = 16
SLASH = 17
VBAR = 18
AMPER = 19
LESS = 20
GREATER = 21
EQUAL = 22
DOT = 23
PERCENT = 24
LBRACE = 25
RBRACE = 26
EQEQUAL = 27
NOTEQUAL = 28
LESSEQUAL = 29
GREATEREQUAL = 30
TILDE = 31
CIRCUMFLEX = 32
LEFTSHIFT = 33
RIGHTSHIFT = 34
DOUBLESTAR = 35
PLUSEQUAL = 36
MINEQUAL = 37
STAREQUAL = 38
SLASHEQUAL = 39
PERCENTEQUAL = 40
AMPEREQUAL = 41
VBAREQUAL = 42
CIRCUMFLEXEQUAL = 43
LEFTSHIFTEQUAL = 44
RIGHTSHIFTEQUAL = 45
DOUBLESTAREQUAL = 46
DOUBLESLASH = 47
DOUBLESLASHEQUAL = 48
AT = 49
RARROW = 50
ELLIPSIS = 51
OP = 52
ERRORTOKEN = 53
N_TOKENS = 54
NT_OFFSET = 256
#--end constants--
tok_name = {value: name
for name, value in globals().items()
if isinstance(value, int) and not name.startswith('_')}
__all__.extend(tok_name.values())
def ISTERMINAL(x):
return x < NT_OFFSET
def ISNONTERMINAL(x):
return x >= NT_OFFSET
def ISEOF(x):
return x == ENDMARKER
def _main():
import re
import sys
args = sys.argv[1:]
inFileName = args and args[0] or "Include/token.h"
outFileName = "Lib/token.py"
if len(args) > 1:
outFileName = args[1]
try:
fp = open(inFileName)
except IOError as err:
sys.stdout.write("I/O error: %s\n" % str(err))
sys.exit(1)
lines = fp.read().split("\n")
fp.close()
prog = re.compile(
"#define[ \t][ \t]*([A-Z0-9][A-Z0-9_]*)[ \t][ \t]*([0-9][0-9]*)",
re.IGNORECASE)
tokens = {}
for line in lines:
match = prog.match(line)
if match:
name, val = match.group(1, 2)
val = int(val)
tokens[val] = name # reverse so we can sort them...
keys = sorted(tokens.keys())
# load the output skeleton from the target:
try:
fp = open(outFileName)
except IOError as err:
sys.stderr.write("I/O error: %s\n" % str(err))
sys.exit(2)
format = fp.read().split("\n")
fp.close()
try:
start = format.index("#--start constants--") + 1
end = format.index("#--end constants--")
except ValueError:
sys.stderr.write("target does not contain format markers")
sys.exit(3)
lines = []
for val in keys:
lines.append("%s = %d" % (tokens[val], val))
format[start:end] = lines
try:
fp = open(outFileName, 'w')
except IOError as err:
sys.stderr.write("I/O error: %s\n" % str(err))
sys.exit(4)
fp.write("\n".join(format))
fp.close()
if __name__ == "__main__":
_main()
|
mconley99/themis_tritonsort
|
refs/heads/master
|
src/scripts/themis/cloud/check_cluster_status.py
|
2
|
#!/usr/bin/env python
import argparse, sys, redis, os
from instance_utils import get_cluster_status
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir)))
import utils
def check_cluster_status(cluster_ID, redis_client):
provider = redis_client.hget("cluster:%d" % cluster_ID, "provider")
zone = redis_client.hget("cluster:%d" % cluster_ID, "zone")
cluster_status = get_cluster_status(provider, cluster_ID, zone)
num_masters = len(cluster_status["online_masters"])
num_masters_booting = len(cluster_status["booting_masters"])
master = cluster_status["master"]
connectable = cluster_status["master_connectable"]
num_slaves = len(cluster_status["online_slaves"])
num_slaves_booting = len(cluster_status["booting_slaves"])
if num_masters == 1:
print "Master online"
else:
assert num_masters == 0, "Cannot have multiple masters online but "\
"found %d" % num_masters
if num_masters_booting == 1:
print "Master booting"
else:
assert num_masters_booting == 0,\
"Cannot have multiple masters booting but found %d"\
% num_masters_booting
print "Master offline"
if connectable:
print "Master connectable"
else:
print "Master not connectable"
print "%d slaves online" % num_slaves
print "%d slaves booting" % num_slaves_booting
if master != None:
print ""
print "Cluster status page: %s:4280" % master[1]
print "Master internal address: %s" % master[0]
return 0
def main():
parser = argparse.ArgumentParser(
description="Check status of a Themis cluster")
parser.add_argument("cluster_ID", help="ID of the cluster", type=int)
utils.add_redis_params(parser)
args = parser.parse_args()
redis_client = redis.StrictRedis(
host=args.redis_host, port=args.redis_port, db=args.redis_db)
return check_cluster_status(args.cluster_ID, redis_client)
if __name__ == "__main__":
sys.exit(main())
|
GeekTrainer/Flask
|
refs/heads/master
|
Work/Trivia - Module 5/env/Lib/site-packages/pip/_vendor/distlib/resources.py
|
191
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals
import bisect
import io
import logging
import os
import pkgutil
import shutil
import sys
import types
import zipimport
from . import DistlibException
from .util import cached_property, get_cache_base, path_to_cache_dir, Cache
logger = logging.getLogger(__name__)
cache = None # created when needed
class ResourceCache(Cache):
def __init__(self, base=None):
if base is None:
# Use native string to avoid issues on 2.x: see Python #20140.
base = os.path.join(get_cache_base(), str('resource-cache'))
super(ResourceCache, self).__init__(base)
def is_stale(self, resource, path):
"""
Is the cache stale for the given resource?
:param resource: The :class:`Resource` being cached.
:param path: The path of the resource in the cache.
:return: True if the cache is stale.
"""
# Cache invalidation is a hard problem :-)
return True
def get(self, resource):
"""
        Get a resource into the cache.
:param resource: A :class:`Resource` instance.
:return: The pathname of the resource in the cache.
"""
prefix, path = resource.finder.get_cache_info(resource)
if prefix is None:
result = path
else:
result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
dirname = os.path.dirname(result)
if not os.path.isdir(dirname):
os.makedirs(dirname)
if not os.path.exists(result):
stale = True
else:
stale = self.is_stale(resource, path)
if stale:
# write the bytes of the resource to the cache location
with open(result, 'wb') as f:
f.write(resource.bytes)
return result
class ResourceBase(object):
def __init__(self, finder, name):
self.finder = finder
self.name = name
class Resource(ResourceBase):
"""
A class representing an in-package resource, such as a data file. This is
not normally instantiated by user code, but rather by a
:class:`ResourceFinder` which manages the resource.
"""
is_container = False # Backwards compatibility
def as_stream(self):
"""
Get the resource as a stream.
This is not a property to make it obvious that it returns a new stream
each time.
"""
return self.finder.get_stream(self)
@cached_property
def file_path(self):
global cache
if cache is None:
cache = ResourceCache()
return cache.get(self)
@cached_property
def bytes(self):
return self.finder.get_bytes(self)
@cached_property
def size(self):
return self.finder.get_size(self)
class ResourceContainer(ResourceBase):
is_container = True # Backwards compatibility
@cached_property
def resources(self):
return self.finder.get_resources(self)
class ResourceFinder(object):
"""
Resource finder for file system resources.
"""
def __init__(self, module):
self.module = module
self.loader = getattr(module, '__loader__', None)
self.base = os.path.dirname(getattr(module, '__file__', ''))
def _adjust_path(self, path):
return os.path.realpath(path)
def _make_path(self, resource_name):
parts = resource_name.split('/')
parts.insert(0, self.base)
result = os.path.join(*parts)
return self._adjust_path(result)
def _find(self, path):
return os.path.exists(path)
def get_cache_info(self, resource):
return None, resource.path
def find(self, resource_name):
path = self._make_path(resource_name)
if not self._find(path):
result = None
else:
if self._is_directory(path):
result = ResourceContainer(self, resource_name)
else:
result = Resource(self, resource_name)
result.path = path
return result
def get_stream(self, resource):
return open(resource.path, 'rb')
def get_bytes(self, resource):
with open(resource.path, 'rb') as f:
return f.read()
def get_size(self, resource):
return os.path.getsize(resource.path)
def get_resources(self, resource):
def allowed(f):
return f != '__pycache__' and not f.endswith(('.pyc', '.pyo'))
return set([f for f in os.listdir(resource.path) if allowed(f)])
def is_container(self, resource):
return self._is_directory(resource.path)
_is_directory = staticmethod(os.path.isdir)
class ZipResourceFinder(ResourceFinder):
"""
Resource finder for resources in .zip files.
"""
def __init__(self, module):
super(ZipResourceFinder, self).__init__(module)
archive = self.loader.archive
self.prefix_len = 1 + len(archive)
# PyPy doesn't have a _files attr on zipimporter, and you can't set one
if hasattr(self.loader, '_files'):
self._files = self.loader._files
else:
self._files = zipimport._zip_directory_cache[archive]
self.index = sorted(self._files)
def _adjust_path(self, path):
return path
def _find(self, path):
path = path[self.prefix_len:]
if path in self._files:
result = True
else:
if path and path[-1] != os.sep:
path = path + os.sep
i = bisect.bisect(self.index, path)
try:
result = self.index[i].startswith(path)
except IndexError:
result = False
if not result:
logger.debug('_find failed: %r %r', path, self.loader.prefix)
else:
logger.debug('_find worked: %r %r', path, self.loader.prefix)
return result
def get_cache_info(self, resource):
prefix = self.loader.archive
path = resource.path[1 + len(prefix):]
return prefix, path
def get_bytes(self, resource):
return self.loader.get_data(resource.path)
def get_stream(self, resource):
return io.BytesIO(self.get_bytes(resource))
def get_size(self, resource):
path = resource.path[self.prefix_len:]
return self._files[path][3]
def get_resources(self, resource):
path = resource.path[self.prefix_len:]
if path and path[-1] != os.sep:
path += os.sep
plen = len(path)
result = set()
i = bisect.bisect(self.index, path)
while i < len(self.index):
if not self.index[i].startswith(path):
break
s = self.index[i][plen:]
result.add(s.split(os.sep, 1)[0]) # only immediate children
i += 1
return result
def _is_directory(self, path):
path = path[self.prefix_len:]
if path and path[-1] != os.sep:
path += os.sep
i = bisect.bisect(self.index, path)
try:
result = self.index[i].startswith(path)
except IndexError:
result = False
return result
_finder_registry = {
type(None): ResourceFinder,
zipimport.zipimporter: ZipResourceFinder
}
try:
import _frozen_importlib
_finder_registry[_frozen_importlib.SourceFileLoader] = ResourceFinder
_finder_registry[_frozen_importlib.FileFinder] = ResourceFinder
except (ImportError, AttributeError):
pass
def register_finder(loader, finder_maker):
_finder_registry[type(loader)] = finder_maker
_finder_cache = {}
def finder(package):
"""
Return a resource finder for a package.
:param package: The name of the package.
:return: A :class:`ResourceFinder` instance for the package.
"""
if package in _finder_cache:
result = _finder_cache[package]
else:
if package not in sys.modules:
__import__(package)
module = sys.modules[package]
path = getattr(module, '__path__', None)
if path is None:
raise DistlibException('You cannot get a finder for a module, '
'only for a package')
loader = getattr(module, '__loader__', None)
finder_maker = _finder_registry.get(type(loader))
if finder_maker is None:
raise DistlibException('Unable to locate finder for %r' % package)
result = finder_maker(module)
_finder_cache[package] = result
return result
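# Usage sketch built from the API defined above ('mypkg' and its data file
# are hypothetical names):
#     f = finder('mypkg')              # must name a package, not a module
#     res = f.find('data/cfg.ini')     # Resource, ResourceContainer or None
#     if res is not None and not res.is_container:
#         data = res.bytes             # read via the finder
#         path = res.file_path         # materialized through ResourceCache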
_dummy_module = types.ModuleType(str('__dummy__'))
def finder_for_path(path):
"""
Return a resource finder for a path, which should represent a container.
:param path: The path.
:return: A :class:`ResourceFinder` instance for the path.
"""
result = None
# calls any path hooks, gets importer into cache
pkgutil.get_importer(path)
loader = sys.path_importer_cache.get(path)
finder = _finder_registry.get(type(loader))
if finder:
module = _dummy_module
module.__file__ = os.path.join(path, '')
module.__loader__ = loader
result = finder(module)
return result
|
sebrandon1/neutron
|
refs/heads/master
|
neutron/tests/unit/extensions/test_servicetype.py
|
2
|
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron_lib import exceptions as n_exc
from oslo_config import cfg
import webob.exc as webexc
import webtest
from neutron.api import extensions
from neutron import context
from neutron.db.models import servicetype as st_model
from neutron.db import servicetype_db as st_db
from neutron.extensions import servicetype
from neutron.plugins.common import constants
from neutron.services import provider_configuration as provconf
from neutron.tests.unit.api import test_extensions
from neutron.tests.unit.api.v2 import test_base
from neutron.tests.unit.db import test_db_base_plugin_v2
from neutron.tests.unit import dummy_plugin as dp
from neutron.tests.unit import testlib_api
_uuid = test_base._uuid
_get_path = test_base._get_path
class ServiceTypeManagerTestCase(testlib_api.SqlTestCase):
def setUp(self):
self.service_providers = mock.patch.object(
provconf.NeutronModule, 'service_providers').start()
super(ServiceTypeManagerTestCase, self).setUp()
self.ctx = context.get_admin_context()
def _set_override(self, service_providers):
self.service_providers.return_value = service_providers
st_db.ServiceTypeManager._instance = None
self.manager = st_db.ServiceTypeManager.get_instance()
for provider in service_providers:
self.manager.add_provider_configuration(
provider.split(':')[0], provconf.ProviderConfiguration())
def test_service_provider_driver_not_unique(self):
self._set_override([constants.LOADBALANCER + ':lbaas:driver'])
prov = {'service_type': constants.LOADBALANCER,
'name': 'name2',
'driver': 'driver',
'default': False}
self.assertRaises(
n_exc.Invalid,
self.manager.config['LOADBALANCER'].add_provider, prov)
def test_get_service_providers(self):
"""Test that get_service_providers filters correctly."""
self._set_override(
[constants.LOADBALANCER +
':lbaas:driver_path1',
constants.FIREWALL +
':fwaas:driver_path2'])
ctx = context.get_admin_context()
res = self.manager.get_service_providers(
ctx,
filters=dict(service_type=[constants.LOADBALANCER])
)
self.assertEqual(1, len(res))
res = self.manager.get_service_providers(
ctx,
filters=dict(service_type=[constants.FIREWALL])
)
self.assertEqual(1, len(res))
def test_multiple_default_providers_specified_for_service(self):
self.assertRaises(
n_exc.Invalid,
self._set_override,
[constants.LOADBALANCER +
':lbaas1:driver_path:default',
constants.LOADBALANCER +
':lbaas2:driver_path:default'])
def test_get_default_provider(self):
self._set_override([constants.LOADBALANCER +
':lbaas1:driver_path:default',
constants.DUMMY +
':lbaas2:driver_path2'])
# can pass None as a context
p = self.manager.get_default_service_provider(None,
constants.LOADBALANCER)
self.assertEqual({'service_type': constants.LOADBALANCER,
'name': 'lbaas1',
'driver': 'driver_path',
'default': True}, p)
self.assertRaises(
provconf.DefaultServiceProviderNotFound,
self.manager.get_default_service_provider,
None, constants.DUMMY
)
def test_get_provider_names_by_resource_ids(self):
self._set_override([constants.DUMMY + ':dummy1:driver_path',
constants.DUMMY + ':dummy2:driver_path2'])
ctx = context.get_admin_context()
self.manager.add_resource_association(ctx, constants.DUMMY,
'dummy1', '1')
self.manager.add_resource_association(ctx, constants.DUMMY,
'dummy1', '2')
self.manager.add_resource_association(ctx, constants.DUMMY,
'dummy2', '3')
names_by_id = self.manager.get_provider_names_by_resource_ids(
ctx, ['1', '2', '3', '4'])
# unmatched IDs will be excluded from the result
self.assertEqual({'1': 'dummy1', '2': 'dummy1', '3': 'dummy2'},
names_by_id)
def test_add_resource_association(self):
self._set_override([constants.LOADBALANCER +
':lbaas1:driver_path:default',
constants.DUMMY +
':lbaas2:driver_path2'])
ctx = context.get_admin_context()
self.manager.add_resource_association(ctx,
constants.LOADBALANCER,
'lbaas1', '123-123')
self.assertEqual(ctx.session.
query(st_model.ProviderResourceAssociation).count(),
1)
assoc = ctx.session.query(st_model.ProviderResourceAssociation).one()
ctx.session.delete(assoc)
def test_invalid_resource_association(self):
self._set_override([constants.LOADBALANCER +
':lbaas1:driver_path:default',
constants.DUMMY +
':lbaas2:driver_path2'])
ctx = context.get_admin_context()
self.assertRaises(provconf.ServiceProviderNotFound,
self.manager.add_resource_association,
ctx, 'BLABLA_svc', 'name', '123-123')
class TestServiceTypeExtensionManager(object):
"""Mock extensions manager."""
def get_resources(self):
return (servicetype.Servicetype.get_resources() +
dp.Dummy.get_resources())
def get_actions(self):
return []
def get_request_extensions(self):
return []
class ServiceTypeExtensionTestCaseBase(testlib_api.WebTestCase):
fmt = 'json'
def setUp(self):
        # This is needed because otherwise a failure will occur due to a
        # nonexistent core_plugin
self.setup_coreplugin(test_db_base_plugin_v2.DB_PLUGIN_KLASS)
cfg.CONF.set_override('service_plugins',
["%s.%s" % (dp.__name__,
dp.DummyServicePlugin.__name__)])
# Ensure existing ExtensionManager is not used
extensions.PluginAwareExtensionManager._instance = None
ext_mgr = TestServiceTypeExtensionManager()
self.ext_mdw = test_extensions.setup_extensions_middleware(ext_mgr)
self.api = webtest.TestApp(self.ext_mdw)
self.resource_name = servicetype.RESOURCE_NAME.replace('-', '_')
super(ServiceTypeExtensionTestCaseBase, self).setUp()
class ServiceTypeExtensionTestCase(ServiceTypeExtensionTestCaseBase):
def setUp(self):
self._patcher = mock.patch(
"neutron.db.servicetype_db.ServiceTypeManager",
autospec=True)
self.mock_mgr = self._patcher.start()
self.mock_mgr.get_instance.return_value = self.mock_mgr.return_value
super(ServiceTypeExtensionTestCase, self).setUp()
def test_service_provider_list(self):
instance = self.mock_mgr.return_value
res = self.api.get(_get_path('service-providers', fmt=self.fmt))
instance.get_service_providers.assert_called_with(mock.ANY,
filters={},
fields=[])
self.assertEqual(webexc.HTTPOk.code, res.status_int)
class ServiceTypeManagerExtTestCase(ServiceTypeExtensionTestCaseBase):
"""Tests ServiceTypemanager as a public API."""
def setUp(self):
self.service_providers = mock.patch.object(
provconf.NeutronModule, 'service_providers').start()
service_providers = [
constants.LOADBALANCER + ':lbaas:driver_path',
constants.DUMMY + ':dummy:dummy_dr'
]
self.service_providers.return_value = service_providers
# Blank out service type manager instance
st_db.ServiceTypeManager._instance = None
self.manager = st_db.ServiceTypeManager.get_instance()
for provider in service_providers:
self.manager.add_provider_configuration(
provider.split(':')[0], provconf.ProviderConfiguration())
super(ServiceTypeManagerExtTestCase, self).setUp()
def _list_service_providers(self):
return self.api.get(_get_path('service-providers', fmt=self.fmt))
def test_list_service_providers(self):
res = self._list_service_providers()
self.assertEqual(webexc.HTTPOk.code, res.status_int)
data = self.deserialize(res)
self.assertIn('service_providers', data)
self.assertGreaterEqual(len(data['service_providers']), 2)
|
thevirtualfer/c172p-detailed
|
refs/heads/master
|
Models/Interior/Panel/Instruments/asi/asi.py
|
15
|
#!/usr/bin/env python
from svginstr import *
import sys
__author__ = "Melchior FRANZ < mfranz # aon : at >"
__url__ = "http://gitorious.org/svginstr/"
__version__ = "0.2"
__license__ = "GPL v2+"
__doc__ = """
"""
try:
a = Instrument("asi.svg", 512, 512, "test face; " + __version__)
a.disc(98, color = 'black')
a.disc(1)
    # define the mapping from scale value to needle angle: the dial spans
    # 300 degrees over an effective 115-unit scale (values above 100 are
    # drawn slightly compressed, see `compress` below)
a.angle = lambda x: x * 300.0 / 115.0 - 145.0
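    # Worked example of the mapping (illustrative numbers, not from the
    # original): 35 kt -> 35 * 300/115 - 145 = -53.7 deg, and
    # 150 kt -> 150 * 300/115 - 145 = 246.3 deg, i.e. 300 deg of sweep.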
# inside line
l = 50
# compression
compress = 0.8
a.arc(44, 100 + 27 * compress, l+3, width = 8, color = "green")
a.arc(100 + 27 * compress, 100 + 58 * compress, l+3, width = 8, color = "yellow")
a.arc(33, 85, l, width = 3, color = "white")
for i in range(35, 100, 5):
a.tick(i, l, 58, 2)
for i in range(100, 152, int(5 * compress + 0.5)):
a.tick(i, l, 58, 2)
for i in range(40, 100, 10):
a.tick(i, l, 65, 2)
for i in range(100, 155, int(10 * compress + 0.5)):
a.tick(i, l, 65, 2)
a.tick(100 + 58 * compress, l, 60, color="red")
# mph conversion
mph = 0.8689
k = 30
for i in range(40, 100, int(10 * mph)):
a.tick(i, k, k + 6, 1)
for i in range(40 + int(65 * mph), 150, int(10 * mph * compress)):
a.tick(i, k, k + 6, 1)
# fc-list tells you the names of available fonts on Linux (fc ... font cache)
s = 13
a.at(0,-70).text("AIRSPEED", size = 10, font_family = "Lucida Sans", color = "white")
a.at(0,-55).text("KNOTS", size = 10, font_family = "Lucida Sans", color = "white")
a.at(60,-42).text(40, size = s, font_family = "Lucida Sans", color = "white")
a.at(75,20).text(60, size = s, font_family = "Lucida Sans", color = "white")
a.at(35,72).text(80, size = s, font_family = "Lucida Sans", color = "white")
a.at(-40,72).text(100, size = s, font_family = "Lucida Sans", color = "white")
a.at(-75,30).text(120, size = s, font_family = "Lucida Sans", color = "white")
a.at(-75,-20).text(140, size = s, font_family = "Lucida Sans", color = "white")
a.at(-45,-57).text(160, size = s, font_family = "Lucida Sans", color = "white")
# mph markings
s = 7
a.at(16,-18).text(40, size = s, font_family = "Lucida Sans", color = "white")
a.at(22,2).text(60, size = s, font_family = "Lucida Sans", color = "white")
a.at(16,20).text(80, size = s, font_family = "Lucida Sans", color = "white")
a.at(0,27).text(100, size = s, font_family = "Lucida Sans", color = "white")
a.at(-14,22).text(120, size = s, font_family = "Lucida Sans", color = "white")
a.at(-19,13).text(140, size = s, font_family = "Lucida Sans", color = "white")
a.at(-22,0).text(160, size = s, font_family = "Lucida Sans", color = "white")
a.at(-17,-14).text(180, size = s, font_family = "Lucida Sans", color = "white")
a.at(0,-20).text("MPH", size = s, font_family = "Lucida Sans", color = "white")
#a.at(75,20).text(60, size = s, font_family = "Lucida Sans", color = "white")
#a.at(35,72).text(80, size = s, font_family = "Lucida Sans", color = "white")
#a.at(-40,72).text(100, size = s, font_family = "Lucida Sans", color = "white")
#a.at(-75,30).text(120, size = s, font_family = "Lucida Sans", color = "white")
#a.at(-75,-20).text(140, size = s, font_family = "Lucida Sans", color = "white")
#a.at(-45,-55).text(160, size = s, font_family = "Lucida Sans", color = "white")
except Error as e:
print >>sys.stderr, "\033[31;1m%s\033[m\n" % e
|
sdrogers/ms2ldaviz
|
refs/heads/master
|
ms2ldaviz/basicviz/migrations/0025_vizoptions_colour_topic_by_score.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('basicviz', '0024_extrausers'),
]
operations = [
migrations.AddField(
model_name='vizoptions',
name='colour_topic_by_score',
field=models.BooleanField(default=False),
preserve_default=False,
),
]
|
GoogleCloudPlatform/PerfKitBenchmarker
|
refs/heads/master
|
tests/providers/openstack/os_virtual_machine_test.py
|
1
|
"""Tests for perfkitbenchmarker.providers.openstack.os_virtual_machine_test."""
import unittest
from absl import flags
import mock
from perfkitbenchmarker import benchmark_spec
from perfkitbenchmarker import configs
from perfkitbenchmarker import errors
from perfkitbenchmarker import linux_benchmarks
from perfkitbenchmarker.configs import benchmark_config_spec
from perfkitbenchmarker.providers.openstack import os_virtual_machine
from tests import pkb_common_test_case
FLAGS = flags.FLAGS
_BENCHMARK_NAME = 'iperf'
_URI = 'uri45678'
_CFG_DEFAULT_DEFAULT = """
iperf:
vm_groups:
vm_1:
cloud: OpenStack
vm_spec:
OpenStack:
image: test-image
machine_type: test_machine_type
disable_interrupt_moderation: False
"""
_network_true = {'router:external': True}
_network_external = {'router:external': 'External'}
_network_fail = {'router:external': 'Fail'}
class TestOpenStackVirtualMachine(pkb_common_test_case.TestOsMixin,
os_virtual_machine.OpenStackVirtualMachine):
pass
class BaseOpenStackNetworkTest(pkb_common_test_case.PkbCommonTestCase):
def _CreateBenchmarkSpecFromYaml(self, yaml_string,
benchmark_name=_BENCHMARK_NAME):
config = configs.LoadConfig(yaml_string, {}, benchmark_name)
spec = self._CreateBenchmarkSpecFromConfigDict(config, benchmark_name)
spec.disable_interrupt_moderation = False
spec.disable_rss = False
spec.zone = 'test-zone'
spec.cidr = '192.164.1.0/24'
spec.machine_type = 'Test_machine_type'
spec.gpu_count = '1'
spec.gpu_type = 'test-gpu-type'
spec.image = 'test-image'
spec.install_packages = 'None'
spec.background_cpu_threads = 'None'
spec.background_network_mbits_per_sec = '1'
spec.background_network_ip_type = 'None'
spec.vm_metadata = {}
return spec
def _CreateBenchmarkSpecFromConfigDict(self, config_dict, benchmark_name):
config_spec = benchmark_config_spec.BenchmarkConfigSpec(
benchmark_name,
flag_values=FLAGS,
**config_dict)
benchmark_module = next((b for b in linux_benchmarks.BENCHMARKS
if b.BENCHMARK_NAME == benchmark_name))
return benchmark_spec.BenchmarkSpec(benchmark_module, config_spec, _URI)
def _CreateTestOpenStackVm(self):
spec = self._CreateBenchmarkSpecFromYaml(_CFG_DEFAULT_DEFAULT)
return TestOpenStackVirtualMachine(spec)
class OpenStackVirtualMachineTest(BaseOpenStackNetworkTest):
def setUp(self):
super(OpenStackVirtualMachineTest, self).setUp()
self.mock_check_network_exists = self.enter_context(mock.patch.object(
os_virtual_machine.OpenStackVirtualMachine,
'_CheckNetworkExists'))
FLAGS.ignore_package_requirements = True
self.openstack_vm = self._CreateTestOpenStackVm()
def test_CheckFloatingIPNetworkExistsWithTrue(self):
self.mock_check_network_exists.return_value = _network_true
network = self.openstack_vm._CheckFloatingIPNetworkExists('External')
self.assertEqual(_network_true, network)
def test_CheckFloatingIPNetworkExistsWithExternal(self):
self.mock_check_network_exists.return_value = _network_external
network = self.openstack_vm._CheckFloatingIPNetworkExists('External')
self.assertEqual(_network_external, network)
def test_CheckFloatingIPNetworkExistsWithFail(self):
self.mock_check_network_exists.return_value = _network_fail
with self.assertRaises(errors.Config.InvalidValue):
self.openstack_vm._CheckFloatingIPNetworkExists('External')
if __name__ == '__main__':
unittest.main()
|
openstack/glance_store
|
refs/heads/master
|
doc/source/conf.py
|
1
|
# Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['openstackdocstheme',
'sphinxcontrib.apidoc']
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/glance_store'
openstackdocs_auto_name = False
openstackdocs_bug_project = 'glance-store'
openstackdocs_bug_tag = ''
# sphinxcontrib.apidoc options
apidoc_module_dir = '../../glance_store'
apidoc_output_dir = 'reference/api'
apidoc_excluded_paths = [
'test',
'tests/*']
apidoc_separate_modules = True
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# Add any paths that contain templates here, relative to this directory.
# templates_path = []
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'glance_store'
copyright = u'2014, OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']
html_theme = 'openstackdocs'
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
modindex_common_prefix = ['glance_store.']
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index',
'%s.tex' % project,
'%s Documentation' % project,
'OpenStack Foundation', 'manual'),
]
# The autodoc module imports every module to check for import
# errors. Since the fs_mount module is self initializing, it
# requires configurations that aren't loaded till that time.
# It would never happen in a real scenario as it is only imported
# from cinder store after the config are loaded but to handle doc
# failures, we mock it here.
autodoc_mock_imports = ['glance_store.common.fs_mount']
|
DICL/cassandra
|
refs/heads/trunk
|
pylib/cqlshlib/test/test_cqlsh_parsing.py
|
86
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# to configure behavior, define $CQL_TEST_HOST to the destination address
# for Thrift connections, and $CQL_TEST_PORT to the associated port.
from unittest import TestCase
class TestCqlshParsing(TestCase):
def test_describe(self):
pass
|
andela-ifageyinbo/django
|
refs/heads/master
|
django/utils/deprecation.py
|
34
|
from __future__ import absolute_import
import inspect
import warnings
class RemovedInDjango20Warning(PendingDeprecationWarning):
pass
RemovedInNextVersionWarning = DeprecationWarning
class warn_about_renamed_method(object):
def __init__(self, class_name, old_method_name, new_method_name, deprecation_warning):
self.class_name = class_name
self.old_method_name = old_method_name
self.new_method_name = new_method_name
self.deprecation_warning = deprecation_warning
def __call__(self, f):
def wrapped(*args, **kwargs):
warnings.warn(
"`%s.%s` is deprecated, use `%s` instead." %
(self.class_name, self.old_method_name, self.new_method_name),
self.deprecation_warning, 2)
return f(*args, **kwargs)
return wrapped
class RenameMethodsBase(type):
"""
Handles the deprecation paths when renaming a method.
It does the following:
1) Define the new method if missing and complain about it.
2) Define the old method if missing.
3) Complain whenever an old method is called.
See #15363 for more details.
"""
renamed_methods = ()
def __new__(cls, name, bases, attrs):
new_class = super(RenameMethodsBase, cls).__new__(cls, name, bases, attrs)
for base in inspect.getmro(new_class):
class_name = base.__name__
for renamed_method in cls.renamed_methods:
old_method_name = renamed_method[0]
old_method = base.__dict__.get(old_method_name)
new_method_name = renamed_method[1]
new_method = base.__dict__.get(new_method_name)
deprecation_warning = renamed_method[2]
wrapper = warn_about_renamed_method(class_name, *renamed_method)
# Define the new method if missing and complain about it
if not new_method and old_method:
warnings.warn(
"`%s.%s` method should be renamed `%s`." %
(class_name, old_method_name, new_method_name),
deprecation_warning, 2)
setattr(base, new_method_name, old_method)
setattr(base, old_method_name, wrapper(old_method))
# Define the old method as a wrapped call to the new method.
if not old_method and new_method:
setattr(base, old_method_name, wrapper(new_method))
return new_class
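# Hedged usage sketch (not part of this module): a concrete metaclass lists
# the renames it manages and is then applied to the class being deprecated:
#
#     class RenameManagerMethods(RenameMethodsBase):
#         renamed_methods = (
#             ('old_method', 'new_method', RemovedInDjango20Warning),
#         )
#
# Classes built with this metaclass then accept either name, warning whenever
# the old one is defined or called.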
|
VHAINNOVATIONS/DmD
|
refs/heads/master
|
scrubber/MIST_2_0_4/src/CherryPy-3.1.2/cherrypy/process/servers.py
|
13
|
"""Adapt an HTTP server."""
import time
class ServerAdapter(object):
"""Adapter for an HTTP server.
If you need to start more than one HTTP server (to serve on multiple
ports, or protocols, etc.), you can manually register each one and then
start them all with bus.start:
s1 = ServerAdapter(bus, MyWSGIServer(host='0.0.0.0', port=80))
s2 = ServerAdapter(bus, another.HTTPServer(host='127.0.0.1', SSL=True))
s1.subscribe()
s2.subscribe()
bus.start()
"""
def __init__(self, bus, httpserver=None, bind_addr=None):
self.bus = bus
self.httpserver = httpserver
self.bind_addr = bind_addr
self.interrupt = None
self.running = False
def subscribe(self):
self.bus.subscribe('start', self.start)
self.bus.subscribe('stop', self.stop)
def unsubscribe(self):
self.bus.unsubscribe('start', self.start)
self.bus.unsubscribe('stop', self.stop)
def start(self):
"""Start the HTTP server."""
if isinstance(self.bind_addr, tuple):
host, port = self.bind_addr
on_what = "%s:%s" % (host, port)
else:
on_what = "socket file: %s" % self.bind_addr
if self.running:
self.bus.log("Already serving on %s" % on_what)
return
self.interrupt = None
if not self.httpserver:
raise ValueError("No HTTP server has been created.")
# Start the httpserver in a new thread.
if isinstance(self.bind_addr, tuple):
wait_for_free_port(*self.bind_addr)
import threading
t = threading.Thread(target=self._start_http_thread)
t.setName("HTTPServer " + t.getName())
t.start()
self.wait()
self.running = True
self.bus.log("Serving on %s" % on_what)
start.priority = 75
def _start_http_thread(self):
"""HTTP servers MUST be running in new threads, so that the
main thread persists to receive KeyboardInterrupt's. If an
exception is raised in the httpserver's thread then it's
trapped here, and the bus (and therefore our httpserver)
are shut down.
"""
try:
self.httpserver.start()
except KeyboardInterrupt, exc:
self.bus.log("<Ctrl-C> hit: shutting down HTTP server")
self.interrupt = exc
self.bus.exit()
except SystemExit, exc:
self.bus.log("SystemExit raised: shutting down HTTP server")
self.interrupt = exc
self.bus.exit()
raise
except:
import sys
self.interrupt = sys.exc_info()[1]
self.bus.log("Error in HTTP server: shutting down",
traceback=True, level=40)
self.bus.exit()
raise
def wait(self):
"""Wait until the HTTP server is ready to receive requests."""
while not getattr(self.httpserver, "ready", False):
if self.interrupt:
raise self.interrupt
time.sleep(.1)
# Wait for port to be occupied
if isinstance(self.bind_addr, tuple):
host, port = self.bind_addr
wait_for_occupied_port(host, port)
def stop(self):
"""Stop the HTTP server."""
if self.running:
# stop() MUST block until the server is *truly* stopped.
self.httpserver.stop()
# Wait for the socket to be truly freed.
if isinstance(self.bind_addr, tuple):
wait_for_free_port(*self.bind_addr)
self.running = False
self.bus.log("HTTP Server %s shut down" % self.httpserver)
else:
self.bus.log("HTTP Server %s already shut down" % self.httpserver)
stop.priority = 25
def restart(self):
"""Restart the HTTP server."""
self.stop()
self.start()
class FlupFCGIServer(object):
"""Adapter for a flup.server.fcgi.WSGIServer."""
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.ready = False
def start(self):
"""Start the FCGI server."""
# We have to instantiate the server class here because its __init__
# starts a threadpool. If we do it too early, daemonize won't work.
from flup.server.fcgi import WSGIServer
self.fcgiserver = WSGIServer(*self.args, **self.kwargs)
# TODO: report this bug upstream to flup.
# If we don't set _oldSIGs on Windows, we get:
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
# line 108, in run
# self._restoreSignalHandlers()
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
# line 156, in _restoreSignalHandlers
# for signum,handler in self._oldSIGs:
# AttributeError: 'WSGIServer' object has no attribute '_oldSIGs'
self.fcgiserver._installSignalHandlers = lambda: None
self.fcgiserver._oldSIGs = []
self.ready = True
self.fcgiserver.run()
def stop(self):
"""Stop the HTTP server."""
# Forcibly stop the fcgi server main event loop.
self.fcgiserver._keepGoing = False
# Force all worker threads to die off.
self.fcgiserver._threadPool.maxSpare = self.fcgiserver._threadPool._idleCount
self.ready = False
class FlupSCGIServer(object):
"""Adapter for a flup.server.scgi.WSGIServer."""
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.ready = False
def start(self):
"""Start the SCGI server."""
# We have to instantiate the server class here because its __init__
# starts a threadpool. If we do it too early, daemonize won't work.
from flup.server.scgi import WSGIServer
self.scgiserver = WSGIServer(*self.args, **self.kwargs)
# TODO: report this bug upstream to flup.
# If we don't set _oldSIGs on Windows, we get:
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
# line 108, in run
# self._restoreSignalHandlers()
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
# line 156, in _restoreSignalHandlers
# for signum,handler in self._oldSIGs:
# AttributeError: 'WSGIServer' object has no attribute '_oldSIGs'
self.scgiserver._installSignalHandlers = lambda: None
self.scgiserver._oldSIGs = []
self.ready = True
self.scgiserver.run()
def stop(self):
"""Stop the HTTP server."""
self.ready = False
# Forcibly stop the scgi server main event loop.
self.scgiserver._keepGoing = False
# Force all worker threads to die off.
self.scgiserver._threadPool.maxSpare = 0
def client_host(server_host):
"""Return the host on which a client can connect to the given listener."""
if server_host == '0.0.0.0':
# 0.0.0.0 is INADDR_ANY, which should answer on localhost.
return '127.0.0.1'
if server_host == '::':
# :: is IN6ADDR_ANY, which should answer on localhost.
return '::1'
return server_host
def check_port(host, port, timeout=1.0):
"""Raise an error if the given port is not free on the given host."""
if not host:
raise ValueError("Host values of '' or None are not allowed.")
host = client_host(host)
port = int(port)
import socket
# AF_INET or AF_INET6 socket
# Get the correct address family for our host (allows IPv6 addresses)
for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC,
socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
s = None
try:
s = socket.socket(af, socktype, proto)
# See http://groups.google.com/group/cherrypy-users/
# browse_frm/thread/bbfe5eb39c904fe0
s.settimeout(timeout)
s.connect((host, port))
s.close()
raise IOError("Port %s is in use on %s; perhaps the previous "
"httpserver did not shut down properly." %
(repr(port), repr(host)))
except socket.error:
if s:
s.close()
def wait_for_free_port(host, port):
"""Wait for the specified port to become free (drop requests)."""
if not host:
raise ValueError("Host values of '' or None are not allowed.")
for trial in xrange(50):
try:
# we are expecting a free port, so reduce the timeout
check_port(host, port, timeout=0.1)
except IOError:
# Give the old server thread time to free the port.
time.sleep(0.1)
else:
return
raise IOError("Port %r not free on %r" % (port, host))
def wait_for_occupied_port(host, port):
"""Wait for the specified port to become active (receive requests)."""
if not host:
raise ValueError("Host values of '' or None are not allowed.")
for trial in xrange(50):
try:
check_port(host, port)
except IOError:
return
else:
time.sleep(.1)
raise IOError("Port %r not bound on %r" % (port, host))
|
totto82/opm-common
|
refs/heads/master
|
python/tests/test_emodel.py
|
5
|
import unittest
import sys
import numpy as np
from opm.util import EModel
try:
from tests.utils import test_path
except ImportError:
from utils import test_path
class TestEModel(unittest.TestCase):
def test_open_model(self):
refArrList = ["PORV", "CELLVOL", "DEPTH", "DX", "DY", "DZ", "PORO", "PERMX", "PERMY", "PERMZ", "NTG", "TRANX",
"TRANY", "TRANZ", "ACTNUM", "ENDNUM", "EQLNUM", "FIPNUM", "FLUXNUM", "IMBNUM", "PVTNUM",
"SATNUM", "SWL", "SWCR", "SGL", "SGU", "ISWL", "ISWCR", "ISGL", "ISGU", "PPCW", "PRESSURE",
"RS", "RV", "SGAS", "SWAT", "SOMAX", "SGMAX"]
self.assertRaises(RuntimeError, EModel, "/file/that/does_not_exists")
self.assertRaises(ValueError, EModel, test_path("data/9_EDITNNC.EGRID"))
self.assertRaises(ValueError, EModel, test_path("data/9_EDITNNC.UNRST"))
mod1 = EModel(test_path("data/9_EDITNNC.INIT"))
arrayList = mod1.get_list_of_arrays()
for n, element in enumerate(arrayList):
self.assertEqual(element[0], refArrList[n])
celvol1 = mod1.get("CELLVOL")
self.assertEqual(len(celvol1), 2794)
def test_add_filter(self):
mod1 = EModel(test_path("data/9_EDITNNC.INIT"))
celvol1 = mod1.get("CELLVOL")
depth1 = mod1.get("DEPTH")
self.assertTrue(isinstance(celvol1, np.ndarray))
self.assertEqual(celvol1.dtype, "float32")
refVol1 = 2.79083e8
self.assertTrue( abs((sum(celvol1) - refVol1)/refVol1) < 1.0e-5)
mod1.add_filter("EQLNUM","eq", 1);
mod1.add_filter("DEPTH","lt", 2645.21);
refVol2 = 1.08876e8
refPorvVol2 = 2.29061e7
porv2 = mod1.get("PORV")
celvol2 = mod1.get("CELLVOL")
self.assertTrue( abs((sum(celvol2) - refVol2)/refVol2) < 1.0e-5)
self.assertTrue( abs((sum(porv2) - refPorvVol2)/refPorvVol2) < 1.0e-5)
mod1.reset_filter()
mod1.add_filter("EQLNUM","eq", 2);
mod1.add_filter("DEPTH","in", 2584.20, 2685.21);
refPorvVol3 = 3.34803e7
porv3 = mod1.get("PORV")
self.assertTrue( abs((sum(porv3) - refPorvVol3)/refPorvVol3) < 1.0e-5)
mod1.reset_filter()
mod1.add_filter("I","lt", 10);
mod1.add_filter("J","between", 3, 15);
mod1.add_filter("K","between", 2, 9);
poro = mod1.get("PORO")
self.assertEqual(len(poro), 495)
    def test_parameters(self):
mod1 = EModel(test_path("data/9_EDITNNC.INIT"))
self.assertFalse("XXX" in mod1)
self.assertTrue("PORV" in mod1)
self.assertTrue("PRESSURE" in mod1)
self.assertTrue("RS" in mod1)
self.assertTrue("RV" in mod1)
self.assertEqual(mod1.active_report_step(), 0)
rsteps = mod1.get_report_steps()
self.assertEqual(rsteps, [0, 4, 7, 10, 15, 20, 27, 32, 36, 39])
mod1.set_report_step(7)
        # parameters RS and RV are missing in report step number 7
self.assertFalse("RS" in mod1)
self.assertFalse("RV" in mod1)
mod1.set_report_step(15)
self.assertTrue("RS" in mod1)
self.assertTrue("RV" in mod1)
arrayList = mod1.get_list_of_arrays()
def test_rsteps_steps(self):
pres_ref_4_1_10 = [272.608, 244.461, 228.503, 214.118, 201.147, 194.563, 178.02, 181.839, 163.465, 148.677]
mod1 = EModel(test_path("data/9_EDITNNC.INIT"))
mod1.add_filter("I","eq", 4);
mod1.add_filter("J","eq", 1);
mod1.add_filter("K","eq", 10);
self.assertTrue(mod1.has_report_step(4))
self.assertFalse(mod1.has_report_step(2))
rsteps = mod1.get_report_steps()
for n, step in enumerate(rsteps):
mod1.set_report_step(step)
pres = mod1.get("PRESSURE")
self.assertTrue(abs(pres[0] - pres_ref_4_1_10[n])/pres_ref_4_1_10[n] < 1.0e-5)
def test_grid_props(self):
mod1 = EModel(test_path("data/9_EDITNNC.INIT"))
nI,nJ,nK = mod1.grid_dims()
self.assertEqual((nI,nJ,nK), (13, 22, 11))
nAct = mod1.active_cells()
self.assertEqual(nAct, 2794)
def test_hc_filter(self):
nAct_hc_eqln1 = 1090
nAct_hc_eqln2 = 1694
mod1 = EModel(test_path("data/9_EDITNNC.INIT"))
porv = mod1.get("PORV")
mod1.set_depth_fwl([2645.21, 2685.21])
mod1.add_hc_filter()
porv = mod1.get("PORV")
self.assertEqual(len(porv), nAct_hc_eqln1 + nAct_hc_eqln2)
mod1.reset_filter()
mod1.add_filter("EQLNUM","eq", 1);
mod1.add_filter("DEPTH","lt", 2645.21);
porv1 = mod1.get("PORV")
self.assertEqual(len(porv1), nAct_hc_eqln1)
mod1.reset_filter()
mod1.add_filter("EQLNUM","eq", 2);
mod1.add_filter("DEPTH","lt", 2685.21);
porv2 = mod1.get("PORV")
self.assertEqual(len(porv2), nAct_hc_eqln2)
ivect = mod1.get("I")
if __name__ == "__main__":
unittest.main()
|
angelapper/edx-platform
|
refs/heads/master
|
lms/djangoapps/courseware/features/registration.py
|
5
|
# pylint: disable=missing-docstring
import time
from lettuce import step, world
from lettuce.django import django_url
@step('I register for the course "([^"]*)"$')
def i_register_for_the_course(_step, course):
url = django_url('courses/%s/about' % world.scenario_dict['COURSE'].id.to_deprecated_string())
world.browser.visit(url)
world.css_click('.intro a.register')
assert world.is_css_present('.container.dashboard')
@step('I register to audit the course$')
def i_register_to_audit_the_course(_step):
url = django_url('courses/%s/about' % world.scenario_dict['COURSE'].id.to_deprecated_string())
world.browser.visit(url)
world.css_click('.intro a.register')
# When the page first loads some animation needs to
# complete before this button is in a stable location
world.retry_on_exception(
lambda: world.browser.find_by_name("honor_mode").click(),
max_attempts=10,
ignored_exceptions=AttributeError
)
time.sleep(1)
assert world.is_css_present('.container.dashboard')
@step(u'I should see an empty dashboard message')
def i_should_see_empty_dashboard(_step):
empty_dash_css = '.empty-dashboard-message'
assert world.is_css_present(empty_dash_css)
@step(u'I should( NOT)? see the course numbered "([^"]*)" in my dashboard$')
def i_should_see_that_course_in_my_dashboard(_step, doesnt_appear, course):
course_link_css = '.my-courses a[href*="%s"]' % course
if doesnt_appear:
assert world.is_css_not_present(course_link_css)
else:
assert world.is_css_present(course_link_css)
@step(u'I unenroll from the course numbered "([^"]*)"')
def i_unenroll_from_that_course(_step, course):
more_actions_dropdown_link_selector = '[id*=actions-dropdown-link-0]'
assert world.is_css_present(more_actions_dropdown_link_selector)
world.css_click(more_actions_dropdown_link_selector)
unregister_css = 'li.actions-item a.action-unenroll[data-course-number*="{course_number}"][href*=unenroll-modal]'.format(course_number=course)
assert world.is_css_present(unregister_css)
world.css_click(unregister_css)
button_css = '#unenroll-modal input[value="Unenroll"]'
assert world.is_css_present(button_css)
world.css_click(button_css)
|
resmo/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/fortimanager/fmgr_script.py
|
39
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_script
version_added: "2.5"
notes:
- Full Documentation at U(https://ftnt-ansible-docs.readthedocs.io/en/latest/).
author: Andrew Welsh (@Ghilli3)
short_description: Add/Edit/Delete and execute scripts
description: Create/edit/delete scripts and execute them on the FortiManager using the JSON-RPC API
options:
adom:
description:
      - The administrative domain (adom) the configuration belongs to
required: true
vdom:
description:
- The virtual domain (vdom) the configuration belongs to
mode:
description:
- The desired mode of the specified object. Execute will run the script.
required: false
default: "add"
choices: ["add", "delete", "execute", "set"]
version_added: "2.8"
script_name:
description:
- The name of the script.
required: True
script_type:
description:
- The type of script (CLI or TCL).
required: false
script_target:
description:
- The target of the script to be run.
required: false
script_description:
description:
- The description of the script.
required: false
script_content:
description:
- The script content that will be executed.
required: false
script_scope:
description:
      - (datasource) The devices that the script will run on; it may contain both device members and device group members.
required: false
script_package:
description:
- (datasource) Policy package object to run the script against
required: false
'''
EXAMPLES = '''
- name: CREATE SCRIPT
fmgr_script:
adom: "root"
script_name: "TestScript"
script_type: "cli"
script_target: "remote_device"
script_description: "Create by Ansible"
script_content: "get system status"
- name: EXECUTE SCRIPT
fmgr_script:
adom: "root"
script_name: "TestScript"
mode: "execute"
script_scope: "FGT1,FGT2"
- name: DELETE SCRIPT
fmgr_script:
adom: "root"
script_name: "TestScript"
mode: "delete"
'''
RETURN = """
api_result:
description: full API response, includes status code and message
returned: always
type: str
"""
from ansible.module_utils.basic import AnsibleModule, env_fallback
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortimanager.fortimanager import FortiManagerHandler
from ansible.module_utils.network.fortimanager.common import FMGBaseException
from ansible.module_utils.network.fortimanager.common import FMGRCommon
from ansible.module_utils.network.fortimanager.common import FMGRMethods
from ansible.module_utils.network.fortimanager.common import DEFAULT_RESULT_OBJ
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def set_script(fmgr, paramgram):
"""
:param fmgr: The fmgr object instance from fortimanager.py
:type fmgr: class object
:param paramgram: The formatted dictionary of options to process
:type paramgram: dict
:return: The response from the FortiManager
:rtype: dict
"""
datagram = {
'content': paramgram["script_content"],
'desc': paramgram["script_description"],
'name': paramgram["script_name"],
'target': paramgram["script_target"],
'type': paramgram["script_type"],
}
url = '/dvmdb/adom/{adom}/script/'.format(adom=paramgram["adom"])
response = fmgr.process_request(url, datagram, FMGRMethods.SET)
return response
def delete_script(fmgr, paramgram):
"""
:param fmgr: The fmgr object instance from fortimanager.py
:type fmgr: class object
:param paramgram: The formatted dictionary of options to process
:type paramgram: dict
:return: The response from the FortiManager
:rtype: dict
"""
datagram = {
'name': paramgram["script_name"],
}
url = '/dvmdb/adom/{adom}/script/{script_name}'.format(adom=paramgram["adom"], script_name=paramgram["script_name"])
response = fmgr.process_request(url, datagram, FMGRMethods.DELETE)
return response
def execute_script(fmgr, paramgram):
"""
:param fmgr: The fmgr object instance from fortimanager.py
:type fmgr: class object
:param paramgram: The formatted dictionary of options to process
:type paramgram: dict
:return: The response from the FortiManager
:rtype: dict
"""
scope_list = list()
scope = paramgram["script_scope"].replace(' ', '')
scope = scope.split(',')
for dev_name in scope:
scope_list.append({'name': dev_name, 'vdom': paramgram["vdom"]})
datagram = {
'adom': paramgram["adom"],
'script': paramgram["script_name"],
'package': paramgram["script_package"],
'scope': scope_list,
}
url = '/dvmdb/adom/{adom}/script/execute'.format(adom=paramgram["adom"])
response = fmgr.process_request(url, datagram, FMGRMethods.EXEC)
return response
def main():
argument_spec = dict(
adom=dict(required=False, type="str", default="root"),
vdom=dict(required=False, type="str", default="root"),
mode=dict(choices=["add", "execute", "set", "delete"], type="str", default="add"),
script_name=dict(required=True, type="str"),
script_type=dict(required=False, type="str"),
script_target=dict(required=False, type="str"),
script_description=dict(required=False, type="str"),
script_content=dict(required=False, type="str"),
script_scope=dict(required=False, type="str"),
script_package=dict(required=False, type="str"),
)
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
paramgram = {
"script_name": module.params["script_name"],
"script_type": module.params["script_type"],
"script_target": module.params["script_target"],
"script_description": module.params["script_description"],
"script_content": module.params["script_content"],
"script_scope": module.params["script_scope"],
"script_package": module.params["script_package"],
"adom": module.params["adom"],
"vdom": module.params["vdom"],
"mode": module.params["mode"],
}
module.paramgram = paramgram
fmgr = None
if module._socket_path:
connection = Connection(module._socket_path)
fmgr = FortiManagerHandler(connection, module)
fmgr.tools = FMGRCommon()
else:
module.fail_json(**FAIL_SOCKET_MSG)
results = DEFAULT_RESULT_OBJ
try:
if paramgram["mode"] in ['add', 'set']:
results = set_script(fmgr, paramgram)
fmgr.govern_response(module=module, results=results, msg="Operation Finished",
ansible_facts=fmgr.construct_ansible_facts(results, module.params, module.params))
except Exception as err:
raise FMGBaseException(err)
try:
if paramgram["mode"] == "execute":
results = execute_script(fmgr, paramgram)
fmgr.govern_response(module=module, results=results, msg="Operation Finished",
ansible_facts=fmgr.construct_ansible_facts(results, module.params, module.params))
except Exception as err:
raise FMGBaseException(err)
try:
if paramgram["mode"] == "delete":
results = delete_script(fmgr, paramgram)
fmgr.govern_response(module=module, results=results, msg="Operation Finished",
ansible_facts=fmgr.construct_ansible_facts(results, module.params, module.params))
except Exception as err:
raise FMGBaseException(err)
return module.exit_json(**results[1])
if __name__ == "__main__":
main()
|
skulldreamz/bullhead_kernel
|
refs/heads/master
|
tools/perf/python/twatch.py
|
7370
|
#! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
cpus = perf.cpu_map()
threads = perf.thread_map()
evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
wakeup_events = 1, watermark = 1,
sample_id_all = 1,
                       sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU)
    evsel.open(cpus = cpus, threads = threads)
evlist = perf.evlist(cpus, threads)
evlist.add(evsel)
evlist.mmap()
while True:
evlist.poll(timeout = -1)
for cpu in cpus:
event = evlist.read_on_cpu(cpu)
if not event:
continue
print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
event.sample_pid,
event.sample_tid),
print event
if __name__ == '__main__':
main()
|
norayr/unisubs
|
refs/heads/staging
|
apps/videos/migrations/0151_rename_pal_to_palc.py
|
5
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
db.rename_column('videos_video', 'primary_audio_language',
'primary_audio_language_code')
def backwards(self, orm):
db.rename_column('videos_video', 'primary_audio_language_code',
'primary_audio_language')
models = {
'accountlinker.thirdpartyaccount': {
'Meta': {'unique_together': "(('type', 'username'),)", 'object_name': 'ThirdPartyAccount'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'oauth_access_token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'oauth_refresh_token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'auth.customuser': {
'Meta': {'object_name': 'CustomUser', '_ormbases': ['auth.User']},
'autoplay_preferences': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'award_points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'can_send_messages': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'is_partner': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'notify_by_email': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'notify_by_message': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Partner']", 'null': 'True', 'blank': 'True'}),
'picture': ('utils.amazon.fields.S3EnabledImageField', [], {'thumb_options': "{'upscale': True, 'crop': 'smart'}", 'max_length': '100', 'blank': 'True'}),
'preferred_language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'valid_email': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 10, 26, 15, 0, 22, 695763)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 10, 26, 15, 0, 22, 695610)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'comments.comment': {
'Meta': {'object_name': 'Comment'},
'content': ('django.db.models.fields.TextField', [], {'max_length': '3000'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'content_type_set_for_comment'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_pk': ('django.db.models.fields.TextField', [], {}),
'reply_to': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['comments.Comment']", 'null': 'True', 'blank': 'True'}),
'submit_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'teams.application': {
'Meta': {'unique_together': "(('team', 'user', 'status'),)", 'object_name': 'Application'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applications'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_applications'", 'to': "orm['auth.CustomUser']"})
},
'teams.partner': {
'Meta': {'object_name': 'Partner'},
'admins': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'managed_partners'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.CustomUser']"}),
'can_request_paid_captions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'teams.project': {
'Meta': {'unique_together': "(('team', 'name'), ('team', 'slug'))", 'object_name': 'Project'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'guidelines': ('django.db.models.fields.TextField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'workflow_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'teams.team': {
'Meta': {'object_name': 'Team'},
'applicants': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'applicated_teams'", 'symmetrical': 'False', 'through': "orm['teams.Application']", 'to': "orm['auth.CustomUser']"}),
'application_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'auth_provider_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '24', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'header_html_text': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'highlight': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'last_notification_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'logo': ('utils.amazon.fields.S3EnabledImageField', [], {'thumb_options': "{'upscale': True, 'autocrop': True}", 'max_length': '100', 'blank': 'True'}),
'max_tasks_per_member': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'membership_policy': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
'page_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'teams'", 'null': 'True', 'to': "orm['teams.Partner']"}),
'points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'projects_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'subtitle_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'task_assign_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'task_expiration': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'third_party_accounts': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'to': "orm['accountlinker.ThirdPartyAccount']"}),
'translate_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'through': "orm['teams.TeamMember']", 'to': "orm['auth.CustomUser']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'intro_for_teams'", 'null': 'True', 'to': "orm['videos.Video']"}),
'video_policy': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'through': "orm['teams.TeamVideo']", 'symmetrical': 'False'}),
'workflow_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'teams.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'default': "'contributor'", 'max_length': '16', 'db_index': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'members'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_members'", 'to': "orm['auth.CustomUser']"})
},
'teams.teamvideo': {
'Meta': {'unique_together': "(('team', 'video'),)", 'object_name': 'TeamVideo'},
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'all_languages': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'partner_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Project']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'thumb_options': "{'upscale': True, 'crop': 'smart'}", 'null': 'True', 'thumb_sizes': '((290, 165), (120, 90))', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'video': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['videos.Video']", 'unique': 'True'})
},
'videos.action': {
'Meta': {'object_name': 'Action'},
'action_type': ('django.db.models.fields.IntegerField', [], {}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['comments.Comment']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']", 'null': 'True', 'blank': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.TeamMember']", 'null': 'True', 'blank': 'True'}),
'new_video_title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']", 'null': 'True', 'blank': 'True'})
},
'videos.subtitle': {
'Meta': {'unique_together': "(('version', 'subtitle_id'),)", 'object_name': 'Subtitle'},
'end_time': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'db_column': "'end_time_ms'"}),
'end_time_seconds': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_column': "'end_time'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_of_paragraph': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'start_time': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'db_column': "'start_time_ms'"}),
'start_time_seconds': ('django.db.models.fields.FloatField', [], {'null': 'True', 'db_column': "'start_time'"}),
'subtitle_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'subtitle_order': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'subtitle_text': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleVersion']", 'null': 'True'})
},
'videos.subtitlelanguage': {
'Meta': {'unique_together': "(('video', 'language', 'standard_language'),)", 'object_name': 'SubtitleLanguage'},
'created': ('django.db.models.fields.DateTimeField', [], {}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'followed_languages'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
'had_version': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'has_version': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_forked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_original': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'percent_done': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'standard_language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']", 'null': 'True', 'blank': 'True'}),
'subtitle_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subtitles_fetched_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'videos.subtitlemetadata': {
'Meta': {'object_name': 'SubtitleMetadata'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.PositiveIntegerField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'subtitle': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Subtitle']"})
},
'videos.subtitleversion': {
'Meta': {'unique_together': "(('language', 'version_no'),)", 'object_name': 'SubtitleVersion'},
'datetime_started': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'forked_from': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleVersion']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_forked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']"}),
'moderation_status': ('django.db.models.fields.CharField', [], {'default': "'not__under_moderation'", 'max_length': '32', 'db_index': 'True'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'notification_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'result_of_rollback': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'text_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'time_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'version_no': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'videos.subtitleversionmetadata': {
'Meta': {'unique_together': "(('key', 'subtitle_version'),)", 'object_name': 'SubtitleVersionMetadata'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.PositiveIntegerField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'subtitle_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'metadata'", 'to': "orm['videos.SubtitleVersion']"})
},
'videos.usertestresult': {
'Meta': {'object_name': 'UserTestResult'},
'browser': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'get_updates': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'task1': ('django.db.models.fields.TextField', [], {}),
'task2': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'task3': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'videos.video': {
'Meta': {'object_name': 'Video'},
'allow_community_edits': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'allow_video_urls_edit': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'complete_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'edited': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'featured': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'followed_videos'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'languages_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'moderated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'moderating'", 'null': 'True', 'to': "orm['teams.Team']"}),
'primary_audio_language_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '16', 'blank': 'True'}),
's3_thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'thumb_options': "{'upscale': True, 'crop': 'smart'}", 'max_length': '100', 'thumb_sizes': '((290, 165), (120, 90))', 'blank': 'True'}),
'small_thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'subtitles_fetched_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'video_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'was_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'widget_views_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'writelock_owners'", 'null': 'True', 'to': "orm['auth.CustomUser']"}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'videos.videofeed': {
'Meta': {'object_name': 'VideoFeed'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'})
},
'videos.videometadata': {
'Meta': {'object_name': 'VideoMetadata'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.PositiveIntegerField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'videos.videourl': {
'Meta': {'object_name': 'VideoUrl'},
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'original': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'owner_username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'primary': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '255'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"}),
'videoid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'})
}
}
complete_apps = ['videos']
|
be-cloud-be/horizon-addons
|
refs/heads/9.0
|
partner-contact/base_location/models/better_zip.py
|
26
|
# -*- coding: utf-8 -*-
#
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Contributor: Pedro Manuel Baeza <pedro.baeza@serviciosbaeza.com>
# Ignacio Ibeas <ignacio@acysos.com>
# Alejandro Santana <alejandrosantana@anubia.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from openerp import models, fields, api
class BetterZip(models.Model):
    '''City/location completion object'''
_name = "res.better.zip"
_description = __doc__
_order = "name asc"
_rec_name = "display_name"
display_name = fields.Char('Name', compute='_get_display_name', store=True)
name = fields.Char('ZIP')
code = fields.Char('City Code', size=64,
help="The official code for the city")
city = fields.Char('City', required=True)
state_id = fields.Many2one('res.country.state', 'State')
country_id = fields.Many2one('res.country', 'Country')
@api.one
@api.depends(
'name',
'city',
'state_id',
'country_id',
)
def _get_display_name(self):
if self.name:
name = [self.name, self.city]
else:
name = [self.city]
if self.state_id:
name.append(self.state_id.name)
if self.country_id:
name.append(self.country_id.name)
self.display_name = ", ".join(name)
@api.onchange('state_id')
def onchange_state_id(self):
if self.state_id:
self.country_id = self.state_id.country_id
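# --- Illustrative sketch (not part of the original module) ---
# _get_display_name above composes "ZIP, City, State, Country" and drops any
# missing part. A plain-Python equivalent of that logic, with hypothetical
# sample values:
def _compose_display_name(zip_code, city, state=None, country=None):
    parts = [zip_code, city] if zip_code else [city]
    parts.extend(p for p in (state, country) if p)
    return ", ".join(parts)
# _compose_display_name("12500", "Vinaros", "Castellon", "Spain")
# returns "12500, Vinaros, Castellon, Spain"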
|
nzavagli/UnrealPy
|
refs/heads/master
|
UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/django-1.8.2/tests/fixtures/models.py
|
128
|
"""
Fixtures.
Fixtures are a way of loading data into the database in bulk. Fixture data
can be stored in any serializable format (including JSON and XML). Fixtures
are identified by name, and are stored in either a directory named 'fixtures'
in the application directory, or in one of the directories named in the
``FIXTURE_DIRS`` setting.
"""
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Category(models.Model):
title = models.CharField(max_length=100)
description = models.TextField()
def __str__(self):
return self.title
class Meta:
ordering = ('title',)
@python_2_unicode_compatible
class Article(models.Model):
headline = models.CharField(max_length=100, default='Default headline')
pub_date = models.DateTimeField()
def __str__(self):
return self.headline
class Meta:
ordering = ('-pub_date', 'headline')
@python_2_unicode_compatible
class Blog(models.Model):
name = models.CharField(max_length=100)
featured = models.ForeignKey(Article, related_name='fixtures_featured_set')
articles = models.ManyToManyField(Article, blank=True,
related_name='fixtures_articles_set')
def __str__(self):
return self.name
@python_2_unicode_compatible
class Tag(models.Model):
name = models.CharField(max_length=100)
tagged_type = models.ForeignKey(ContentType, related_name="fixtures_tag_set")
tagged_id = models.PositiveIntegerField(default=0)
tagged = GenericForeignKey(ct_field='tagged_type', fk_field='tagged_id')
def __str__(self):
return '<%s: %s> tagged "%s"' % (self.tagged.__class__.__name__,
self.tagged, self.name)
class PersonManager(models.Manager):
def get_by_natural_key(self, name):
return self.get(name=name)
@python_2_unicode_compatible
class Person(models.Model):
objects = PersonManager()
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class Meta:
ordering = ('name',)
def natural_key(self):
return (self.name,)
class SpyManager(PersonManager):
def get_queryset(self):
return super(SpyManager, self).get_queryset().filter(cover_blown=False)
class Spy(Person):
objects = SpyManager()
cover_blown = models.BooleanField(default=False)
@python_2_unicode_compatible
class Visa(models.Model):
person = models.ForeignKey(Person)
permissions = models.ManyToManyField(Permission, blank=True)
def __str__(self):
return '%s %s' % (self.person.name,
', '.join(p.name for p in self.permissions.all()))
@python_2_unicode_compatible
class Book(models.Model):
name = models.CharField(max_length=100)
authors = models.ManyToManyField(Person)
def __str__(self):
authors = ' and '.join(a.name for a in self.authors.all())
return '%s by %s' % (self.name, authors) if authors else self.name
class Meta:
ordering = ('name',)
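# --- Illustrative sketch (not part of the original test models) ---
# A fixture for these models is a serialized list of objects; a hypothetical
# fixtures/sample.json might look like:
#
#     [
#       {"model": "fixtures.category", "pk": 1,
#        "fields": {"title": "News", "description": "News stories"}},
#       {"model": "fixtures.person", "pk": 1,
#        "fields": {"name": "Django Reinhardt"}}
#     ]
#
# and, on a configured Django project, would be loaded with the standard
# management command:
#
#     from django.core.management import call_command
#     call_command('loaddata', 'sample')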
|
vladmm/intellij-community
|
refs/heads/master
|
python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_getcwdu.py
|
327
|
"""
Fixer that changes os.getcwdu() to os.getcwd().
"""
# Author: Victor Stinner
# Local imports
from .. import fixer_base
from ..fixer_util import Name
class FixGetcwdu(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """
power< 'os' trailer< dot='.' name='getcwdu' > any* >
"""
def transform(self, node, results):
name = results["name"]
name.replace(Name(u"getcwd", prefix=name.prefix))
|
drayanaindra/inasafe
|
refs/heads/develop
|
safe_qgis/batch/__init__.py
|
246
|
__author__ = 'timlinux'
|
sycurfriend00/ghost
|
refs/heads/master
|
node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-2.7/pygments/lexers/web.py
|
197
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.web
~~~~~~~~~~~~~~~~~~~
Lexers for web-related languages and markup.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import copy
from pygments.lexer import RegexLexer, ExtendedRegexLexer, bygroups, using, \
include, this
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Other, Punctuation, Literal
from pygments.util import get_bool_opt, get_list_opt, looks_like_xml, \
html_doctype_matches, unirange
from pygments.lexers.agile import RubyLexer
from pygments.lexers.compiled import ScalaLexer
__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'JsonLexer', 'CssLexer',
'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer',
'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer', 'ScssLexer',
'ObjectiveJLexer', 'CoffeeScriptLexer', 'LiveScriptLexer',
'DuelLexer', 'ScamlLexer', 'JadeLexer', 'XQueryLexer',
'DtdLexer', 'DartLexer', 'LassoLexer', 'QmlLexer', 'TypeScriptLexer']
class JavascriptLexer(RegexLexer):
"""
For JavaScript source code.
"""
name = 'JavaScript'
aliases = ['js', 'javascript']
filenames = ['*.js', ]
mimetypes = ['application/javascript', 'application/x-javascript',
'text/x-javascript', 'text/javascript', ]
flags = re.DOTALL
tokens = {
'commentsandwhitespace': [
(r'\s+', Text),
(r'<!--', Comment),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline)
],
'slashstartsregex': [
include('commentsandwhitespace'),
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
(r'(?=/)', Text, ('#pop', 'badregex')),
(r'', Text, '#pop')
],
'badregex': [
(r'\n', Text, '#pop')
],
'root': [
(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
r'throw|try|catch|finally|new|delete|typeof|instanceof|void|yield|'
r'this)\b', Keyword, 'slashstartsregex'),
(r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
(r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
r'extends|final|float|goto|implements|import|int|interface|long|native|'
r'package|private|protected|public|short|static|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Reserved),
(r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|netscape|'
r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
r'window)\b', Name.Builtin),
(r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
]
}
class JsonLexer(RegexLexer):
"""
For JSON data structures.
*New in Pygments 1.5.*
"""
name = 'JSON'
aliases = ['json']
filenames = ['*.json']
mimetypes = [ 'application/json', ]
# integer part of a number
int_part = r'-?(0|[1-9]\d*)'
# fractional part of a number
frac_part = r'\.\d+'
# exponential part of a number
exp_part = r'[eE](\+|-)?\d+'
flags = re.DOTALL
tokens = {
'whitespace': [
(r'\s+', Text),
],
# represents a simple terminal value
'simplevalue': [
(r'(true|false|null)\b', Keyword.Constant),
(('%(int_part)s(%(frac_part)s%(exp_part)s|'
'%(exp_part)s|%(frac_part)s)') % vars(),
Number.Float),
(int_part, Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
],
# the right hand side of an object, after the attribute name
'objectattribute': [
include('value'),
(r':', Punctuation),
# comma terminates the attribute but expects more
(r',', Punctuation, '#pop'),
# a closing bracket terminates the entire object, so pop twice
(r'}', Punctuation, ('#pop', '#pop')),
],
# a json object - { attr, attr, ... }
'objectvalue': [
include('whitespace'),
(r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'),
(r'}', Punctuation, '#pop'),
],
        # json array - [ value, value, ... ]
'arrayvalue': [
include('whitespace'),
include('value'),
(r',', Punctuation),
(r']', Punctuation, '#pop'),
],
# a json value - either a simple value or a complex value (object or array)
'value': [
include('whitespace'),
include('simplevalue'),
(r'{', Punctuation, 'objectvalue'),
(r'\[', Punctuation, 'arrayvalue'),
],
        # the root of a json document should be a value
'root': [
include('value'),
],
}
JSONLexer = JsonLexer # for backwards compatibility with Pygments 1.5
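# --- Illustrative sketch (not part of the original lexer file) ---
# Any of these lexers plugs into the standard pygments pipeline; a minimal
# highlighting run with JsonLexer, assuming pygments is installed as usual:
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     print(highlight('{"key": [1, 2.5e3, true]}', JsonLexer(),
#                     TerminalFormatter()))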
class ActionScriptLexer(RegexLexer):
"""
For ActionScript source code.
*New in Pygments 0.9.*
"""
name = 'ActionScript'
aliases = ['as', 'actionscript']
filenames = ['*.as']
mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
'text/actionscript3']
flags = re.DOTALL
tokens = {
'root': [
(r'\s+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
(r'[~\^\*!%&<>\|+=:;,/?\\-]+', Operator),
(r'[{}\[\]();.]+', Punctuation),
(r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
r'throw|try|catch|var|with|new|typeof|arguments|instanceof|this|'
r'switch)\b', Keyword),
(r'(class|public|final|internal|native|override|private|protected|'
r'static|import|extends|implements|interface|intrinsic|return|super|'
r'dynamic|function|const|get|namespace|package|set)\b',
Keyword.Declaration),
(r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
Keyword.Constant),
(r'(Accessibility|AccessibilityProperties|ActionScriptVersion|'
r'ActivityEvent|AntiAliasType|ApplicationDomain|AsBroadcaster|Array|'
r'AsyncErrorEvent|AVM1Movie|BevelFilter|Bitmap|BitmapData|'
r'BitmapDataChannel|BitmapFilter|BitmapFilterQuality|BitmapFilterType|'
r'BlendMode|BlurFilter|Boolean|ByteArray|Camera|Capabilities|CapsStyle|'
r'Class|Color|ColorMatrixFilter|ColorTransform|ContextMenu|'
r'ContextMenuBuiltInItems|ContextMenuEvent|ContextMenuItem|'
r'ConvultionFilter|CSMSettings|DataEvent|Date|DefinitionError|'
r'DeleteObjectSample|Dictionary|DisplacmentMapFilter|DisplayObject|'
r'DisplacmentMapFilterMode|DisplayObjectContainer|DropShadowFilter|'
r'Endian|EOFError|Error|ErrorEvent|EvalError|Event|EventDispatcher|'
r'EventPhase|ExternalInterface|FileFilter|FileReference|'
r'FileReferenceList|FocusDirection|FocusEvent|Font|FontStyle|FontType|'
r'FrameLabel|FullScreenEvent|Function|GlowFilter|GradientBevelFilter|'
r'GradientGlowFilter|GradientType|Graphics|GridFitType|HTTPStatusEvent|'
             r'IBitmapDrawable|ID3Info|IDataInput|IDataOutput|IDynamicPropertyOutput|'
r'IDynamicPropertyWriter|IEventDispatcher|IExternalizable|'
r'IllegalOperationError|IME|IMEConversionMode|IMEEvent|int|'
r'InteractiveObject|InterpolationMethod|InvalidSWFError|InvokeEvent|'
r'IOError|IOErrorEvent|JointStyle|Key|Keyboard|KeyboardEvent|KeyLocation|'
r'LineScaleMode|Loader|LoaderContext|LoaderInfo|LoadVars|LocalConnection|'
r'Locale|Math|Matrix|MemoryError|Microphone|MorphShape|Mouse|MouseEvent|'
r'MovieClip|MovieClipLoader|Namespace|NetConnection|NetStatusEvent|'
r'NetStream|NewObjectSample|Number|Object|ObjectEncoding|PixelSnapping|'
r'Point|PrintJob|PrintJobOptions|PrintJobOrientation|ProgressEvent|Proxy|'
r'QName|RangeError|Rectangle|ReferenceError|RegExp|Responder|Sample|Scene|'
r'ScriptTimeoutError|Security|SecurityDomain|SecurityError|'
r'SecurityErrorEvent|SecurityPanel|Selection|Shape|SharedObject|'
r'SharedObjectFlushStatus|SimpleButton|Socket|Sound|SoundChannel|'
r'SoundLoaderContext|SoundMixer|SoundTransform|SpreadMethod|Sprite|'
r'StackFrame|StackOverflowError|Stage|StageAlign|StageDisplayState|'
r'StageQuality|StageScaleMode|StaticText|StatusEvent|String|StyleSheet|'
r'SWFVersion|SyncEvent|SyntaxError|System|TextColorType|TextField|'
r'TextFieldAutoSize|TextFieldType|TextFormat|TextFormatAlign|'
r'TextLineMetrics|TextRenderer|TextSnapshot|Timer|TimerEvent|Transform|'
r'TypeError|uint|URIError|URLLoader|URLLoaderDataFormat|URLRequest|'
r'URLRequestHeader|URLRequestMethod|URLStream|URLVariabeles|VerifyError|'
r'Video|XML|XMLDocument|XMLList|XMLNode|XMLNodeType|XMLSocket|XMLUI)\b',
Name.Builtin),
(r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
             r'unescape)\b', Name.Function),
(r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
]
}
class ActionScript3Lexer(RegexLexer):
"""
For ActionScript 3 source code.
*New in Pygments 0.11.*
"""
name = 'ActionScript 3'
aliases = ['as3', 'actionscript3']
filenames = ['*.as']
mimetypes = ['application/x-actionscript', 'text/x-actionscript',
'text/actionscript']
identifier = r'[$a-zA-Z_][a-zA-Z0-9_]*'
typeidentifier = identifier + '(?:\.<\w+>)?'
flags = re.DOTALL | re.MULTILINE
tokens = {
'root': [
(r'\s+', Text),
(r'(function\s+)(' + identifier + r')(\s*)(\()',
bygroups(Keyword.Declaration, Name.Function, Text, Operator),
'funcparams'),
(r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
typeidentifier + r')',
bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text,
Keyword.Type)),
(r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
bygroups(Keyword, Text, Name.Namespace, Text)),
(r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
bygroups(Keyword, Text, Keyword.Type, Text, Operator)),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex),
(r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
(r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
r'switch|import|include|as|is)\b',
Keyword),
(r'(class|public|final|internal|native|override|private|protected|'
r'static|import|extends|implements|interface|intrinsic|return|super|'
r'dynamic|function|const|get|namespace|package|set)\b',
Keyword.Declaration),
(r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
Keyword.Constant),
(r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
r'unescape)\b', Name.Function),
(identifier, Name),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'[~\^\*!%&<>\|+=:;,/?\\{}\[\]().-]+', Operator),
],
'funcparams': [
(r'\s+', Text),
(r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
typeidentifier + r'|\*)(\s*)',
bygroups(Text, Punctuation, Name, Text, Operator, Text,
Keyword.Type, Text), 'defval'),
(r'\)', Operator, 'type')
],
'type': [
(r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'),
(r'\s*', Text, '#pop:2')
],
'defval': [
(r'(=)(\s*)([^(),]+)(\s*)(,?)',
bygroups(Operator, Text, using(this), Text, Operator), '#pop'),
(r',?', Operator, '#pop')
]
}
def analyse_text(text):
if re.match(r'\w+\s*:\s*\w', text):
return 0.3
return 0
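# --- Illustrative sketch (not part of the original lexer file) ---
# analyse_text() scores like the 0.3 above feed pygments.lexers.guess_lexer,
# which returns the highest-scoring lexer for a snippet:
#
#     from pygments.lexers import guess_lexer
#     guess_lexer('x:int = 1')  # may resolve to ActionScript 3 via this score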
class CssLexer(RegexLexer):
"""
For CSS (Cascading Style Sheets).
"""
name = 'CSS'
aliases = ['css']
filenames = ['*.css']
mimetypes = ['text/css']
tokens = {
'root': [
include('basics'),
],
'basics': [
(r'\s+', Text),
(r'/\*(?:.|\n)*?\*/', Comment),
(r'{', Punctuation, 'content'),
(r'\:[a-zA-Z0-9_-]+', Name.Decorator),
(r'\.[a-zA-Z0-9_-]+', Name.Class),
(r'\#[a-zA-Z0-9_-]+', Name.Function),
(r'@[a-zA-Z0-9_-]+', Keyword, 'atrule'),
(r'[a-zA-Z0-9_-]+', Name.Tag),
(r'[~\^\*!%&\[\]\(\)<>\|+=@:;,./?-]', Operator),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single)
],
'atrule': [
(r'{', Punctuation, 'atcontent'),
(r';', Punctuation, '#pop'),
include('basics'),
],
'atcontent': [
include('basics'),
(r'}', Punctuation, '#pop:2'),
],
'content': [
(r'\s+', Text),
(r'}', Punctuation, '#pop'),
(r'url\(.*?\)', String.Other),
(r'^@.*?$', Comment.Preproc),
(r'(azimuth|background-attachment|background-color|'
r'background-image|background-position|background-repeat|'
r'background|border-bottom-color|border-bottom-style|'
r'border-bottom-width|border-left-color|border-left-style|'
r'border-left-width|border-right|border-right-color|'
r'border-right-style|border-right-width|border-top-color|'
r'border-top-style|border-top-width|border-bottom|'
r'border-collapse|border-left|border-width|border-color|'
r'border-spacing|border-style|border-top|border|caption-side|'
r'clear|clip|color|content|counter-increment|counter-reset|'
r'cue-after|cue-before|cue|cursor|direction|display|'
r'elevation|empty-cells|float|font-family|font-size|'
r'font-size-adjust|font-stretch|font-style|font-variant|'
r'font-weight|font|height|letter-spacing|line-height|'
r'list-style-type|list-style-image|list-style-position|'
r'list-style|margin-bottom|margin-left|margin-right|'
r'margin-top|margin|marker-offset|marks|max-height|max-width|'
r'min-height|min-width|opacity|orphans|outline|outline-color|'
r'outline-style|outline-width|overflow(?:-x|-y)?|padding-bottom|'
r'padding-left|padding-right|padding-top|padding|page|'
r'page-break-after|page-break-before|page-break-inside|'
r'pause-after|pause-before|pause|pitch|pitch-range|'
r'play-during|position|quotes|richness|right|size|'
r'speak-header|speak-numeral|speak-punctuation|speak|'
r'speech-rate|stress|table-layout|text-align|text-decoration|'
r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
r'vertical-align|visibility|voice-family|volume|white-space|'
r'widows|width|word-spacing|z-index|bottom|left|'
r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
r'behind|below|bidi-override|blink|block|bold|bolder|both|'
r'capitalize|center-left|center-right|center|circle|'
r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
r'inherit|inline-table|inline|inset|inside|invert|italic|'
r'justify|katakana-iroha|katakana|landscape|larger|large|'
r'left-side|leftwards|level|lighter|line-through|list-item|'
r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
r'lower|low|medium|message-box|middle|mix|monospace|'
r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
r'open-quote|outset|outside|overline|pointer|portrait|px|'
r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
r'rightwards|s-resize|sans-serif|scroll|se-resize|'
r'semi-condensed|semi-expanded|separate|serif|show|silent|'
r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
r'spell-out|square|static|status-bar|super|sw-resize|'
r'table-caption|table-cell|table-column|table-column-group|'
r'table-footer-group|table-header-group|table-row|'
r'table-row-group|text|text-bottom|text-top|thick|thin|'
r'transparent|ultra-condensed|ultra-expanded|underline|'
r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Keyword),
(r'(indigo|gold|firebrick|indianred|yellow|darkolivegreen|'
r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
r'mediumslateblue|black|springgreen|crimson|lightsalmon|brown|'
r'turquoise|olivedrab|cyan|silver|skyblue|gray|darkturquoise|'
r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|teal|'
r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
r'violet|navy|orchid|blue|ghostwhite|honeydew|cornflowerblue|'
r'darkblue|darkkhaki|mediumpurple|cornsilk|red|bisque|slategray|'
r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
r'gainsboro|mediumturquoise|floralwhite|coral|purple|lightgrey|'
r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
r'lightcoral|orangered|navajowhite|lime|palegreen|burlywood|'
r'seashell|mediumspringgreen|fuchsia|papayawhip|blanchedalmond|'
r'peru|aquamarine|white|darkslategray|ivory|dodgerblue|'
r'lemonchiffon|chocolate|orange|forestgreen|slateblue|olive|'
r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
r'plum|aqua|darkgoldenrod|maroon|sandybrown|magenta|tan|'
r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
r'lightyellow|lavenderblush|linen|mediumaquamarine|green|'
r'blueviolet|peachpuff)\b', Name.Builtin),
(r'\!important', Comment.Preproc),
(r'/\*(?:.|\n)*?\*/', Comment),
(r'\#[a-zA-Z0-9]{1,6}', Number),
(r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|\%|pt|pc|in|mm|cm|ex|s)\b', Number),
(r'-?[0-9]+', Number),
(r'[~\^\*!%&<>\|+=@:,./?-]+', Operator),
(r'[\[\]();]+', Punctuation),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name)
]
}
class ObjectiveJLexer(RegexLexer):
"""
For Objective-J source code with preprocessor directives.
*New in Pygments 1.3.*
"""
name = 'Objective-J'
aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
filenames = ['*.j']
mimetypes = ['text/x-objective-j']
#: optional Comment or Whitespace
_ws = r'(?:\s|//.*?\n|/[*].*?[*]/)*'
flags = re.DOTALL | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
# function definition
(r'^(' + _ws + r'[\+-]' + _ws + r')([\(a-zA-Z_].*?[^\(])(' + _ws + '{)',
bygroups(using(this), using(this, state='function_signature'),
using(this))),
# class definition
(r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
'classname'),
(r'(@class|@protocol)(\s*)', bygroups(Keyword, Text),
'forward_classname'),
(r'(\s*)(@end)(\s*)', bygroups(Text, Keyword, Text)),
include('statements'),
('[{\(\)}]', Punctuation),
(';', Punctuation),
],
'whitespace': [
(r'(@import)(\s+)("(?:\\\\|\\"|[^"])*")',
bygroups(Comment.Preproc, Text, String.Double)),
(r'(@import)(\s+)(<(?:\\\\|\\>|[^>])*>)',
bygroups(Comment.Preproc, Text, String.Double)),
(r'(#(?:include|import))(\s+)("(?:\\\\|\\"|[^"])*")',
bygroups(Comment.Preproc, Text, String.Double)),
(r'(#(?:include|import))(\s+)(<(?:\\\\|\\>|[^>])*>)',
bygroups(Comment.Preproc, Text, String.Double)),
(r'#if\s+0', Comment.Preproc, 'if0'),
(r'#', Comment.Preproc, 'macro'),
(r'\n', Text),
(r'\s+', Text),
(r'\\\n', Text), # line continuation
(r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
(r'<!--', Comment),
],
'slashstartsregex': [
include('whitespace'),
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
(r'(?=/)', Text, ('#pop', 'badregex')),
(r'', Text, '#pop'),
],
'badregex': [
(r'\n', Text, '#pop'),
],
'statements': [
(r'(L|@)?"', String, 'string'),
(r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
String.Char),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
(r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
(r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
(r'0[0-7]+[Ll]?', Number.Oct),
(r'\d+[Ll]?', Number.Integer),
(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?',
Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(r'(for|in|while|do|break|return|continue|switch|case|default|if|'
r'else|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
r'prototype|__proto__)\b', Keyword, 'slashstartsregex'),
(r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
(r'(@selector|@private|@protected|@public|@encode|'
r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
r'@synthesize|@dynamic|@for|@accessors|new)\b', Keyword),
(r'(int|long|float|short|double|char|unsigned|signed|void|'
r'id|BOOL|bool|boolean|IBOutlet|IBAction|SEL|@outlet|@action)\b',
Keyword.Type),
(r'(self|super)\b', Name.Builtin),
(r'(TRUE|YES|FALSE|NO|Nil|nil|NULL)\b', Keyword.Constant),
(r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
(r'(ABS|ASIN|ACOS|ATAN|ATAN2|SIN|COS|TAN|EXP|POW|CEIL|FLOOR|ROUND|'
r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
r'SQRT2)\b', Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|netscape|'
r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
r'window)\b', Name.Builtin),
(r'([$a-zA-Z_][a-zA-Z0-9_]*)(' + _ws + r')(?=\()',
bygroups(Name.Function, using(this))),
(r'[$a-zA-Z_][a-zA-Z0-9_]*', Name),
],
'classname' : [
# interface definition that inherits
(r'([a-zA-Z_][a-zA-Z0-9_]*)(' + _ws + r':' + _ws +
r')([a-zA-Z_][a-zA-Z0-9_]*)?',
bygroups(Name.Class, using(this), Name.Class), '#pop'),
# interface definition for a category
(r'([a-zA-Z_][a-zA-Z0-9_]*)(' + _ws + r'\()([a-zA-Z_][a-zA-Z0-9_]*)(\))',
bygroups(Name.Class, using(this), Name.Label, Text), '#pop'),
# simple interface / implementation
(r'([a-zA-Z_][a-zA-Z0-9_]*)', Name.Class, '#pop'),
],
'forward_classname' : [
(r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*,\s*)',
bygroups(Name.Class, Text), '#push'),
(r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*;?)',
bygroups(Name.Class, Text), '#pop'),
],
'function_signature': [
include('whitespace'),
# start of a selector w/ parameters
(r'(\(' + _ws + r')' # open paren
r'([a-zA-Z_][a-zA-Z0-9_]+)' # return type
r'(' + _ws + r'\)' + _ws + r')' # close paren
r'([$a-zA-Z_][a-zA-Z0-9_]+' + _ws + r':)', # function name
bygroups(using(this), Keyword.Type, using(this),
Name.Function), 'function_parameters'),
# no-param function
(r'(\(' + _ws + r')' # open paren
r'([a-zA-Z_][a-zA-Z0-9_]+)' # return type
r'(' + _ws + r'\)' + _ws + r')' # close paren
r'([$a-zA-Z_][a-zA-Z0-9_]+)', # function name
bygroups(using(this), Keyword.Type, using(this),
Name.Function), "#pop"),
# no return type given, start of a selector w/ parameters
(r'([$a-zA-Z_][a-zA-Z0-9_]+' + _ws + r':)', # function name
bygroups(Name.Function), 'function_parameters'),
# no return type given, no-param function
(r'([$a-zA-Z_][a-zA-Z0-9_]+)', # function name
bygroups(Name.Function), "#pop"),
('', Text, '#pop'),
],
'function_parameters': [
include('whitespace'),
# parameters
(r'(\(' + _ws + ')' # open paren
r'([^\)]+)' # type
r'(' + _ws + r'\)' + _ws + r')' # close paren
r'([$a-zA-Z_][a-zA-Z0-9_]+)', # param name
bygroups(using(this), Keyword.Type, using(this), Text)),
# one piece of a selector name
(r'([$a-zA-Z_][a-zA-Z0-9_]+' + _ws + r':)', # function name
Name.Function),
# smallest possible selector piece
(r'(:)', Name.Function),
# var args
(r'(,' + _ws + r'\.\.\.)', using(this)),
# param name
(r'([$a-zA-Z_][a-zA-Z0-9_]+)', Text),
],
'expression' : [
(r'([$a-zA-Z_][a-zA-Z0-9_]*)(\()', bygroups(Name.Function,
Punctuation)),
(r'(\))', Punctuation, "#pop"),
],
'string': [
(r'"', String, '#pop'),
(r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
(r'[^\\"\n]+', String), # all other characters
(r'\\\n', String), # line continuation
(r'\\', String), # stray backslash
],
'macro': [
(r'[^/\n]+', Comment.Preproc),
(r'/[*](.|\n)*?[*]/', Comment.Multiline),
(r'//.*?\n', Comment.Single, '#pop'),
(r'/', Comment.Preproc),
(r'(?<=\\)\n', Comment.Preproc),
(r'\n', Comment.Preproc, '#pop'),
],
'if0': [
(r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
(r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
(r'.*?\n', Comment),
]
}
def analyse_text(text):
if re.search(r'^\s*@import\s+[<"]', text, re.MULTILINE):
# special directive found in most Objective-J files
return True
return False
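# analyse_text hooks like the one above feed pygments' content-based lexer
# guessing. A minimal sketch (assuming the standard pygments API):
#
#   from pygments.lexers import guess_lexer
#   lexer = guess_lexer('@import <Foundation/Foundation.j>')
#   # the @import directive makes ObjectiveJLexer.analyse_text return
#   # True, so guess_lexer should prefer it over other candidates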
class HtmlLexer(RegexLexer):
"""
For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS are highlighted
by the appropriate lexer.
"""
name = 'HTML'
aliases = ['html']
filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
mimetypes = ['text/html', 'application/xhtml+xml']
flags = re.IGNORECASE | re.DOTALL
tokens = {
'root': [
('[^<&]+', Text),
(r'&\S*?;', Name.Entity),
(r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
('<!--', Comment, 'comment'),
(r'<\?.*?\?>', Comment.Preproc),
('<![^>]*>', Comment.Preproc),
(r'<\s*script\s*', Name.Tag, ('script-content', 'tag')),
(r'<\s*style\s*', Name.Tag, ('style-content', 'tag')),
# note: this allows tag names not used in HTML, like <x:with-dash>;
# this is to support yet-unknown template engines and the like
(r'<\s*[\w:.-]+', Name.Tag, 'tag'),
(r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
],
'comment': [
('[^-]+', Comment),
('-->', Comment, '#pop'),
('-', Comment),
],
'tag': [
(r'\s+', Text),
(r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
(r'[a-zA-Z0-9_:-]+', Name.Attribute),
(r'/?\s*>', Name.Tag, '#pop'),
],
'script-content': [
(r'<\s*/\s*script\s*>', Name.Tag, '#pop'),
(r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
],
'style-content': [
(r'<\s*/\s*style\s*>', Name.Tag, '#pop'),
(r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
],
'attr': [
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
],
}
def analyse_text(text):
if html_doctype_matches(text):
return 0.5
class PhpLexer(RegexLexer):
"""
For `PHP <http://www.php.net/>`_ source code.
For PHP embedded in HTML, use the `HtmlPhpLexer`.
Additional options accepted:
`startinline`
If given and ``True``, the lexer starts highlighting with
PHP code (i.e. no starting ``<?php`` required). The default
is ``False``.
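A minimal sketch of the effect (doctest-style; assuming a standard
pygments installation):
.. sourcecode:: pycon
>>> from pygments.lexers import PhpLexer
>>> lexer = PhpLexer(startinline=True)
>>> list(lexer.get_tokens('$x = 1;'))[0][0]
Token.Name.Variable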
`funcnamehighlighting`
If given and ``True``, highlight builtin function names
(default: ``True``).
`disabledmodules`
If given, must be a list of module names whose function names
should not be highlighted. By default all modules are highlighted
except the special ``'unknown'`` module that includes functions
that are known to PHP but are undocumented.
To get a list of allowed modules, have a look into the
`_phpbuiltins` module:
.. sourcecode:: pycon
>>> from pygments.lexers._phpbuiltins import MODULES
>>> MODULES.keys()
['PHP Options/Info', 'Zip', 'dba', ...]
In fact, the names of those modules match the module names from
the PHP documentation.
"""
name = 'PHP'
aliases = ['php', 'php3', 'php4', 'php5']
filenames = ['*.php', '*.php[345]', '*.inc']
mimetypes = ['text/x-php']
flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
tokens = {
'root': [
(r'<\?(php)?', Comment.Preproc, 'php'),
(r'[^<]+', Other),
(r'<', Other)
],
'php': [
(r'\?>', Comment.Preproc, '#pop'),
(r'<<<(\'?)([a-zA-Z_][a-zA-Z0-9_]*)\1\n.*?\n\2\;?\n', String),
(r'\s+', Text),
(r'#.*?\n', Comment.Single),
(r'//.*?\n', Comment.Single),
# match the empty comment first, as it is otherwise seen as
# the start of a docstring
(r'/\*\*/', Comment.Multiline),
(r'/\*\*.*?\*/', String.Doc),
(r'/\*.*?\*/', Comment.Multiline),
(r'(->|::)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Operator, Text, Name.Attribute)),
(r'[~!%^&*+=|:.<>/?@-]+', Operator),
(r'[\[\]{}();,]+', Punctuation),
(r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
(r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
(r'(function)(\s+)(&?)(\s*)',
bygroups(Keyword, Text, Operator, Text), 'functionname'),
(r'(const)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Keyword, Text, Name.Constant)),
(r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
r'FALSE|print|for|require|continue|foreach|require_once|'
r'declare|return|default|static|do|switch|die|stdClass|'
r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
r'virtual|endfor|include_once|while|endforeach|global|__FILE__|'
r'endif|list|__LINE__|endswitch|new|__sleep|endwhile|not|'
r'array|__wakeup|E_ALL|NULL|final|php_user_filter|interface|'
r'implements|public|private|protected|abstract|clone|try|'
r'catch|throw|this|use|namespace|trait)\b', Keyword),
(r'(true|false|null)\b', Keyword.Constant),
(r'\$\{\$+[a-zA-Z_][a-zA-Z0-9_]*\}', Name.Variable),
(r'\$+[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
(r'[\\a-zA-Z_][\\a-zA-Z0-9_]*', Name.Other),
(r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
(r'\d+[eE][+-]?[0-9]+', Number.Float),
(r'0[0-7]+', Number.Oct),
(r'0[xX][a-fA-F0-9]+', Number.Hex),
(r'\d+', Number.Integer),
(r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
(r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
(r'"', String.Double, 'string'),
],
'classname': [
(r'[a-zA-Z_][\\a-zA-Z0-9_]*', Name.Class, '#pop')
],
'functionname': [
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
],
'string': [
(r'"', String.Double, '#pop'),
(r'[^{$"\\]+', String.Double),
(r'\\([nrt\"$\\]|[0-7]{1,3}|x[0-9A-Fa-f]{1,2})', String.Escape),
(r'\$[a-zA-Z_][a-zA-Z0-9_]*(\[\S+\]|->[a-zA-Z_][a-zA-Z0-9_]*)?',
String.Interpol),
(r'(\{\$\{)(.*?)(\}\})',
bygroups(String.Interpol, using(this, _startinline=True),
String.Interpol)),
(r'(\{)(\$.*?)(\})',
bygroups(String.Interpol, using(this, _startinline=True),
String.Interpol)),
(r'(\$\{)(\S+)(\})',
bygroups(String.Interpol, Name.Variable, String.Interpol)),
(r'[${\\]+', String.Double)
],
}
def __init__(self, **options):
self.funcnamehighlighting = get_bool_opt(
options, 'funcnamehighlighting', True)
self.disabledmodules = get_list_opt(
options, 'disabledmodules', ['unknown'])
self.startinline = get_bool_opt(options, 'startinline', False)
# private option argument for the lexer itself
if '_startinline' in options:
self.startinline = options.pop('_startinline')
# collect activated functions in a set
self._functions = set()
if self.funcnamehighlighting:
from pygments.lexers._phpbuiltins import MODULES
for key, value in MODULES.iteritems():
if key not in self.disabledmodules:
self._functions.update(value)
RegexLexer.__init__(self, **options)
def get_tokens_unprocessed(self, text):
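# startinline skips the 'root' state's <?php detection and begins
# lexing directly inside the 'php' state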
stack = ['root']
if self.startinline:
stack.append('php')
for index, token, value in \
RegexLexer.get_tokens_unprocessed(self, text, stack):
if token is Name.Other:
if value in self._functions:
yield index, Name.Builtin, value
continue
yield index, token, value
def analyse_text(text):
rv = 0.0
if re.search(r'<\?(?!xml)', text):
rv += 0.3
if '?>' in text:
rv += 0.1
return rv
class DtdLexer(RegexLexer):
"""
A lexer for DTDs (Document Type Definitions).
*New in Pygments 1.5.*
"""
flags = re.MULTILINE | re.DOTALL
name = 'DTD'
aliases = ['dtd']
filenames = ['*.dtd']
mimetypes = ['application/xml-dtd']
tokens = {
'root': [
include('common'),
(r'(<!ELEMENT)(\s+)(\S+)',
bygroups(Keyword, Text, Name.Tag), 'element'),
(r'(<!ATTLIST)(\s+)(\S+)',
bygroups(Keyword, Text, Name.Tag), 'attlist'),
(r'(<!ENTITY)(\s+)(\S+)',
bygroups(Keyword, Text, Name.Entity), 'entity'),
(r'(<!NOTATION)(\s+)(\S+)',
bygroups(Keyword, Text, Name.Tag), 'notation'),
(r'(<!\[)([^\[\s]+)(\s*)(\[)', # conditional sections
bygroups(Keyword, Name.Entity, Text, Keyword)),
(r'(<!DOCTYPE)(\s+)([^>\s]+)',
bygroups(Keyword, Text, Name.Tag)),
(r'PUBLIC|SYSTEM', Keyword.Constant),
(r'[\[\]>]', Keyword),
],
'common': [
(r'\s+', Text),
(r'(%|&)[^;]*;', Name.Entity),
('<!--', Comment, 'comment'),
(r'[(|)*,?+]', Operator),
(r'"[^"]*"', String.Double),
(r'\'[^\']*\'', String.Single),
],
'comment': [
('[^-]+', Comment),
('-->', Comment, '#pop'),
('-', Comment),
],
'element': [
include('common'),
(r'EMPTY|ANY|#PCDATA', Keyword.Constant),
(r'[^>\s\|()?+*,]+', Name.Tag),
(r'>', Keyword, '#pop'),
],
'attlist': [
include('common'),
(r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION',
Keyword.Constant),
(r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant),
(r'xml:space|xml:lang', Keyword.Reserved),
(r'[^>\s\|()?+*,]+', Name.Attribute),
(r'>', Keyword, '#pop'),
],
'entity': [
include('common'),
(r'SYSTEM|PUBLIC|NDATA', Keyword.Constant),
(r'[^>\s\|()?+*,]+', Name.Entity),
(r'>', Keyword, '#pop'),
],
'notation': [
include('common'),
(r'SYSTEM|PUBLIC', Keyword.Constant),
(r'[^>\s\|()?+*,]+', Name.Attribute),
(r'>', Keyword, '#pop'),
],
}
def analyse_text(text):
if not looks_like_xml(text) and \
('<!ELEMENT' in text or '<!ATTLIST' in text or '<!ENTITY' in text):
return 0.8
class XmlLexer(RegexLexer):
"""
Generic lexer for XML (eXtensible Markup Language).
"""
flags = re.MULTILINE | re.DOTALL | re.UNICODE
name = 'XML'
aliases = ['xml']
filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd',
'*.wsdl', '*.wsf']
mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
'application/rss+xml', 'application/atom+xml']
tokens = {
'root': [
('[^<&]+', Text),
(r'&\S*?;', Name.Entity),
(r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
('<!--', Comment, 'comment'),
(r'<\?.*?\?>', Comment.Preproc),
('<![^>]*>', Comment.Preproc),
(r'<\s*[\w:.-]+', Name.Tag, 'tag'),
(r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
],
'comment': [
('[^-]+', Comment),
('-->', Comment, '#pop'),
('-', Comment),
],
'tag': [
(r'\s+', Text),
(r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'attr': [
('\s+', Text),
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
],
}
def analyse_text(text):
if looks_like_xml(text):
return 0.5
class XsltLexer(XmlLexer):
'''
A lexer for XSLT.
*New in Pygments 0.10.*
'''
name = 'XSLT'
aliases = ['xslt']
filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
mimetypes = ['application/xsl+xml', 'application/xslt+xml']
EXTRA_KEYWORDS = set([
'apply-imports', 'apply-templates', 'attribute',
'attribute-set', 'call-template', 'choose', 'comment',
'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
'for-each', 'if', 'import', 'include', 'key', 'message',
'namespace-alias', 'number', 'otherwise', 'output', 'param',
'preserve-space', 'processing-instruction', 'sort',
'strip-space', 'stylesheet', 'template', 'text', 'transform',
'value-of', 'variable', 'when', 'with-param'
])
def get_tokens_unprocessed(self, text):
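# re-tag xsl:* element names as keywords when the local name is a known
# XSLT instruction; all other tokens pass through from XmlLexer unchanged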
for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
m = re.match('</?xsl:([^>]*)/?>?', value)
if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
yield index, Keyword, value
else:
yield index, token, value
def analyse_text(text):
if looks_like_xml(text) and '<xsl' in text:
return 0.8
class MxmlLexer(RegexLexer):
"""
For MXML markup.
Nested AS3 in <script> tags is highlighted by the appropriate lexer.
*New in Pygments 1.1.*
"""
flags = re.MULTILINE | re.DOTALL
name = 'MXML'
aliases = ['mxml']
filenames = ['*.mxml']
mimetypes = ['text/xml', 'application/xml']
tokens = {
'root': [
('[^<&]+', Text),
(r'&\S*?;', Name.Entity),
(r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
bygroups(String, using(ActionScript3Lexer), String)),
('<!--', Comment, 'comment'),
(r'<\?.*?\?>', Comment.Preproc),
('<![^>]*>', Comment.Preproc),
(r'<\s*[a-zA-Z0-9:._-]+', Name.Tag, 'tag'),
(r'<\s*/\s*[a-zA-Z0-9:._-]+\s*>', Name.Tag),
],
'comment': [
('[^-]+', Comment),
('-->', Comment, '#pop'),
('-', Comment),
],
'tag': [
(r'\s+', Text),
(r'[a-zA-Z0-9_.:-]+\s*=', Name.Attribute, 'attr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'attr': [
('\s+', Text),
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
],
}
class HaxeLexer(ExtendedRegexLexer):
"""
For Haxe source code (http://haxe.org/).
*New in Pygments 1.3.*
"""
name = 'Haxe'
aliases = ['hx', 'Haxe', 'haxe', 'haXe', 'hxsl']
filenames = ['*.hx', '*.hxsl']
mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
# keywords extracted from lexer.mll in the haxe compiler source
keyword = (r'(?:function|class|static|var|if|else|while|do|for|'
r'break|return|continue|extends|implements|import|'
r'switch|case|default|public|private|try|untyped|'
r'catch|new|this|throw|extern|enum|in|interface|'
r'cast|override|dynamic|typedef|package|'
r'inline|using|null|true|false|abstract)\b')
# idtype in lexer.mll
typeid = r'_*[A-Z][_a-zA-Z0-9]*'
# combined ident and dollar and idtype
ident = r'(?:_*[a-z][_a-zA-Z0-9]*|_+[0-9][_a-zA-Z0-9]*|' + typeid + \
r'|_+|\$[_a-zA-Z0-9]+)'
binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|'
r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|'
r'/|\-|=>|=)')
# ident except keywords
ident_no_keyword = r'(?!' + keyword + ')' + ident
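# the trailing \b in `keyword` makes the lookahead reject exact keywords
# only: e.g. 'if' is refused here, but an identifier like 'iface' still
# matches, because 'if' is not followed by a word boundary in it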
flags = re.DOTALL | re.MULTILINE
preproc_stack = []
def preproc_callback(self, match, ctx):
proc = match.group(2)
if proc == 'if':
# store the current stack
self.preproc_stack.append(ctx.stack[:])
elif proc in ['else', 'elseif']:
# restore the stack back to right before #if
if self.preproc_stack: ctx.stack = self.preproc_stack[-1][:]
elif proc == 'end':
# remove the saved stack of previous #if
if self.preproc_stack: self.preproc_stack.pop()
# #if and #elseif should be followed by an expr
if proc in ['if', 'elseif']:
ctx.stack.append('preproc-expr')
# #error can optionally be followed by the error msg
if proc in ['error']:
ctx.stack.append('preproc-error')
yield match.start(), Comment.Preproc, '#' + proc
ctx.pos = match.end()
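# A sketch of the save/restore behaviour above, on hypothetical input:
#
#   #if flash      <- the current state stack is snapshotted
#   var x = 1;     <- lexing may push/pop states freely here
#   #elseif js     <- the stack is reset to the snapshot taken at #if
#   var x = 2;
#   #end           <- the snapshot is discarded
#
# so highlighting stays consistent across mutually exclusive branches.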
tokens = {
'root': [
include('spaces'),
include('meta'),
(r'(?:package)\b', Keyword.Namespace, ('semicolon', 'package')),
(r'(?:import)\b', Keyword.Namespace, ('semicolon', 'import')),
(r'(?:using)\b', Keyword.Namespace, ('semicolon', 'using')),
(r'(?:extern|private)\b', Keyword.Declaration),
(r'(?:abstract)\b', Keyword.Declaration, 'abstract'),
(r'(?:class|interface)\b', Keyword.Declaration, 'class'),
(r'(?:enum)\b', Keyword.Declaration, 'enum'),
(r'(?:typedef)\b', Keyword.Declaration, 'typedef'),
# top-level expression
# although not supported in Haxe, it is common to write expressions
# in web pages; the positive lookahead here prevents an infinite
# loop at EOF
(r'(?=.)', Text, 'expr-statement'),
],
# space/tab/comment/preproc
'spaces': [
(r'\s+', Text),
(r'//[^\n\r]*', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'(#)(if|elseif|else|end|error)\b', preproc_callback),
],
'string-single-interpol': [
(r'\$\{', String.Interpol, ('string-interpol-close', 'expr')),
(r'\$\$', String.Escape),
(r'\$(?=' + ident + ')', String.Interpol, 'ident'),
include('string-single'),
],
'string-single': [
(r"'", String.Single, '#pop'),
(r'\\.', String.Escape),
(r'.', String.Single),
],
'string-double': [
(r'"', String.Double, '#pop'),
(r'\\.', String.Escape),
(r'.', String.Double),
],
'string-interpol-close': [
(r'\$'+ident, String.Interpol),
(r'\}', String.Interpol, '#pop'),
],
'package': [
include('spaces'),
(ident, Name.Namespace),
(r'\.', Punctuation, 'import-ident'),
(r'', Text, '#pop'),
],
'import': [
include('spaces'),
(ident, Name.Namespace),
(r'\*', Keyword), # wildcard import
(r'\.', Punctuation, 'import-ident'),
(r'in', Keyword.Namespace, 'ident'),
(r'', Text, '#pop'),
],
'import-ident': [
include('spaces'),
(r'\*', Keyword, '#pop'), # wildcard import
(ident, Name.Namespace, '#pop'),
],
'using': [
include('spaces'),
(ident, Name.Namespace),
(r'\.', Punctuation, 'import-ident'),
(r'', Text, '#pop'),
],
'preproc-error': [
(r'\s+', Comment.Preproc),
(r"'", String.Single, ('#pop', 'string-single')),
(r'"', String.Double, ('#pop', 'string-double')),
(r'', Text, '#pop'),
],
'preproc-expr': [
(r'\s+', Comment.Preproc),
(r'\!', Comment.Preproc),
(r'\(', Comment.Preproc, ('#pop', 'preproc-parenthesis')),
(ident, Comment.Preproc, '#pop'),
(r"'", String.Single, ('#pop', 'string-single')),
(r'"', String.Double, ('#pop', 'string-double')),
],
'preproc-parenthesis': [
(r'\s+', Comment.Preproc),
(r'\)', Comment.Preproc, '#pop'),
('', Text, 'preproc-expr-in-parenthesis'),
],
'preproc-expr-chain': [
(r'\s+', Comment.Preproc),
(binop, Comment.Preproc, ('#pop', 'preproc-expr-in-parenthesis')),
(r'', Text, '#pop'),
],
# same as 'preproc-expr' but able to chain 'preproc-expr-chain'
'preproc-expr-in-parenthesis': [
(r'\s+', Comment.Preproc),
(r'\!', Comment.Preproc),
(r'\(', Comment.Preproc,
('#pop', 'preproc-expr-chain', 'preproc-parenthesis')),
(ident, Comment.Preproc, ('#pop', 'preproc-expr-chain')),
(r"'", String.Single,
('#pop', 'preproc-expr-chain', 'string-single')),
(r'"', String.Double,
('#pop', 'preproc-expr-chain', 'string-double')),
],
'abstract' : [
include('spaces'),
(r'', Text, ('#pop', 'abstract-body', 'abstract-relation',
'abstract-opaque', 'type-param-constraint', 'type-name')),
],
'abstract-body' : [
include('spaces'),
(r'\{', Punctuation, ('#pop', 'class-body')),
],
'abstract-opaque' : [
include('spaces'),
(r'\(', Punctuation, ('#pop', 'parenthesis-close', 'type')),
(r'', Text, '#pop'),
],
'abstract-relation': [
include('spaces'),
(r'(?:to|from)', Keyword.Declaration, 'type'),
(r',', Punctuation),
(r'', Text, '#pop'),
],
'meta': [
include('spaces'),
(r'@', Name.Decorator, ('meta-body', 'meta-ident', 'meta-colon')),
],
# optional colon
'meta-colon': [
include('spaces'),
(r':', Name.Decorator, '#pop'),
(r'', Text, '#pop'),
],
# same as 'ident' but sets the token as Name.Decorator instead of Name
'meta-ident': [
include('spaces'),
(ident, Name.Decorator, '#pop'),
],
'meta-body': [
include('spaces'),
(r'\(', Name.Decorator, ('#pop', 'meta-call')),
(r'', Text, '#pop'),
],
'meta-call': [
include('spaces'),
(r'\)', Name.Decorator, '#pop'),
(r'', Text, ('#pop', 'meta-call-sep', 'expr')),
],
'meta-call-sep': [
include('spaces'),
(r'\)', Name.Decorator, '#pop'),
(r',', Punctuation, ('#pop', 'meta-call')),
],
'typedef': [
include('spaces'),
(r'', Text, ('#pop', 'typedef-body', 'type-param-constraint',
'type-name')),
],
'typedef-body': [
include('spaces'),
(r'=', Operator, ('#pop', 'optional-semicolon', 'type')),
],
'enum': [
include('spaces'),
(r'', Text, ('#pop', 'enum-body', 'bracket-open',
'type-param-constraint', 'type-name')),
],
'enum-body': [
include('spaces'),
include('meta'),
(r'\}', Punctuation, '#pop'),
(ident_no_keyword, Name, ('enum-member', 'type-param-constraint')),
],
'enum-member': [
include('spaces'),
(r'\(', Punctuation,
('#pop', 'semicolon', 'flag', 'function-param')),
(r'', Punctuation, ('#pop', 'semicolon', 'flag')),
],
'class': [
include('spaces'),
(r'', Text, ('#pop', 'class-body', 'bracket-open', 'extends',
'type-param-constraint', 'type-name')),
],
'extends': [
include('spaces'),
(r'(?:extends|implements)\b', Keyword.Declaration, 'type'),
(r',', Punctuation), # the comma is made optional here, since Haxe 2
# requires the comma but Haxe 3 does not allow it
(r'', Text, '#pop'),
],
'bracket-open': [
include('spaces'),
(r'\{', Punctuation, '#pop'),
],
'bracket-close': [
include('spaces'),
(r'\}', Punctuation, '#pop'),
],
'class-body': [
include('spaces'),
include('meta'),
(r'\}', Punctuation, '#pop'),
(r'(?:static|public|private|override|dynamic|inline|macro)\b',
Keyword.Declaration),
(r'', Text, 'class-member'),
],
'class-member': [
include('spaces'),
(r'(var)\b', Keyword.Declaration,
('#pop', 'optional-semicolon', 'prop')),
(r'(function)\b', Keyword.Declaration,
('#pop', 'optional-semicolon', 'class-method')),
],
# local function, anonymous or not
'function-local': [
include('spaces'),
(r'(' + ident_no_keyword + ')?', Name.Function,
('#pop', 'expr', 'flag', 'function-param',
'parenthesis-open', 'type-param-constraint')),
],
'optional-expr': [
include('spaces'),
include('expr'),
(r'', Text, '#pop'),
],
'class-method': [
include('spaces'),
(ident, Name.Function, ('#pop', 'optional-expr', 'flag',
'function-param', 'parenthesis-open',
'type-param-constraint')),
],
# function arguments
'function-param': [
include('spaces'),
(r'\)', Punctuation, '#pop'),
(r'\?', Punctuation),
(ident_no_keyword, Name,
('#pop', 'function-param-sep', 'assign', 'flag')),
],
'function-param-sep': [
include('spaces'),
(r'\)', Punctuation, '#pop'),
(r',', Punctuation, ('#pop', 'function-param')),
],
# class property
# eg. var prop(default, null):String;
'prop': [
include('spaces'),
(ident_no_keyword, Name, ('#pop', 'assign', 'flag', 'prop-get-set')),
],
'prop-get-set': [
include('spaces'),
(r'\(', Punctuation, ('#pop', 'parenthesis-close',
'prop-get-set-opt', 'comma', 'prop-get-set-opt')),
(r'', Text, '#pop'),
],
'prop-get-set-opt': [
include('spaces'),
(r'(?:default|null|never|dynamic|get|set)\b', Keyword, '#pop'),
(ident_no_keyword, Text, '#pop'), # custom getter/setter
],
'expr-statement': [
include('spaces'),
# the semicolon is made optional here, to avoid having to check
# whether the last expression was a bracket block or not
(r'', Text, ('#pop', 'optional-semicolon', 'expr')),
],
'expr': [
include('spaces'),
(r'@', Name.Decorator, ('#pop', 'optional-expr', 'meta-body',
'meta-ident', 'meta-colon')),
(r'(?:\+\+|\-\-|~(?!/)|!|\-)', Operator),
(r'\(', Punctuation, ('#pop', 'expr-chain', 'parenthesis')),
(r'(?:inline)\b', Keyword.Declaration),
(r'(?:function)\b', Keyword.Declaration, ('#pop', 'expr-chain',
'function-local')),
(r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket')),
(r'(?:true|false|null)\b', Keyword.Constant, ('#pop', 'expr-chain')),
(r'(?:this)\b', Keyword, ('#pop', 'expr-chain')),
(r'(?:cast)\b', Keyword, ('#pop', 'expr-chain', 'cast')),
(r'(?:try)\b', Keyword, ('#pop', 'catch', 'expr')),
(r'(?:var)\b', Keyword.Declaration, ('#pop', 'var')),
(r'(?:new)\b', Keyword, ('#pop', 'expr-chain', 'new')),
(r'(?:switch)\b', Keyword, ('#pop', 'switch')),
(r'(?:if)\b', Keyword, ('#pop', 'if')),
(r'(?:do)\b', Keyword, ('#pop', 'do')),
(r'(?:while)\b', Keyword, ('#pop', 'while')),
(r'(?:for)\b', Keyword, ('#pop', 'for')),
(r'(?:untyped|throw)\b', Keyword),
(r'(?:return)\b', Keyword, ('#pop', 'optional-expr')),
(r'(?:macro)\b', Keyword, ('#pop', 'macro')),
(r'(?:continue|break)\b', Keyword, '#pop'),
(r'(?:\$\s*[a-z]\b|\$(?!'+ident+'))', Name, ('#pop', 'dollar')),
(ident_no_keyword, Name, ('#pop', 'expr-chain')),
# Float
(r'\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
(r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
(r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
(r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
(r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'expr-chain')),
# Int
(r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')),
(r'[0-9]+', Number.Integer, ('#pop', 'expr-chain')),
# String
(r"'", String.Single, ('#pop', 'expr-chain', 'string-single-interpol')),
(r'"', String.Double, ('#pop', 'expr-chain', 'string-double')),
# EReg
(r'~/(\\\\|\\/|[^/\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')),
# Array
(r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')),
],
'expr-chain': [
include('spaces'),
(r'(?:\+\+|\-\-)', Operator),
(binop, Operator, ('#pop', 'expr')),
(r'(?:in)\b', Keyword, ('#pop', 'expr')),
(r'\?', Operator, ('#pop', 'expr', 'ternary', 'expr')),
(r'(\.)(' + ident_no_keyword + ')', bygroups(Punctuation, Name)),
(r'\[', Punctuation, 'array-access'),
(r'\(', Punctuation, 'call'),
(r'', Text, '#pop'),
],
# macro reification
'macro': [
include('spaces'),
(r':', Punctuation, ('#pop', 'type')),
(r'', Text, ('#pop', 'expr')),
],
# cast can be written as "cast expr" or "cast(expr, type)"
'cast': [
include('spaces'),
(r'\(', Punctuation, ('#pop', 'parenthesis-close',
'cast-type', 'expr')),
(r'', Text, ('#pop', 'expr')),
],
# optionally give a type as the 2nd argument of cast()
'cast-type': [
include('spaces'),
(r',', Punctuation, ('#pop', 'type')),
(r'', Text, '#pop'),
],
'catch': [
include('spaces'),
(r'(?:catch)\b', Keyword, ('expr', 'function-param',
'parenthesis-open')),
(r'', Text, '#pop'),
],
# do-while loop
'do': [
include('spaces'),
(r'', Punctuation, ('#pop', 'do-while', 'expr')),
],
# the while after do
'do-while': [
include('spaces'),
(r'(?:while)\b', Keyword, ('#pop', 'parenthesis',
'parenthesis-open')),
],
'while': [
include('spaces'),
(r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
],
'for': [
include('spaces'),
(r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
],
'if': [
include('spaces'),
(r'\(', Punctuation, ('#pop', 'else', 'optional-semicolon', 'expr',
'parenthesis')),
],
'else': [
include('spaces'),
(r'(?:else)\b', Keyword, ('#pop', 'expr')),
(r'', Text, '#pop'),
],
'switch': [
include('spaces'),
(r'', Text, ('#pop', 'switch-body', 'bracket-open', 'expr')),
],
'switch-body': [
include('spaces'),
(r'(?:case|default)\b', Keyword, ('case-block', 'case')),
(r'\}', Punctuation, '#pop'),
],
'case': [
include('spaces'),
(r':', Punctuation, '#pop'),
(r'', Text, ('#pop', 'case-sep', 'case-guard', 'expr')),
],
'case-sep': [
include('spaces'),
(r':', Punctuation, '#pop'),
(r',', Punctuation, ('#pop', 'case')),
],
'case-guard': [
include('spaces'),
(r'(?:if)\b', Keyword, ('#pop', 'parenthesis', 'parenthesis-open')),
(r'', Text, '#pop'),
],
# optional multiple expr under a case
'case-block': [
include('spaces'),
(r'(?!(?:case|default)\b|\})', Keyword, 'expr-statement'),
(r'', Text, '#pop'),
],
'new': [
include('spaces'),
(r'', Text, ('#pop', 'call', 'parenthesis-open', 'type')),
],
'array-decl': [
include('spaces'),
(r'\]', Punctuation, '#pop'),
(r'', Text, ('#pop', 'array-decl-sep', 'expr')),
],
'array-decl-sep': [
include('spaces'),
(r'\]', Punctuation, '#pop'),
(r',', Punctuation, ('#pop', 'array-decl')),
],
'array-access': [
include('spaces'),
(r'', Text, ('#pop', 'array-access-close', 'expr')),
],
'array-access-close': [
include('spaces'),
(r'\]', Punctuation, '#pop'),
],
'comma': [
include('spaces'),
(r',', Punctuation, '#pop'),
],
'colon': [
include('spaces'),
(r':', Punctuation, '#pop'),
],
'semicolon': [
include('spaces'),
(r';', Punctuation, '#pop'),
],
'optional-semicolon': [
include('spaces'),
(r';', Punctuation, '#pop'),
(r'', Text, '#pop'),
],
# an identifier that CAN be a Haxe keyword
'ident': [
include('spaces'),
(ident, Name, '#pop'),
],
'dollar': [
include('spaces'),
(r'\{', Keyword, ('#pop', 'bracket-close', 'expr')),
(r'', Text, ('#pop', 'expr-chain')),
],
'type-name': [
include('spaces'),
(typeid, Name, '#pop'),
],
'type-full-name': [
include('spaces'),
(r'\.', Punctuation, 'ident'),
(r'', Text, '#pop'),
],
'type': [
include('spaces'),
(r'\?', Punctuation),
(ident, Name, ('#pop', 'type-check', 'type-full-name')),
(r'\{', Punctuation, ('#pop', 'type-check', 'type-struct')),
(r'\(', Punctuation, ('#pop', 'type-check', 'type-parenthesis')),
],
'type-parenthesis': [
include('spaces'),
(r'', Text, ('#pop', 'parenthesis-close', 'type')),
],
'type-check': [
include('spaces'),
(r'->', Punctuation, ('#pop', 'type')),
(r'<(?!=)', Punctuation, 'type-param'),
(r'', Text, '#pop'),
],
'type-struct': [
include('spaces'),
(r'\}', Punctuation, '#pop'),
(r'\?', Punctuation),
(r'>', Punctuation, ('comma', 'type')),
(ident_no_keyword, Name, ('#pop', 'type-struct-sep', 'type', 'colon')),
include('class-body'),
],
'type-struct-sep': [
include('spaces'),
(r'\}', Punctuation, '#pop'),
(r',', Punctuation, ('#pop', 'type-struct')),
],
# type-param can be a normal type or a constant literal...
'type-param-type': [
# Float
(r'\.[0-9]+', Number.Float, '#pop'),
(r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float, '#pop'),
(r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float, '#pop'),
(r'[0-9]+\.[0-9]+', Number.Float, '#pop'),
(r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, '#pop'),
# Int
(r'0x[0-9a-fA-F]+', Number.Hex, '#pop'),
(r'[0-9]+', Number.Integer, '#pop'),
# String
(r"'", String.Single, ('#pop', 'string-single')),
(r'"', String.Double, ('#pop', 'string-double')),
# EReg
(r'~/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex, '#pop'),
# Array
(r'\[', Operator, ('#pop', 'array-decl')),
include('type'),
],
# type-param part of a type
# i.e. the <A,B> part in Map<A,B>
'type-param': [
include('spaces'),
(r'', Text, ('#pop', 'type-param-sep', 'type-param-type')),
],
'type-param-sep': [
include('spaces'),
(r'>', Punctuation, '#pop'),
(r',', Punctuation, ('#pop', 'type-param')),
],
# optional type-param that may include constraint
# i.e. <T:Constraint, T2:(ConstraintA,ConstraintB)>
'type-param-constraint': [
include('spaces'),
(r'<(?!=)', Punctuation, ('#pop', 'type-param-constraint-sep',
'type-param-constraint-flag', 'type-name')),
(r'', Text, '#pop'),
],
'type-param-constraint-sep': [
include('spaces'),
(r'>', Punctuation, '#pop'),
(r',', Punctuation, ('#pop', 'type-param-constraint-sep',
'type-param-constraint-flag', 'type-name')),
],
# the optional constraint inside type-param
'type-param-constraint-flag': [
include('spaces'),
(r':', Punctuation, ('#pop', 'type-param-constraint-flag-type')),
(r'', Text, '#pop'),
],
'type-param-constraint-flag-type': [
include('spaces'),
(r'\(', Punctuation, ('#pop', 'type-param-constraint-flag-type-sep',
'type')),
(r'', Text, ('#pop', 'type')),
],
'type-param-constraint-flag-type-sep': [
include('spaces'),
(r'\)', Punctuation, '#pop'),
(r',', Punctuation, 'type'),
],
# a parenthesis expr that contain exactly one expr
'parenthesis': [
include('spaces'),
(r'', Text, ('#pop', 'parenthesis-close', 'expr')),
],
'parenthesis-open': [
include('spaces'),
(r'\(', Punctuation, '#pop'),
],
'parenthesis-close': [
include('spaces'),
(r'\)', Punctuation, '#pop'),
],
'var': [
include('spaces'),
(ident_no_keyword, Text, ('#pop', 'var-sep', 'assign', 'flag')),
],
# optional more var decl.
'var-sep': [
include('spaces'),
(r',', Punctuation, ('#pop', 'var')),
(r'', Text, '#pop'),
],
# optional assignment
'assign': [
include('spaces'),
(r'=', Operator, ('#pop', 'expr')),
(r'', Text, '#pop'),
],
# optional type flag
'flag': [
include('spaces'),
(r':', Punctuation, ('#pop', 'type')),
(r'', Text, '#pop'),
],
# colon as part of a ternary operator (?:)
'ternary': [
include('spaces'),
(r':', Operator, '#pop'),
],
# function call
'call': [
include('spaces'),
(r'\)', Punctuation, '#pop'),
(r'', Text, ('#pop', 'call-sep', 'expr')),
],
# after a call param
'call-sep': [
include('spaces'),
(r'\)', Punctuation, '#pop'),
(r',', Punctuation, ('#pop', 'call')),
],
# bracket can be block or object
'bracket': [
include('spaces'),
(r'(?!(?:\$\s*[a-z]\b|\$(?!'+ident+')))' + ident_no_keyword, Name,
('#pop', 'bracket-check')),
(r"'", String.Single, ('#pop', 'bracket-check', 'string-single')),
(r'"', String.Double, ('#pop', 'bracket-check', 'string-double')),
(r'', Text, ('#pop', 'block')),
],
'bracket-check': [
include('spaces'),
(r':', Punctuation, ('#pop', 'object-sep', 'expr')), # is object
(r'', Text, ('#pop', 'block', 'optional-semicolon', 'expr-chain')), # is block
],
# code block
'block': [
include('spaces'),
(r'\}', Punctuation, '#pop'),
(r'', Text, 'expr-statement'),
],
# object in key-value pairs
'object': [
include('spaces'),
(r'\}', Punctuation, '#pop'),
(r'', Text, ('#pop', 'object-sep', 'expr', 'colon', 'ident-or-string'))
],
# a key of an object
'ident-or-string': [
include('spaces'),
(ident_no_keyword, Name, '#pop'),
(r"'", String.Single, ('#pop', 'string-single')),
(r'"', String.Double, ('#pop', 'string-double')),
],
# after a key-value pair in object
'object-sep': [
include('spaces'),
(r'\}', Punctuation, '#pop'),
(r',', Punctuation, ('#pop', 'object')),
],
}
def analyse_text(text):
if re.match(r'\w+\s*:\s*\w', text): return 0.3
def _indentation(lexer, match, ctx):
indentation = match.group(0)
yield match.start(), Text, indentation
ctx.last_indentation = indentation
ctx.pos = match.end()
if hasattr(ctx, 'block_state') and ctx.block_state and \
indentation.startswith(ctx.block_indentation) and \
indentation != ctx.block_indentation:
ctx.stack.append(ctx.block_state)
else:
ctx.block_state = None
ctx.block_indentation = None
ctx.stack.append('content')
def _starts_block(token, state):
def callback(lexer, match, ctx):
yield match.start(), token, match.group(0)
if hasattr(ctx, 'last_indentation'):
ctx.block_indentation = ctx.last_indentation
else:
ctx.block_indentation = ''
ctx.block_state = state
ctx.pos = match.end()
return callback
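# How the two helpers above cooperate (sketch, using a Haml comment):
#
#   /            <- _starts_block(Comment, 'html-comment-block') records
#                   the current indentation and the state to enter
#     hidden     <- _indentation sees a deeper indent and pushes
#                   'html-comment-block', so this line is lexed as Comment
#   %p visible   <- indentation is back at the recorded level, so the
#                   block state is cleared and 'content' lexing resumes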
class HamlLexer(ExtendedRegexLexer):
"""
For Haml markup.
*New in Pygments 1.3.*
"""
name = 'Haml'
aliases = ['haml', 'HAML']
filenames = ['*.haml']
mimetypes = ['text/x-haml']
flags = re.IGNORECASE
# Haml can include " |\n" anywhere,
# which is ignored and used to wrap long lines.
# To accommodate this, use this custom faux dot instead.
_dot = r'(?: \|\n(?=.* \|)|.)'
# In certain places, a comma at the end of the line
# allows line wrapping as well.
_comma_dot = r'(?:,\s*\n|' + _dot + ')'
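# For example, with
#
#   %p some text |
#      continued |
#
# the first ' |' + newline is consumed as an ordinary character (the
# lookahead sees that the next line also contains ' |'), while the final
# ' |' + newline terminates the logical line.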
tokens = {
'root': [
(r'[ \t]*\n', Text),
(r'[ \t]*', _indentation),
],
'css': [
(r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
(r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
],
'eval-or-plain': [
(r'[&!]?==', Punctuation, 'plain'),
(r'([&!]?[=~])(' + _comma_dot + r'*\n)',
bygroups(Punctuation, using(RubyLexer)),
'root'),
(r'', Text, 'plain'),
],
'content': [
include('css'),
(r'%[a-z0-9_:-]+', Name.Tag, 'tag'),
(r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
(r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
bygroups(Comment, Comment.Special, Comment),
'#pop'),
(r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
'#pop'),
(r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
'haml-comment-block'), '#pop'),
(r'(-)(' + _comma_dot + r'*\n)',
bygroups(Punctuation, using(RubyLexer)),
'#pop'),
(r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
'#pop'),
include('eval-or-plain'),
],
'tag': [
include('css'),
(r'\{(,\n|' + _dot + ')*?\}', using(RubyLexer)),
(r'\[' + _dot + '*?\]', using(RubyLexer)),
(r'\(', Text, 'html-attributes'),
(r'/[ \t]*\n', Punctuation, '#pop:2'),
(r'[<>]{1,2}(?=[ \t=])', Punctuation),
include('eval-or-plain'),
],
'plain': [
(r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
(r'(#\{)(' + _dot + '*?)(\})',
bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
(r'\n', Text, 'root'),
],
'html-attributes': [
(r'\s+', Text),
(r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
(r'[a-z0-9_:-]+', Name.Attribute),
(r'\)', Text, '#pop'),
],
'html-attribute-value': [
(r'[ \t]+', Text),
(r'[a-z0-9_]+', Name.Variable, '#pop'),
(r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
(r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
(r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
(r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
],
'html-comment-block': [
(_dot + '+', Comment),
(r'\n', Text, 'root'),
],
'haml-comment-block': [
(_dot + '+', Comment.Preproc),
(r'\n', Text, 'root'),
],
'filter-block': [
(r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
(r'(#\{)(' + _dot + '*?)(\})',
bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
(r'\n', Text, 'root'),
],
}
common_sass_tokens = {
'value': [
(r'[ \t]+', Text),
(r'[!$][\w-]+', Name.Variable),
(r'url\(', String.Other, 'string-url'),
(r'[a-z_-][\w-]*(?=\()', Name.Function),
(r'(azimuth|background-attachment|background-color|'
r'background-image|background-position|background-repeat|'
r'background|border-bottom-color|border-bottom-style|'
r'border-bottom-width|border-left-color|border-left-style|'
r'border-left-width|border-right|border-right-color|'
r'border-right-style|border-right-width|border-top-color|'
r'border-top-style|border-top-width|border-bottom|'
r'border-collapse|border-left|border-width|border-color|'
r'border-spacing|border-style|border-top|border|caption-side|'
r'clear|clip|color|content|counter-increment|counter-reset|'
r'cue-after|cue-before|cue|cursor|direction|display|'
r'elevation|empty-cells|float|font-family|font-size|'
r'font-size-adjust|font-stretch|font-style|font-variant|'
r'font-weight|font|height|letter-spacing|line-height|'
r'list-style-type|list-style-image|list-style-position|'
r'list-style|margin-bottom|margin-left|margin-right|'
r'margin-top|margin|marker-offset|marks|max-height|max-width|'
r'min-height|min-width|opacity|orphans|outline|outline-color|'
r'outline-style|outline-width|overflow|padding-bottom|'
r'padding-left|padding-right|padding-top|padding|page|'
r'page-break-after|page-break-before|page-break-inside|'
r'pause-after|pause-before|pause|pitch|pitch-range|'
r'play-during|position|quotes|richness|right|size|'
r'speak-header|speak-numeral|speak-punctuation|speak|'
r'speech-rate|stress|table-layout|text-align|text-decoration|'
r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
r'vertical-align|visibility|voice-family|volume|white-space|'
r'widows|width|word-spacing|z-index|bottom|left|'
r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
r'behind|below|bidi-override|blink|block|bold|bolder|both|'
r'capitalize|center-left|center-right|center|circle|'
r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
r'inherit|inline-table|inline|inset|inside|invert|italic|'
r'justify|katakana-iroha|katakana|landscape|larger|large|'
r'left-side|leftwards|level|lighter|line-through|list-item|'
r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
r'lower|low|medium|message-box|middle|mix|monospace|'
r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
r'open-quote|outset|outside|overline|pointer|portrait|px|'
r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
r'rightwards|s-resize|sans-serif|scroll|se-resize|'
r'semi-condensed|semi-expanded|separate|serif|show|silent|'
r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
r'spell-out|square|static|status-bar|super|sw-resize|'
r'table-caption|table-cell|table-column|table-column-group|'
r'table-footer-group|table-header-group|table-row|'
r'table-row-group|text|text-bottom|text-top|thick|thin|'
r'transparent|ultra-condensed|ultra-expanded|underline|'
r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Name.Constant),
(r'(indigo|gold|firebrick|indianred|darkolivegreen|'
r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
r'mediumslateblue|springgreen|crimson|lightsalmon|brown|'
r'turquoise|olivedrab|cyan|skyblue|darkturquoise|'
r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|'
r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
r'violet|orchid|ghostwhite|honeydew|cornflowerblue|'
r'darkblue|darkkhaki|mediumpurple|cornsilk|bisque|slategray|'
r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
r'gainsboro|mediumturquoise|floralwhite|coral|lightgrey|'
r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
r'lightcoral|orangered|navajowhite|palegreen|burlywood|'
r'seashell|mediumspringgreen|papayawhip|blanchedalmond|'
r'peru|aquamarine|darkslategray|ivory|dodgerblue|'
r'lemonchiffon|chocolate|orange|forestgreen|slateblue|'
r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
r'plum|darkgoldenrod|sandybrown|magenta|tan|'
r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
r'lightyellow|lavenderblush|linen|mediumaquamarine|'
r'blueviolet|peachpuff)\b', Name.Entity),
(r'(black|silver|gray|white|maroon|red|purple|fuchsia|green|'
r'lime|olive|yellow|navy|blue|teal|aqua)\b', Name.Builtin),
(r'\!(important|default)', Name.Exception),
(r'(true|false)', Name.Pseudo),
(r'(and|or|not)', Operator.Word),
(r'/\*', Comment.Multiline, 'inline-comment'),
(r'//[^\n]*', Comment.Single),
(r'\#[a-z0-9]{1,6}', Number.Hex),
(r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
(r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
(r'#{', String.Interpol, 'interpolation'),
(r'[~\^\*!&%<>\|+=@:,./?-]+', Operator),
(r'[\[\]()]+', Punctuation),
(r'"', String.Double, 'string-double'),
(r"'", String.Single, 'string-single'),
(r'[a-z_-][\w-]*', Name),
],
'interpolation': [
(r'\}', String.Interpol, '#pop'),
include('value'),
],
'selector': [
(r'[ \t]+', Text),
(r'\:', Name.Decorator, 'pseudo-class'),
(r'\.', Name.Class, 'class'),
(r'\#', Name.Namespace, 'id'),
(r'[a-zA-Z0-9_-]+', Name.Tag),
(r'#\{', String.Interpol, 'interpolation'),
(r'&', Keyword),
(r'[~\^\*!&\[\]\(\)<>\|+=@:;,./?-]', Operator),
(r'"', String.Double, 'string-double'),
(r"'", String.Single, 'string-single'),
],
'string-double': [
(r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
(r'#\{', String.Interpol, 'interpolation'),
(r'"', String.Double, '#pop'),
],
'string-single': [
(r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
(r'#\{', String.Interpol, 'interpolation'),
(r"'", String.Double, '#pop'),
],
'string-url': [
(r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
(r'#\{', String.Interpol, 'interpolation'),
(r'\)', String.Other, '#pop'),
],
'pseudo-class': [
(r'[\w-]+', Name.Decorator),
(r'#\{', String.Interpol, 'interpolation'),
(r'', Text, '#pop'),
],
'class': [
(r'[\w-]+', Name.Class),
(r'#\{', String.Interpol, 'interpolation'),
(r'', Text, '#pop'),
],
'id': [
(r'[\w-]+', Name.Namespace),
(r'#\{', String.Interpol, 'interpolation'),
(r'', Text, '#pop'),
],
'for': [
(r'(from|to|through)', Operator.Word),
include('value'),
],
}
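# common_sass_tokens is deliberately a plain module-level dict rather than
# a lexer of its own: SassLexer and ScssLexer below copy these states and
# then append their dialect-specific terminators (newline-based for the
# indented Sass syntax, brace/semicolon-based for SCSS).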
class SassLexer(ExtendedRegexLexer):
"""
For Sass stylesheets.
*New in Pygments 1.3.*
"""
name = 'Sass'
aliases = ['sass', 'SASS']
filenames = ['*.sass']
mimetypes = ['text/x-sass']
flags = re.IGNORECASE
tokens = {
'root': [
(r'[ \t]*\n', Text),
(r'[ \t]*', _indentation),
],
'content': [
(r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
'root'),
(r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
'root'),
(r'@import', Keyword, 'import'),
(r'@for', Keyword, 'for'),
(r'@(debug|warn|if|while)', Keyword, 'value'),
(r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
(r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
(r'@extend', Keyword, 'selector'),
(r'@[a-z0-9_-]+', Keyword, 'selector'),
(r'=[\w-]+', Name.Function, 'value'),
(r'\+[\w-]+', Name.Decorator, 'value'),
(r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
bygroups(Name.Variable, Operator), 'value'),
(r':', Name.Attribute, 'old-style-attr'),
(r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
(r'', Text, 'selector'),
],
'single-comment': [
(r'.+', Comment.Single),
(r'\n', Text, 'root'),
],
'multi-comment': [
(r'.+', Comment.Multiline),
(r'\n', Text, 'root'),
],
'import': [
(r'[ \t]+', Text),
(r'\S+', String),
(r'\n', Text, 'root'),
],
'old-style-attr': [
(r'[^\s:="\[]+', Name.Attribute),
(r'#{', String.Interpol, 'interpolation'),
(r'[ \t]*=', Operator, 'value'),
(r'', Text, 'value'),
],
'new-style-attr': [
(r'[^\s:="\[]+', Name.Attribute),
(r'#{', String.Interpol, 'interpolation'),
(r'[ \t]*[=:]', Operator, 'value'),
],
'inline-comment': [
(r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
(r'#\{', String.Interpol, 'interpolation'),
(r"\*/", Comment, '#pop'),
],
}
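# merge in the shared states, then make a newline terminate a value or
# selector (the indented Sass syntax has no ';' or '}' delimiters)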
for group, common in common_sass_tokens.iteritems():
tokens[group] = copy.copy(common)
tokens['value'].append((r'\n', Text, 'root'))
tokens['selector'].append((r'\n', Text, 'root'))
class ScssLexer(RegexLexer):
"""
For SCSS stylesheets.
"""
name = 'SCSS'
aliases = ['scss']
filenames = ['*.scss']
mimetypes = ['text/x-scss']
flags = re.IGNORECASE | re.DOTALL
tokens = {
'root': [
(r'\s+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@import', Keyword, 'value'),
(r'@for', Keyword, 'for'),
(r'@(debug|warn|if|while)', Keyword, 'value'),
(r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
(r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
(r'@extend', Keyword, 'selector'),
(r'@[a-z0-9_-]+', Keyword, 'selector'),
(r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
(r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
(r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
(r'', Text, 'selector'),
],
'attr': [
(r'[^\s:="\[]+', Name.Attribute),
(r'#{', String.Interpol, 'interpolation'),
(r'[ \t]*:', Operator, 'value'),
],
'inline-comment': [
(r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
(r'#\{', String.Interpol, 'interpolation'),
(r"\*/", Comment, '#pop'),
],
}
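# merge in the shared states; unlike Sass, SCSS is brace-delimited, so
# ';', '{' and '}' (rather than newlines) return lexing to 'root'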
for group, common in common_sass_tokens.iteritems():
tokens[group] = copy.copy(common)
tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
class CoffeeScriptLexer(RegexLexer):
"""
For `CoffeeScript`_ source code.
.. _CoffeeScript: http://coffeescript.org
*New in Pygments 1.3.*
"""
name = 'CoffeeScript'
aliases = ['coffee-script', 'coffeescript', 'coffee']
filenames = ['*.coffee']
mimetypes = ['text/coffeescript']
flags = re.DOTALL
tokens = {
'commentsandwhitespace': [
(r'\s+', Text),
(r'###[^#].*?###', Comment.Multiline),
(r'#(?!##[^#]).*?\n', Comment.Single),
],
'multilineregex': [
(r'[^/#]+', String.Regex),
(r'///([gim]+\b|\B)', String.Regex, '#pop'),
(r'#{', String.Interpol, 'interpoling_string'),
(r'[/#]', String.Regex),
],
'slashstartsregex': [
include('commentsandwhitespace'),
(r'///', String.Regex, ('#pop', 'multilineregex')),
(r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
(r'', Text, '#pop'),
],
'root': [
# this next expr would lead to an infinite loop: root -> slashstartsregex
#(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
(r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
r'=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?',
Operator, 'slashstartsregex'),
(r'(?:\([^()]+\))?\s*[=-]>', Name.Function),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(r'(?<![\.\$])(for|own|in|of|while|until|'
r'loop|break|return|continue|'
r'switch|when|then|if|unless|else|'
r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
(r'(?<![\.\$])(true|false|yes|no|on|off|null|'
r'NaN|Infinity|undefined)\b',
Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|netscape|'
r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
Name.Builtin),
(r'[$a-zA-Z_][a-zA-Z0-9_\.:\$]*\s*[:=]\s', Name.Variable,
'slashstartsregex'),
(r'@[$a-zA-Z_][a-zA-Z0-9_\.:\$]*\s*[:=]\s', Name.Variable.Instance,
'slashstartsregex'),
(r'@', Name.Other, 'slashstartsregex'),
(r'@?[$a-zA-Z_][a-zA-Z0-9_\$]*', Name.Other, 'slashstartsregex'),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
('"""', String, 'tdqs'),
("'''", String, 'tsqs'),
('"', String, 'dqs'),
("'", String, 'sqs'),
],
'strings': [
(r'[^#\\\'"]+', String),
# note that all CoffeeScript strings are multi-line;
# hash marks, quotes and backslashes must be parsed one at a time
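# (the per-character rules live in the 'dqs'/'sqs'/'tdqs'/'tsqs' states
# below, each of which includes this state for the common fast path)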
],
'interpoling_string' : [
(r'}', String.Interpol, "#pop"),
include('root')
],
'dqs': [
(r'"', String, '#pop'),
(r'\\.|\'', String), # double-quoted strings don't need ' escapes
(r'#{', String.Interpol, "interpoling_string"),
include('strings')
],
'sqs': [
(r"'", String, '#pop'),
(r'#|\\.|"', String), # single quoted strings don't need " escapses
include('strings')
],
'tdqs': [
(r'"""', String, '#pop'),
(r'\\.|\'|"', String), # no need to escape quotes in triple-string
(r'#{', String.Interpol, "interpoling_string"),
include('strings'),
],
'tsqs': [
(r"'''", String, '#pop'),
(r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
include('strings')
],
}
class LiveScriptLexer(RegexLexer):
"""
For `LiveScript`_ source code.
.. _LiveScript: http://gkz.github.com/LiveScript/
*New in Pygments 1.6.*
"""
name = 'LiveScript'
aliases = ['live-script', 'livescript']
filenames = ['*.ls']
mimetypes = ['text/livescript']
flags = re.DOTALL
tokens = {
'commentsandwhitespace': [
(r'\s+', Text),
(r'/\*.*?\*/', Comment.Multiline),
(r'#.*?\n', Comment.Single),
],
'multilineregex': [
include('commentsandwhitespace'),
(r'//([gim]+\b|\B)', String.Regex, '#pop'),
(r'/', String.Regex),
(r'[^/#]+', String.Regex)
],
'slashstartsregex': [
include('commentsandwhitespace'),
(r'//', String.Regex, ('#pop', 'multilineregex')),
(r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
(r'', Text, '#pop'),
],
'root': [
# this next expr would lead to an infinite loop: root -> slashstartsregex
#(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
(r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
(r'\+\+|&&|(?<![\.\$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
r'[+*`%&\|\^/])=?',
Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(r'(?<![\.\$])(for|own|in|of|while|until|loop|break|'
r'return|continue|switch|when|then|if|unless|else|'
r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
r'extends|this|class|by|const|var|to|til)\b', Keyword,
'slashstartsregex'),
(r'(?<![\.\$])(true|false|yes|no|on|off|'
r'null|NaN|Infinity|undefined|void)\b',
Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|netscape|'
r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
Name.Builtin),
(r'[$a-zA-Z_][a-zA-Z0-9_\.\-:\$]*\s*[:=]\s', Name.Variable,
'slashstartsregex'),
(r'@[$a-zA-Z_][a-zA-Z0-9_\.\-:\$]*\s*[:=]\s', Name.Variable.Instance,
'slashstartsregex'),
(r'@', Name.Other, 'slashstartsregex'),
(r'@?[$a-zA-Z_][a-zA-Z0-9_\-]*', Name.Other, 'slashstartsregex'),
(r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
(r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
('"""', String, 'tdqs'),
("'''", String, 'tsqs'),
('"', String, 'dqs'),
("'", String, 'sqs'),
(r'\\[\w$-]+', String),
(r'<\[.*\]>', String),
],
'strings': [
(r'[^#\\\'"]+', String),
            # Note that, as in CoffeeScript, all LiveScript strings are multi-line;
            # hash marks, quotes and backslashes must be parsed one at a time.
],
'interpoling_string' : [
(r'}', String.Interpol, "#pop"),
include('root')
],
'dqs': [
(r'"', String, '#pop'),
            (r'\\.|\'', String), # double-quoted strings don't need ' escapes
(r'#{', String.Interpol, "interpoling_string"),
(r'#', String),
include('strings')
],
'sqs': [
(r"'", String, '#pop'),
(r'#|\\.|"', String), # single quoted strings don't need " escapses
include('strings')
],
'tdqs': [
(r'"""', String, '#pop'),
(r'\\.|\'|"', String), # no need to escape quotes in triple-string
(r'#{', String.Interpol, "interpoling_string"),
(r'#', String),
include('strings'),
],
'tsqs': [
(r"'''", String, '#pop'),
(r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
include('strings')
],
}
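# Hedged usage sketch (not part of Pygments): a throwaway helper running the
# LiveScriptLexer above on a snippet that hits the arrow-function rule and the
# `til` keyword LiveScript adds relative to CoffeeScript.
def _demo_livescript_tokens():
    lexer = LiveScriptLexer()
    return list(lexer.get_tokens(u'evens = [x for x from 0 til 10 by 2]\n'))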
class DuelLexer(RegexLexer):
"""
Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
See http://duelengine.org/.
See http://jsonml.org/jbst/.
*New in Pygments 1.4.*
"""
name = 'Duel'
aliases = ['duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst', 'JsonML+BST']
filenames = ['*.duel','*.jbst']
mimetypes = ['text/x-duel','text/x-jbst']
flags = re.DOTALL
tokens = {
'root': [
(r'(<%[@=#!:]?)(.*?)(%>)',
bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
(r'(<%\$)(.*?)(:)(.*?)(%>)',
bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
(r'(<%--)(.*?)(--%>)',
bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
(r'(<script.*?>)(.*?)(</script>)',
bygroups(using(HtmlLexer),
using(JavascriptLexer), using(HtmlLexer))),
(r'(.+?)(?=<)', using(HtmlLexer)),
(r'.+', using(HtmlLexer)),
],
}
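# Hedged usage sketch (not part of Pygments): the DuelLexer above is a thin
# dispatcher, handing <% ... %> blocks to JavascriptLexer and everything else
# to HtmlLexer via using(). Illustrative only, never called by the library.
def _demo_duel_tokens():
    lexer = DuelLexer()
    return list(lexer.get_tokens(u'<div><%= data.name %></div>\n'))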
class ScamlLexer(ExtendedRegexLexer):
"""
For `Scaml markup <http://scalate.fusesource.org/>`_. Scaml is Haml for Scala.
*New in Pygments 1.4.*
"""
name = 'Scaml'
aliases = ['scaml', 'SCAML']
filenames = ['*.scaml']
mimetypes = ['text/x-scaml']
flags = re.IGNORECASE
# Scaml does not yet support the " |\n" notation to
# wrap long lines. Once it does, use the custom faux
# dot instead.
# _dot = r'(?: \|\n(?=.* \|)|.)'
_dot = r'.'
tokens = {
'root': [
(r'[ \t]*\n', Text),
(r'[ \t]*', _indentation),
],
'css': [
(r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
(r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
],
'eval-or-plain': [
(r'[&!]?==', Punctuation, 'plain'),
(r'([&!]?[=~])(' + _dot + r'*\n)',
bygroups(Punctuation, using(ScalaLexer)),
'root'),
(r'', Text, 'plain'),
],
'content': [
include('css'),
(r'%[a-z0-9_:-]+', Name.Tag, 'tag'),
(r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
(r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
bygroups(Comment, Comment.Special, Comment),
'#pop'),
(r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
'#pop'),
(r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
'scaml-comment-block'), '#pop'),
(r'(-@\s*)(import)?(' + _dot + r'*\n)',
bygroups(Punctuation, Keyword, using(ScalaLexer)),
'#pop'),
(r'(-)(' + _dot + r'*\n)',
bygroups(Punctuation, using(ScalaLexer)),
'#pop'),
(r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
'#pop'),
include('eval-or-plain'),
],
'tag': [
include('css'),
(r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
(r'\[' + _dot + '*?\]', using(ScalaLexer)),
(r'\(', Text, 'html-attributes'),
(r'/[ \t]*\n', Punctuation, '#pop:2'),
(r'[<>]{1,2}(?=[ \t=])', Punctuation),
include('eval-or-plain'),
],
'plain': [
(r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
(r'(#\{)(' + _dot + '*?)(\})',
bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
(r'\n', Text, 'root'),
],
'html-attributes': [
(r'\s+', Text),
(r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
(r'[a-z0-9_:-]+', Name.Attribute),
(r'\)', Text, '#pop'),
],
'html-attribute-value': [
(r'[ \t]+', Text),
(r'[a-z0-9_]+', Name.Variable, '#pop'),
(r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
(r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
(r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
(r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
],
'html-comment-block': [
(_dot + '+', Comment),
(r'\n', Text, 'root'),
],
'scaml-comment-block': [
(_dot + '+', Comment.Preproc),
(r'\n', Text, 'root'),
],
'filter-block': [
(r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
(r'(#\{)(' + _dot + '*?)(\})',
bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
(r'\n', Text, 'root'),
],
}
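# Hedged usage sketch (not part of Pygments): ScamlLexer is indentation
# driven -- _indentation routes each line through 'content', and code after
# '=' is delegated to ScalaLexer by the 'eval-or-plain' rules above.
def _demo_scaml_tokens():
    lexer = ScamlLexer()
    return list(lexer.get_tokens(u'%p= "hello, " + name\n'))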
class JadeLexer(ExtendedRegexLexer):
"""
For Jade markup.
Jade is a variant of Scaml, see:
http://scalate.fusesource.org/documentation/scaml-reference.html
*New in Pygments 1.4.*
"""
name = 'Jade'
aliases = ['jade', 'JADE']
filenames = ['*.jade']
mimetypes = ['text/x-jade']
flags = re.IGNORECASE
_dot = r'.'
tokens = {
'root': [
(r'[ \t]*\n', Text),
(r'[ \t]*', _indentation),
],
'css': [
(r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
(r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
],
'eval-or-plain': [
(r'[&!]?==', Punctuation, 'plain'),
(r'([&!]?[=~])(' + _dot + r'*\n)',
bygroups(Punctuation, using(ScalaLexer)), 'root'),
(r'', Text, 'plain'),
],
'content': [
include('css'),
(r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
(r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
bygroups(Comment, Comment.Special, Comment),
'#pop'),
(r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
'#pop'),
(r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
'scaml-comment-block'), '#pop'),
(r'(-@\s*)(import)?(' + _dot + r'*\n)',
bygroups(Punctuation, Keyword, using(ScalaLexer)),
'#pop'),
(r'(-)(' + _dot + r'*\n)',
bygroups(Punctuation, using(ScalaLexer)),
'#pop'),
(r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
'#pop'),
(r'[a-z0-9_:-]+', Name.Tag, 'tag'),
(r'\|', Text, 'eval-or-plain'),
],
'tag': [
include('css'),
(r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
(r'\[' + _dot + '*?\]', using(ScalaLexer)),
(r'\(', Text, 'html-attributes'),
(r'/[ \t]*\n', Punctuation, '#pop:2'),
(r'[<>]{1,2}(?=[ \t=])', Punctuation),
include('eval-or-plain'),
],
'plain': [
(r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
(r'(#\{)(' + _dot + '*?)(\})',
bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
(r'\n', Text, 'root'),
],
'html-attributes': [
(r'\s+', Text),
(r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
(r'[a-z0-9_:-]+', Name.Attribute),
(r'\)', Text, '#pop'),
],
'html-attribute-value': [
(r'[ \t]+', Text),
(r'[a-z0-9_]+', Name.Variable, '#pop'),
(r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
(r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
(r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
(r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
],
'html-comment-block': [
(_dot + '+', Comment),
(r'\n', Text, 'root'),
],
'scaml-comment-block': [
(_dot + '+', Comment.Preproc),
(r'\n', Text, 'root'),
],
'filter-block': [
(r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
(r'(#\{)(' + _dot + '*?)(\})',
bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
(r'\n', Text, 'root'),
],
}
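# Hedged usage sketch (not part of Pygments): compared with Scaml, the Jade
# 'content' state above accepts bare tag names (no '%' prefix) and uses '|'
# to introduce plain text.
def _demo_jade_tokens():
    lexer = JadeLexer()
    return list(lexer.get_tokens(u'p.greeting hello\n'))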
class XQueryLexer(ExtendedRegexLexer):
"""
An XQuery lexer, parsing a stream and outputting the tokens needed to
highlight xquery code.
*New in Pygments 1.4.*
"""
name = 'XQuery'
aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
mimetypes = ['text/xquery', 'application/xquery']
xquery_parse_state = []
# FIX UNICODE LATER
#ncnamestartchar = (
# ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
# ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
# ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
# ur"[\u10000-\uEFFFF]"
#)
ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
# FIX UNICODE LATER
#ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
# ur"[\u203F-\u2040]")
ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
pitarget_namestartchar = r"(?:[A-KN-WY-Z]|_|:|[a-kn-wy-z])"
pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
prefixedname = "%s:%s" % (ncname, ncname)
unprefixedname = ncname
qname = "(?:%s|%s)" % (prefixedname, unprefixedname)
entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)'
charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)'
stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")'
stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')"
# FIX UNICODE LATER
#elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
# ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
#quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
# ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
#aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
# ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_`\|~]'
# CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
# aposattrcontentchar
#x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
flags = re.DOTALL | re.MULTILINE | re.UNICODE
def punctuation_root_callback(lexer, match, ctx):
yield match.start(), Punctuation, match.group(1)
# transition to root always - don't pop off stack
ctx.stack = ['root']
ctx.pos = match.end()
def operator_root_callback(lexer, match, ctx):
yield match.start(), Operator, match.group(1)
# transition to root always - don't pop off stack
ctx.stack = ['root']
ctx.pos = match.end()
def popstate_tag_callback(lexer, match, ctx):
yield match.start(), Name.Tag, match.group(1)
ctx.stack.append(lexer.xquery_parse_state.pop())
ctx.pos = match.end()
def popstate_xmlcomment_callback(lexer, match, ctx):
yield match.start(), String.Doc, match.group(1)
ctx.stack.append(lexer.xquery_parse_state.pop())
ctx.pos = match.end()
def popstate_kindtest_callback(lexer, match, ctx):
yield match.start(), Punctuation, match.group(1)
next_state = lexer.xquery_parse_state.pop()
if next_state == 'occurrenceindicator':
if re.match("[?*+]+", match.group(2)):
yield match.start(), Punctuation, match.group(2)
ctx.stack.append('operator')
ctx.pos = match.end()
else:
ctx.stack.append('operator')
ctx.pos = match.end(1)
else:
ctx.stack.append(next_state)
ctx.pos = match.end(1)
def popstate_callback(lexer, match, ctx):
yield match.start(), Punctuation, match.group(1)
# if we have run out of our state stack, pop whatever is on the pygments
# state stack
if len(lexer.xquery_parse_state) == 0:
ctx.stack.pop()
elif len(ctx.stack) > 1:
ctx.stack.append(lexer.xquery_parse_state.pop())
else:
            # this fallback may never be needed, but just in case, default back to root
ctx.stack = ['root']
ctx.pos = match.end()
def pushstate_element_content_starttag_callback(lexer, match, ctx):
yield match.start(), Name.Tag, match.group(1)
lexer.xquery_parse_state.append('element_content')
ctx.stack.append('start_tag')
ctx.pos = match.end()
def pushstate_cdata_section_callback(lexer, match, ctx):
yield match.start(), String.Doc, match.group(1)
ctx.stack.append('cdata_section')
lexer.xquery_parse_state.append(ctx.state.pop)
ctx.pos = match.end()
def pushstate_starttag_callback(lexer, match, ctx):
yield match.start(), Name.Tag, match.group(1)
lexer.xquery_parse_state.append(ctx.state.pop)
ctx.stack.append('start_tag')
ctx.pos = match.end()
def pushstate_operator_order_callback(lexer, match, ctx):
yield match.start(), Keyword, match.group(1)
yield match.start(), Text, match.group(2)
yield match.start(), Punctuation, match.group(3)
ctx.stack = ['root']
lexer.xquery_parse_state.append('operator')
ctx.pos = match.end()
def pushstate_operator_root_validate(lexer, match, ctx):
yield match.start(), Keyword, match.group(1)
yield match.start(), Text, match.group(2)
yield match.start(), Punctuation, match.group(3)
ctx.stack = ['root']
lexer.xquery_parse_state.append('operator')
ctx.pos = match.end()
def pushstate_operator_root_validate_withmode(lexer, match, ctx):
yield match.start(), Keyword, match.group(1)
yield match.start(), Text, match.group(2)
yield match.start(), Keyword, match.group(3)
ctx.stack = ['root']
lexer.xquery_parse_state.append('operator')
ctx.pos = match.end()
def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
yield match.start(), String.Doc, match.group(1)
ctx.stack.append('processing_instruction')
lexer.xquery_parse_state.append('operator')
ctx.pos = match.end()
def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
yield match.start(), String.Doc, match.group(1)
ctx.stack.append('processing_instruction')
lexer.xquery_parse_state.append('element_content')
ctx.pos = match.end()
def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
yield match.start(), String.Doc, match.group(1)
ctx.stack.append('cdata_section')
lexer.xquery_parse_state.append('element_content')
ctx.pos = match.end()
def pushstate_operator_cdata_section_callback(lexer, match, ctx):
yield match.start(), String.Doc, match.group(1)
ctx.stack.append('cdata_section')
lexer.xquery_parse_state.append('operator')
ctx.pos = match.end()
def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
yield match.start(), String.Doc, match.group(1)
ctx.stack.append('xml_comment')
lexer.xquery_parse_state.append('element_content')
ctx.pos = match.end()
def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
yield match.start(), String.Doc, match.group(1)
ctx.stack.append('xml_comment')
lexer.xquery_parse_state.append('operator')
ctx.pos = match.end()
def pushstate_kindtest_callback(lexer, match, ctx):
yield match.start(), Keyword, match.group(1)
yield match.start(), Text, match.group(2)
yield match.start(), Punctuation, match.group(3)
lexer.xquery_parse_state.append('kindtest')
ctx.stack.append('kindtest')
ctx.pos = match.end()
def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
yield match.start(), Keyword, match.group(1)
yield match.start(), Text, match.group(2)
yield match.start(), Punctuation, match.group(3)
lexer.xquery_parse_state.append('operator')
ctx.stack.append('kindtestforpi')
ctx.pos = match.end()
def pushstate_operator_kindtest_callback(lexer, match, ctx):
yield match.start(), Keyword, match.group(1)
yield match.start(), Text, match.group(2)
yield match.start(), Punctuation, match.group(3)
lexer.xquery_parse_state.append('operator')
ctx.stack.append('kindtest')
ctx.pos = match.end()
def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
yield match.start(), Name.Tag, match.group(1)
yield match.start(), Text, match.group(2)
yield match.start(), Punctuation, match.group(3)
lexer.xquery_parse_state.append('occurrenceindicator')
ctx.stack.append('kindtest')
ctx.pos = match.end()
def pushstate_operator_starttag_callback(lexer, match, ctx):
yield match.start(), Name.Tag, match.group(1)
lexer.xquery_parse_state.append('operator')
ctx.stack.append('start_tag')
ctx.pos = match.end()
def pushstate_operator_root_callback(lexer, match, ctx):
yield match.start(), Punctuation, match.group(1)
lexer.xquery_parse_state.append('operator')
ctx.stack = ['root']#.append('root')
ctx.pos = match.end()
def pushstate_operator_root_construct_callback(lexer, match, ctx):
yield match.start(), Keyword, match.group(1)
yield match.start(), Text, match.group(2)
yield match.start(), Punctuation, match.group(3)
lexer.xquery_parse_state.append('operator')
ctx.stack = ['root']
ctx.pos = match.end()
def pushstate_root_callback(lexer, match, ctx):
yield match.start(), Punctuation, match.group(1)
cur_state = ctx.stack.pop()
lexer.xquery_parse_state.append(cur_state)
ctx.stack = ['root']#.append('root')
ctx.pos = match.end()
def pushstate_operator_attribute_callback(lexer, match, ctx):
yield match.start(), Name.Attribute, match.group(1)
ctx.stack.append('operator')
ctx.pos = match.end()
def pushstate_operator_callback(lexer, match, ctx):
yield match.start(), Keyword, match.group(1)
yield match.start(), Text, match.group(2)
yield match.start(), Punctuation, match.group(3)
lexer.xquery_parse_state.append('operator')
ctx.pos = match.end()
tokens = {
'comment': [
# xquery comments
(r'(:\))', Comment, '#pop'),
(r'(\(:)', Comment, '#push'),
(r'[^:)]', Comment),
(r'([^:)]|:|\))', Comment),
],
'whitespace': [
(r'\s+', Text),
],
'operator': [
include('whitespace'),
(r'(\})', popstate_callback),
(r'\(:', Comment, 'comment'),
(r'(\{)', pushstate_root_callback),
(r'then|else|external|at|div|except', Keyword, 'root'),
(r'order by', Keyword, 'root'),
(r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
(r'and|or', Operator.Word, 'root'),
(r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
Operator.Word, 'root'),
(r'return|satisfies|to|union|where|preserve\s+strip',
Keyword, 'root'),
(r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\||:=|=)',
operator_root_callback),
(r'(::|;|\[|//|/|,)',
punctuation_root_callback),
(r'(castable|cast)(\s+)(as)\b',
bygroups(Keyword, Text, Keyword), 'singletype'),
(r'(instance)(\s+)(of)\b',
bygroups(Keyword, Text, Keyword), 'itemtype'),
(r'(treat)(\s+)(as)\b',
bygroups(Keyword, Text, Keyword), 'itemtype'),
(r'(case|as)\b', Keyword, 'itemtype'),
(r'(\))(\s*)(as)',
bygroups(Punctuation, Text, Keyword), 'itemtype'),
(r'\$', Name.Variable, 'varname'),
(r'(for|let)(\s+)(\$)',
bygroups(Keyword, Text, Name.Variable), 'varname'),
#(r'\)|\?|\]', Punctuation, '#push'),
(r'\)|\?|\]', Punctuation),
(r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
(r'ascending|descending|default', Keyword, '#push'),
(r'external', Keyword),
(r'collation', Keyword, 'uritooperator'),
# finally catch all string literals and stay in operator state
(stringdouble, String.Double),
(stringsingle, String.Single),
(r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
],
'uritooperator': [
(stringdouble, String.Double, '#pop'),
(stringsingle, String.Single, '#pop'),
],
'namespacedecl': [
include('whitespace'),
(r'\(:', Comment, 'comment'),
(r'(at)(\s+)('+stringdouble+')', bygroups(Keyword, Text, String.Double)),
(r"(at)(\s+)("+stringsingle+')', bygroups(Keyword, Text, String.Single)),
(stringdouble, String.Double),
(stringsingle, String.Single),
(r',', Punctuation),
(r'=', Operator),
(r';', Punctuation, 'root'),
(ncname, Name.Namespace),
],
'namespacekeyword': [
include('whitespace'),
(r'\(:', Comment, 'comment'),
(stringdouble, String.Double, 'namespacedecl'),
(stringsingle, String.Single, 'namespacedecl'),
(r'inherit|no-inherit', Keyword, 'root'),
(r'namespace', Keyword, 'namespacedecl'),
(r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
(r'preserve|no-preserve', Keyword),
(r',', Punctuation),
],
'varname': [
(r'\(:', Comment, 'comment'),
(qname, Name.Variable, 'operator'),
],
'singletype': [
(r'\(:', Comment, 'comment'),
(ncname + r'(:\*)', Name.Variable, 'operator'),
(qname, Name.Variable, 'operator'),
],
'itemtype': [
include('whitespace'),
(r'\(:', Comment, 'comment'),
(r'\$', Punctuation, 'varname'),
(r'(void)(\s*)(\()(\s*)(\))',
bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
(r'(element|attribute|schema-element|schema-attribute|comment|text|'
r'node|binary|document-node|empty-sequence)(\s*)(\()',
pushstate_occurrenceindicator_kindtest_callback),
            # MarkLogic-specific type?
(r'(processing-instruction)(\s*)(\()',
bygroups(Keyword, Text, Punctuation),
('occurrenceindicator', 'kindtestforpi')),
(r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
bygroups(Keyword, Text, Punctuation, Text, Punctuation),
'occurrenceindicator'),
(r'\(\#', Punctuation, 'pragma'),
(r';', Punctuation, '#pop'),
(r'then|else', Keyword, '#pop'),
(r'(at)(\s+)(' + stringdouble + ')',
bygroups(Keyword, Text, String.Double), 'namespacedecl'),
(r'(at)(\s+)(' + stringsingle + ')',
bygroups(Keyword, Text, String.Single), 'namespacedecl'),
(r'except|intersect|in|is|return|satisfies|to|union|where',
Keyword, 'root'),
(r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
(r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|', Operator, 'root'),
(r'external|at', Keyword, 'root'),
(r'(stable)(\s+)(order)(\s+)(by)',
bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
(r'(castable|cast)(\s+)(as)',
bygroups(Keyword, Text, Keyword), 'singletype'),
(r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
(r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
(r'case|as', Keyword, 'itemtype'),
(r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
(ncname + r':\*', Keyword.Type, 'operator'),
(qname, Keyword.Type, 'occurrenceindicator'),
],
'kindtest': [
(r'\(:', Comment, 'comment'),
(r'{', Punctuation, 'root'),
(r'(\))([*+?]?)', popstate_kindtest_callback),
(r'\*', Name, 'closekindtest'),
(qname, Name, 'closekindtest'),
(r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
],
'kindtestforpi': [
(r'\(:', Comment, 'comment'),
(r'\)', Punctuation, '#pop'),
(ncname, Name.Variable),
(stringdouble, String.Double),
(stringsingle, String.Single),
],
'closekindtest': [
(r'\(:', Comment, 'comment'),
(r'(\))', popstate_callback),
(r',', Punctuation),
(r'(\{)', pushstate_operator_root_callback),
(r'\?', Punctuation),
],
'xml_comment': [
(r'(-->)', popstate_xmlcomment_callback),
(r'[^-]{1,2}', Literal),
(ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
unirange(0x10000, 0x10ffff), Literal),
],
'processing_instruction': [
(r'\s+', Text, 'processing_instruction_content'),
(r'\?>', String.Doc, '#pop'),
(pitarget, Name),
],
'processing_instruction_content': [
(r'\?>', String.Doc, '#pop'),
(ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
unirange(0x10000, 0x10ffff), Literal),
],
'cdata_section': [
(r']]>', String.Doc, '#pop'),
(ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
unirange(0x10000, 0x10ffff), Literal),
],
'start_tag': [
include('whitespace'),
(r'(/>)', popstate_tag_callback),
(r'>', Name.Tag, 'element_content'),
(r'"', Punctuation, 'quot_attribute_content'),
(r"'", Punctuation, 'apos_attribute_content'),
(r'=', Operator),
(qname, Name.Tag),
],
'quot_attribute_content': [
(r'"', Punctuation, 'start_tag'),
(r'(\{)', pushstate_root_callback),
(r'""', Name.Attribute),
(quotattrcontentchar, Name.Attribute),
(entityref, Name.Attribute),
(charref, Name.Attribute),
(r'\{\{|\}\}', Name.Attribute),
],
'apos_attribute_content': [
(r"'", Punctuation, 'start_tag'),
(r'\{', Punctuation, 'root'),
(r"''", Name.Attribute),
(aposattrcontentchar, Name.Attribute),
(entityref, Name.Attribute),
(charref, Name.Attribute),
(r'\{\{|\}\}', Name.Attribute),
],
'element_content': [
(r'</', Name.Tag, 'end_tag'),
(r'(\{)', pushstate_root_callback),
(r'(<!--)', pushstate_element_content_xmlcomment_callback),
(r'(<\?)', pushstate_element_content_processing_instruction_callback),
(r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
(r'(<)', pushstate_element_content_starttag_callback),
(elementcontentchar, Literal),
(entityref, Literal),
(charref, Literal),
(r'\{\{|\}\}', Literal),
],
'end_tag': [
include('whitespace'),
(r'(>)', popstate_tag_callback),
(qname, Name.Tag),
],
'xmlspace_decl': [
(r'\(:', Comment, 'comment'),
(r'preserve|strip', Keyword, '#pop'),
],
'declareordering': [
(r'\(:', Comment, 'comment'),
include('whitespace'),
(r'ordered|unordered', Keyword, '#pop'),
],
'xqueryversion': [
include('whitespace'),
(r'\(:', Comment, 'comment'),
(stringdouble, String.Double),
(stringsingle, String.Single),
(r'encoding', Keyword),
(r';', Punctuation, '#pop'),
],
'pragma': [
(qname, Name.Variable, 'pragmacontents'),
],
'pragmacontents': [
(r'#\)', Punctuation, 'operator'),
(ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
unirange(0x10000, 0x10ffff), Literal),
(r'(\s+)', Text),
],
'occurrenceindicator': [
include('whitespace'),
(r'\(:', Comment, 'comment'),
(r'\*|\?|\+', Operator, 'operator'),
(r':=', Operator, 'root'),
(r'', Text, 'operator'),
],
'option': [
include('whitespace'),
(qname, Name.Variable, '#pop'),
],
'qname_braren': [
include('whitespace'),
(r'(\{)', pushstate_operator_root_callback),
(r'(\()', Punctuation, 'root'),
],
'element_qname': [
(qname, Name.Variable, 'root'),
],
'attribute_qname': [
(qname, Name.Variable, 'root'),
],
'root': [
include('whitespace'),
(r'\(:', Comment, 'comment'),
# handle operator state
# order on numbers matters - handle most complex first
(r'\d+(\.\d*)?[eE][\+\-]?\d+', Number.Double, 'operator'),
(r'(\.\d+)[eE][\+\-]?\d+', Number.Double, 'operator'),
(r'(\.\d+|\d+\.\d*)', Number, 'operator'),
(r'(\d+)', Number.Integer, 'operator'),
(r'(\.\.|\.|\))', Punctuation, 'operator'),
(r'(declare)(\s+)(construction)',
bygroups(Keyword, Text, Keyword), 'operator'),
(r'(declare)(\s+)(default)(\s+)(order)',
bygroups(Keyword, Text, Keyword, Text, Keyword), 'operator'),
(ncname + ':\*', Name, 'operator'),
('\*:'+ncname, Name.Tag, 'operator'),
('\*', Name.Tag, 'operator'),
(stringdouble, String.Double, 'operator'),
(stringsingle, String.Single, 'operator'),
(r'(\})', popstate_callback),
#NAMESPACE DECL
(r'(declare)(\s+)(default)(\s+)(collation)',
bygroups(Keyword, Text, Keyword, Text, Keyword)),
(r'(module|declare)(\s+)(namespace)',
bygroups(Keyword, Text, Keyword), 'namespacedecl'),
(r'(declare)(\s+)(base-uri)',
bygroups(Keyword, Text, Keyword), 'namespacedecl'),
#NAMESPACE KEYWORD
(r'(declare)(\s+)(default)(\s+)(element|function)',
bygroups(Keyword, Text, Keyword, Text, Keyword), 'namespacekeyword'),
(r'(import)(\s+)(schema|module)',
bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
(r'(declare)(\s+)(copy-namespaces)',
bygroups(Keyword, Text, Keyword), 'namespacekeyword'),
#VARNAMEs
(r'(for|let|some|every)(\s+)(\$)',
bygroups(Keyword, Text, Name.Variable), 'varname'),
(r'\$', Name.Variable, 'varname'),
(r'(declare)(\s+)(variable)(\s+)(\$)',
bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
#ITEMTYPE
(r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
(r'(element|attribute|schema-element|schema-attribute|comment|'
r'text|node|document-node|empty-sequence)(\s+)(\()',
pushstate_operator_kindtest_callback),
(r'(processing-instruction)(\s+)(\()',
pushstate_operator_kindtestforpi_callback),
(r'(<!--)', pushstate_operator_xmlcomment_callback),
(r'(<\?)', pushstate_operator_processing_instruction_callback),
(r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
# (r'</', Name.Tag, 'end_tag'),
(r'(<)', pushstate_operator_starttag_callback),
(r'(declare)(\s+)(boundary-space)',
bygroups(Keyword, Text, Keyword), 'xmlspace_decl'),
(r'(validate)(\s+)(lax|strict)',
pushstate_operator_root_validate_withmode),
(r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
(r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
(r'(element|attribute)(\s*)(\{)',
pushstate_operator_root_construct_callback),
(r'(document|text|processing-instruction|comment)(\s*)(\{)',
pushstate_operator_root_construct_callback),
#ATTRIBUTE
(r'(attribute)(\s+)(?=' + qname + r')',
bygroups(Keyword, Text), 'attribute_qname'),
#ELEMENT
(r'(element)(\s+)(?=' +qname+ r')',
bygroups(Keyword, Text), 'element_qname'),
#PROCESSING_INSTRUCTION
(r'(processing-instruction)(\s+)(' + ncname + r')(\s*)(\{)',
bygroups(Keyword, Text, Name.Variable, Text, Punctuation),
'operator'),
(r'(declare|define)(\s+)(function)',
bygroups(Keyword, Text, Keyword)),
(r'(\{)', pushstate_operator_root_callback),
(r'(unordered|ordered)(\s*)(\{)',
pushstate_operator_order_callback),
(r'(declare)(\s+)(ordering)',
bygroups(Keyword, Text, Keyword), 'declareordering'),
(r'(xquery)(\s+)(version)',
bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),
(r'(\(#)', Punctuation, 'pragma'),
# sometimes return can occur in root state
(r'return', Keyword),
(r'(declare)(\s+)(option)', bygroups(Keyword, Text, Keyword),
'option'),
#URI LITERALS - single and double quoted
(r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
(r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
(r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
bygroups(Keyword, Punctuation)),
(r'(descendant|following-sibling|following|parent|preceding-sibling'
r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
(r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
(r'then|else', Keyword),
# ML specific
(r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
(r'(catch)(\s*)(\()(\$)',
bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
(r'(@'+qname+')', Name.Attribute),
(r'(@'+ncname+')', Name.Attribute),
(r'@\*:'+ncname, Name.Attribute),
(r'(@)', Name.Attribute),
(r'//|/|\+|-|;|,|\(|\)', Punctuation),
# STANDALONE QNAMES
(qname + r'(?=\s*{)', Name.Tag, 'qname_braren'),
(qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'),
(qname, Name.Tag, 'operator'),
]
}
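# Hedged usage sketch (not part of Pygments): the callbacks above maintain a
# private xquery_parse_state stack alongside the pygments context stack, so a
# '}' closing an enclosed expression returns to whichever state opened it; a
# direct element constructor exercises that round trip.
def _demo_xquery_tokens():
    lexer = XQueryLexer()
    return list(lexer.get_tokens(u'<greeting>{ 1 + 1 }</greeting>'))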
class DartLexer(RegexLexer):
"""
For `Dart <http://dartlang.org/>`_ source code.
*New in Pygments 1.5.*
"""
name = 'Dart'
aliases = ['dart']
filenames = ['*.dart']
mimetypes = ['text/x-dart']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
include('string_literal'),
(r'#!(.*?)$', Comment.Preproc),
(r'\b(import|export)\b', Keyword, 'import_decl'),
(r'\b(library|source|part of|part)\b', Keyword),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'\b(class)\b(\s+)',
bygroups(Keyword.Declaration, Text), 'class'),
(r'\b(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
Keyword),
(r'\b(abstract|const|extends|factory|final|get|implements|'
r'native|operator|set|static|typedef|var)\b', Keyword.Declaration),
(r'\b(bool|double|Dynamic|int|num|Object|String|void)\b', Keyword.Type),
(r'\b(false|null|true)\b', Keyword.Constant),
(r'[~!%^&*+=|?:<>/-]|as', Operator),
(r'[a-zA-Z_$][a-zA-Z0-9_]*:', Name.Label),
(r'[a-zA-Z_$][a-zA-Z0-9_]*', Name),
(r'[(){}\[\],.;]', Punctuation),
(r'0[xX][0-9a-fA-F]+', Number.Hex),
# DIGIT+ (‘.’ DIGIT*)? EXPONENT?
(r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
(r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
(r'\n', Text)
# pseudo-keyword negate intentionally left out
],
'class': [
(r'[a-zA-Z_$][a-zA-Z0-9_]*', Name.Class, '#pop')
],
'import_decl': [
include('string_literal'),
(r'\s+', Text),
(r'\b(as|show|hide)\b', Keyword),
(r'[a-zA-Z_$][a-zA-Z0-9_]*', Name),
(r'\,', Punctuation),
(r'\;', Punctuation, '#pop')
],
'string_literal': [
# Raw strings.
(r'r"""([\s|\S]*?)"""', String.Double),
(r"r'''([\s|\S]*?)'''", String.Single),
(r'r"(.*?)"', String.Double),
(r"r'(.*?)'", String.Single),
# Normal Strings.
(r'"""', String.Double, 'string_double_multiline'),
(r"'''", String.Single, 'string_single_multiline'),
(r'"', String.Double, 'string_double'),
(r"'", String.Single, 'string_single')
],
'string_common': [
(r"\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z\'\"$\\])",
String.Escape),
(r'(\$)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(String.Interpol, Name)),
(r'(\$\{)(.*?)(\})',
bygroups(String.Interpol, using(this), String.Interpol))
],
'string_double': [
(r'"', String.Double, '#pop'),
(r'[^\"$\\\n]+', String.Double),
include('string_common'),
(r'\$+', String.Double)
],
'string_double_multiline': [
(r'"""', String.Double, '#pop'),
(r'[^\"$\\]+', String.Double),
include('string_common'),
(r'(\$|\")+', String.Double)
],
'string_single': [
(r"'", String.Single, '#pop'),
(r"[^\'$\\\n]+", String.Single),
include('string_common'),
(r'\$+', String.Single)
],
'string_single_multiline': [
(r"'''", String.Single, '#pop'),
(r'[^\'$\\]+', String.Single),
include('string_common'),
(r'(\$|\')+', String.Single)
]
}
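# Hedged usage sketch (not part of Pygments): Dart string interpolation is
# handled by the shared 'string_common' state above, which emits
# String.Interpol for both $name and ${...} forms.
def _demo_dart_tokens():
    lexer = DartLexer()
    return list(lexer.get_tokens(u'main() { print("hi $name"); }\n'))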
class TypeScriptLexer(RegexLexer):
"""
    For `TypeScript <http://www.typescriptlang.org/>`_ source code.
*New in Pygments 1.6.*
"""
name = 'TypeScript'
aliases = ['ts']
filenames = ['*.ts']
mimetypes = ['text/x-typescript']
flags = re.DOTALL
tokens = {
'commentsandwhitespace': [
(r'\s+', Text),
(r'<!--', Comment),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline)
],
'slashstartsregex': [
include('commentsandwhitespace'),
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
(r'(?=/)', Text, ('#pop', 'badregex')),
(r'', Text, '#pop')
],
'badregex': [
(r'\n', Text, '#pop')
],
'root': [
(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
r'this)\b', Keyword, 'slashstartsregex'),
(r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
(r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
r'extends|final|float|goto|implements|import|int|interface|long|native|'
r'package|private|protected|public|short|static|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Reserved),
(r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|netscape|'
r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
r'window)\b', Name.Builtin),
# Match stuff like: module name {...}
(r'\b(module)(\s*)(\s*[a-zA-Z0-9_?.$][\w?.$]*)(\s*)',
bygroups(Keyword.Reserved, Text, Name.Other, Text), 'slashstartsregex'),
# Match variable type keywords
(r'\b(string|bool|number)\b', Keyword.Type),
# Match stuff like: constructor
(r'\b(constructor|declare|interface|as|AS)\b', Keyword.Reserved),
# Match stuff like: super(argument, list)
(r'(super)(\s*)(\([a-zA-Z0-9,_?.$\s]+\s*\))',
bygroups(Keyword.Reserved, Text), 'slashstartsregex'),
# Match stuff like: function() {...}
(r'([a-zA-Z_?.$][\w?.$]*)\(\) \{', Name.Other, 'slashstartsregex'),
# Match stuff like: (function: return type)
(r'([a-zA-Z0-9_?.$][\w?.$]*)(\s*:\s*)([a-zA-Z0-9_?.$][\w?.$]*)',
bygroups(Name.Other, Text, Keyword.Type)),
(r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
]
}
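# Hedged usage sketch (not part of Pygments): the 'name : type' rule above
# tags the annotation as Keyword.Type instead of plain Name.Other.
def _demo_typescript_tokens():
    lexer = TypeScriptLexer()
    return list(lexer.get_tokens(u'var count: number = 1;\n'))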
class LassoLexer(RegexLexer):
"""
For `Lasso <http://www.lassosoft.com/>`_ source code, covering both Lasso 9
syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
HTML, use the `LassoHtmlLexer`.
Additional options accepted:
`builtinshighlighting`
If given and ``True``, highlight builtin types, traits, methods, and
members (default: ``True``).
`requiredelimiters`
If given and ``True``, only highlight code between delimiters as Lasso
(default: ``False``).
*New in Pygments 1.6.*
"""
name = 'Lasso'
aliases = ['lasso', 'lassoscript']
filenames = ['*.lasso', '*.lasso[89]']
alias_filenames = ['*.incl', '*.inc', '*.las']
mimetypes = ['text/x-lasso']
flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
tokens = {
'root': [
(r'^#!.+lasso9\b', Comment.Preproc, 'lasso'),
(r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
(r'\[noprocess\]', Comment.Preproc, ('delimiters', 'noprocess')),
(r'\[', Comment.Preproc, ('delimiters', 'squarebrackets')),
(r'<\?(LassoScript|lasso|=)', Comment.Preproc,
('delimiters', 'anglebrackets')),
(r'<', Other, 'delimiters'),
(r'\s+', Other),
(r'', Other, ('delimiters', 'lassofile')),
],
'delimiters': [
(r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
(r'\[noprocess\]', Comment.Preproc, 'noprocess'),
(r'\[', Comment.Preproc, 'squarebrackets'),
(r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
(r'<', Other),
(r'[^[<]+', Other),
],
'nosquarebrackets': [
(r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
(r'<', Other),
(r'[^<]+', Other),
],
'noprocess': [
(r'\[/noprocess\]', Comment.Preproc, '#pop'),
(r'\[', Other),
(r'[^[]', Other),
],
'squarebrackets': [
(r'\]', Comment.Preproc, '#pop'),
include('lasso'),
],
'anglebrackets': [
(r'\?>', Comment.Preproc, '#pop'),
include('lasso'),
],
'lassofile': [
(r'\]', Comment.Preproc, '#pop'),
(r'\?>', Comment.Preproc, '#pop'),
include('lasso'),
],
'whitespacecomments': [
(r'\s+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*\*!.*?\*/', String.Doc),
(r'/\*.*?\*/', Comment.Multiline),
],
'lasso': [
# whitespace/comments
include('whitespacecomments'),
# literals
(r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
(r'0x[\da-f]+', Number.Hex),
(r'\d+', Number.Integer),
(r'([+-]?)(infinity|NaN)\b', bygroups(Operator, Number)),
(r"'", String.Single, 'singlestring'),
(r'"', String.Double, 'doublestring'),
(r'`[^`]*`', String.Backtick),
# names
(r'\$[a-z_][\w.]*', Name.Variable),
(r'#([a-z_][\w.]*|\d+)', Name.Variable.Instance),
(r"(\.)('[a-z_][\w.]*')",
bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
(r"(self)(\s*->\s*)('[a-z_][\w.]*')",
bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)),
(r'(\.\.?)([a-z_][\w.]*)',
bygroups(Name.Builtin.Pseudo, Name.Other.Member)),
(r'(->\\?\s*|&\s*)([a-z_][\w.]*)',
bygroups(Operator, Name.Other.Member)),
(r'(self|inherited|global|void)\b', Name.Builtin.Pseudo),
(r'-[a-z_][\w.]*', Name.Attribute),
(r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
(r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
r'Error_InvalidDatabase|Error_InvalidPassword|'
r'Error_InvalidUsername|Error_ModuleNotFound|'
r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
r'Error_UpdateError)\b', Name.Exception),
# definitions
(r'(define)(\s+)([a-z_][\w.]*)(\s*=>\s*)(type|trait|thread)\b',
bygroups(Keyword.Declaration, Text, Name.Class, Operator, Keyword)),
(r'(define)(\s+)([a-z_][\w.]*)(\s*->\s*)([a-z_][\w.]*=?|[-+*/%<>]|==)',
bygroups(Keyword.Declaration, Text, Name.Class, Operator,
Name.Function), 'signature'),
(r'(define)(\s+)([a-z_][\w.]*)',
bygroups(Keyword.Declaration, Text, Name.Function), 'signature'),
(r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|'
r'[-+*/%<>]|==)(?=\s*\())', bygroups(Keyword, Text, Name.Function),
'signature'),
(r'(public|protected|private)(\s+)([a-z_][\w.]*)',
bygroups(Keyword, Text, Name.Function)),
# keywords
(r'(true|false|none|minimal|full|all)\b', Keyword.Constant),
(r'(local|var|variable|data(?=\s))\b', Keyword.Declaration),
(r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
r'null|list|queue|set|stack|staticarray)\b', Keyword.Type),
(r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)),
(r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)),
(r'require\b', Keyword, 'requiresection'),
(r'(/?)(Namespace_Using)\b', bygroups(Punctuation, Keyword.Namespace)),
(r'(/?)(Cache|Database_Names|Database_SchemaNames|'
r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|'
r'Link_FirstRecord|Link_LastGroup|Link_LastRecord|Link_NextGroup|'
r'Link_NextRecord|Link_PrevGroup|Link_PrevRecord|Log|Loop|'
r'NoProcess|Output_None|Portal|Private|Protect|Records|Referer|'
r'Referrer|Repeating|ResultSet|Rows|Search_Args|Search_Arguments|'
r'Select|Sort_Args|Sort_Arguments|Thread_Atomic|Value_List|While|'
r'Abort|Case|Else|If_Empty|If_False|If_Null|If_True|Loop_Abort|'
r'Loop_Continue|Loop_Count|Params|Params_Up|Return|Return_Value|'
r'Run_Children|SOAP_DefineTag|SOAP_LastRequest|SOAP_LastResponse|'
r'Tag_Name|ascending|average|by|define|descending|do|equals|'
r'frozen|group|handle_failure|import|in|into|join|let|match|max|'
r'min|on|order|parent|protected|provide|public|require|skip|'
r'split_thread|sum|take|thread|to|trait|type|where|with|yield)\b',
bygroups(Punctuation, Keyword)),
# other
(r',', Punctuation, 'commamember'),
(r'(and|or|not)\b', Operator.Word),
(r'([a-z_][\w.]*)(\s*::\s*)?([a-z_][\w.]*)?(\s*=(?!=))',
bygroups(Name, Punctuation, Name.Label, Operator)),
(r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
(r'(=)(bw|ew|cn|lte?|gte?|n?eq|ft|n?rx)\b',
bygroups(Operator, Operator.Word)),
(r':=|[-+*/%=<>&|!?\\]+', Operator),
(r'[{}():;,@^]', Punctuation),
],
'singlestring': [
(r"'", String.Single, '#pop'),
(r"[^'\\]+", String.Single),
include('escape'),
(r"\\+", String.Single),
],
'doublestring': [
(r'"', String.Double, '#pop'),
(r'[^"\\]+', String.Double),
include('escape'),
(r'\\+', String.Double),
],
'escape': [
(r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:]+:|'
r'[abefnrtv?\"\'\\]|$)', String.Escape),
],
'signature': [
(r'=>', Operator, '#pop'),
(r'\)', Punctuation, '#pop'),
(r'[(,]', Punctuation, 'parameter'),
include('lasso'),
],
'parameter': [
(r'\)', Punctuation, '#pop'),
(r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
(r'\.\.\.', Name.Builtin.Pseudo),
include('lasso'),
],
'requiresection': [
(r'(([a-z_][\w.]*=?|[-+*/%<>]|==)(?=\s*\())', Name, 'requiresignature'),
(r'(([a-z_][\w.]*=?|[-+*/%<>]|==)(?=(\s*::\s*[\w.]+)?\s*,))', Name),
(r'[a-z_][\w.]*=?|[-+*/%<>]|==', Name, '#pop'),
(r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
(r',', Punctuation),
include('whitespacecomments'),
],
'requiresignature': [
(r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'),
(r'\)', Punctuation, '#pop:2'),
(r'-?[a-z_][\w.]*', Name.Attribute),
(r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
(r'\.\.\.', Name.Builtin.Pseudo),
(r'[(,]', Punctuation),
include('whitespacecomments'),
],
'commamember': [
(r'(([a-z_][\w.]*=?|[-+*/%<>]|==)'
r'(?=\s*(\(([^()]*\([^()]*\))*[^)]*\)\s*)?(::[\w.\s]+)?=>))',
Name.Function, 'signature'),
include('whitespacecomments'),
(r'', Text, '#pop'),
],
}
def __init__(self, **options):
self.builtinshighlighting = get_bool_opt(
options, 'builtinshighlighting', True)
self.requiredelimiters = get_bool_opt(
options, 'requiredelimiters', False)
self._builtins = set()
self._members = set()
if self.builtinshighlighting:
from pygments.lexers._lassobuiltins import BUILTINS, MEMBERS
for key, value in BUILTINS.iteritems():
self._builtins.update(value)
for key, value in MEMBERS.iteritems():
self._members.update(value)
RegexLexer.__init__(self, **options)
def get_tokens_unprocessed(self, text):
stack = ['root']
if self.requiredelimiters:
stack.append('delimiters')
for index, token, value in \
RegexLexer.get_tokens_unprocessed(self, text, stack):
if (token is Name.Other and value.lower() in self._builtins or
token is Name.Other.Member and value.lower() in self._members):
yield index, Name.Builtin, value
continue
yield index, token, value
def analyse_text(text):
rv = 0.0
if 'bin/lasso9' in text:
rv += 0.8
if re.search(r'<\?(=|lasso)|\A\[', text, re.I):
rv += 0.4
if re.search(r'local\(', text, re.I):
rv += 0.4
if '?>' in text:
rv += 0.1
return rv
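# Hedged usage sketch (not part of Pygments): the two constructor options
# documented in the class docstring change how much of the input is treated
# as Lasso and whether builtins are re-tagged in get_tokens_unprocessed.
def _demo_lasso_options():
    plain = LassoLexer(builtinshighlighting=False)
    delimited = LassoLexer(requiredelimiters=True)
    return (list(plain.get_tokens(u'local(x = 1)\n')),
            list(delimited.get_tokens(u'before [output: 1] after\n')))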
class QmlLexer(RegexLexer):
"""
For QML files. See http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html.
*New in Pygments 1.6.*
"""
    # QML is based on JavaScript, so much of this is taken from the
    # JavascriptLexer above.
name = 'QML'
aliases = ['qml', 'Qt Meta Language', 'Qt modeling Language']
filenames = ['*.qml',]
mimetypes = [ 'application/x-qml',]
# pasted from JavascriptLexer, with some additions
flags = re.DOTALL
tokens = {
'commentsandwhitespace': [
(r'\s+', Text),
(r'<!--', Comment),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline)
],
'slashstartsregex': [
include('commentsandwhitespace'),
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
(r'(?=/)', Text, ('#pop', 'badregex')),
(r'', Text, '#pop')
],
'badregex': [
(r'\n', Text, '#pop')
],
'root' : [
(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
# QML insertions
            (r'\bid\s*:\s*[A-Za-z][_A-Za-z.0-9]*', Keyword.Declaration,
             'slashstartsregex'),
            (r'\b[A-Za-z][_A-Za-z.0-9]*\s*:', Keyword, 'slashstartsregex'),
# the rest from JavascriptLexer
(r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
r'this)\b', Keyword, 'slashstartsregex'),
(r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
(r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
r'extends|final|float|goto|implements|import|int|interface|long|native|'
r'package|private|protected|public|short|static|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Reserved),
(r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|netscape|'
r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
r'window)\b', Name.Builtin),
(r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
]
}
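# Hedged usage sketch (not part of Pygments): the two QML-specific rules at
# the top of 'root' above catch 'id:' declarations and other 'name:' property
# bindings before the inherited JavaScript rules run.
def _demo_qml_tokens():
    lexer = QmlLexer()
    return list(lexer.get_tokens(u'Rectangle { id: root; width: 100 }\n'))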
|
mrfuxi/django
|
refs/heads/master
|
tests/migrations/test_migrations_squashed_complex/1_auto.py
|
1155
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
operations = [
migrations.RunPython(migrations.RunPython.noop)
]
|
garbled1/ansible
|
refs/heads/devel
|
lib/ansible/plugins/lookup/mongodb.py
|
19
|
# (c) 2016, Marcos Diez <marcos@unitron.com.br>
# https://github.com/marcosdiez/
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
from ansible.module_utils.six import string_types, integer_types
__metaclass__ = type
DOCUMENTATION = '''
author: 'Marcos Diez <marcos (at) unitron.com.br>'
lookup: mongodb
version_added: "2.3"
short_description: lookup info from MongoDB
description:
- 'The ``MongoDB`` lookup runs the *find()* command on a given *collection* on a given *MongoDB* server.'
    - 'The result is a list of JSON documents, which differs slightly from what PyMongo returns. In particular, *timestamps* are converted to epoch integers.'
options:
connect_string:
description:
- Can be any valid MongoDB connection string, supporting authentication, replicasets, etc.
- "More info at https://docs.mongodb.org/manual/reference/connection-string/"
default: "mongodb://localhost/"
database:
description:
            - Name of the database on which the query will be made
required: True
collection:
description:
            - Name of the collection on which the query will be made
required: True
filter:
description:
            - Query criteria used to filter the results
type: 'dict'
default: '{}'
projection:
description:
- Fields you want returned
type: dict
default: "{}"
skip:
description:
            - How many results should be skipped
type: integer
limit:
description:
- How many results should be shown
type: integer
sort:
description:
            - Sorting rules. Note that the PyMongo sort constants (e.g. ASCENDING) are given as their string names.
type: list
default: "[]"
notes:
- "Please check https://api.mongodb.org/python/current/api/pymongo/collection.html?highlight=find#pymongo.collection.Collection.find for more detais."
requirements:
- pymongo >= 2.4 (python library)
'''
EXAMPLES = '''
- hosts: all
gather_facts: false
vars:
mongodb_parameters:
#mandatory parameters
database: 'local'
#optional
collection: "startup_log"
connection_string: "mongodb://localhost/"
extra_connection_parameters: { "ssl" : True , "ssl_certfile": /etc/self_signed_certificate.pem" }
#optional query parameters, we accept any parameter from the normal mongodb query.
filter: { "hostname": "batman" }
projection: { "pid": True , "_id" : False , "hostname" : True }
skip: 0
limit: 1
sort: [ [ "startTime" , "ASCENDING" ] , [ "age", "DESCENDING" ] ]
tasks:
- debug: msg="Mongo has already started with the following PID [{{ item.pid }}]"
with_mongodb: "{{mongodb_parameters}}"
'''
import datetime
try:
from pymongo import ASCENDING, DESCENDING
from pymongo.errors import ConnectionFailure
from pymongo import MongoClient
except ImportError:
try: # for older PyMongo 2.2
from pymongo import Connection as MongoClient
except ImportError:
pymongo_found = False
else:
pymongo_found = True
else:
pymongo_found = True
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def _fix_sort_parameter(self, sort_parameter):
if sort_parameter is None:
return sort_parameter
if not isinstance(sort_parameter, list):
raise AnsibleError(u"Error. Sort parameters must be a list, not [ {} ]".format(sort_parameter))
for item in sort_parameter:
self._convert_sort_string_to_constant(item)
return sort_parameter
def _convert_sort_string_to_constant(self, item):
original_sort_order = item[1]
sort_order = original_sort_order.upper()
if sort_order == u"ASCENDING":
item[1] = ASCENDING
elif sort_order == u"DESCENDING":
item[1] = DESCENDING
# else the user knows what s/he is doing and we won't predict. PyMongo will return an error if necessary
def convert_mongo_result_to_valid_json(self, result):
if result is None:
return result
if isinstance(result, integer_types + (float, bool)):
return result
if isinstance(result, string_types):
return result
elif isinstance(result, list):
new_list = []
for elem in result:
new_list.append(self.convert_mongo_result_to_valid_json(elem))
return new_list
elif isinstance(result, dict):
new_dict = {}
for key in result.keys():
value = result[key] # python2 and 3 compatible....
new_dict[key] = self.convert_mongo_result_to_valid_json(value)
return new_dict
elif isinstance(result, datetime.datetime):
# epoch
            return (result - datetime.datetime(1970, 1, 1)).total_seconds()
else:
# failsafe
return u"{}".format(result)
def run(self, terms, variables, **kwargs):
ret = []
for term in terms:
u'''
Makes a MongoDB query and returns the output as a valid list of json.
Timestamps are converted to epoch integers/longs.
Here is a sample playbook that uses it:
-------------------------------------------------------------------------------
- hosts: all
gather_facts: false
vars:
mongodb_parameters:
#optional parameter, default = "mongodb://localhost/"
# connection_string: "mongodb://localhost/"
#mandatory parameters
database: 'local'
collection: "startup_log"
#optional query parameters
#we accept any parameter from the normal mongodb query.
# the official documentation is here
# https://api.mongodb.org/python/current/api/pymongo/collection.html?highlight=find#pymongo.collection.Collection.find
# filter: { "hostname": "batman" }
# projection: { "pid": True , "_id" : False , "hostname" : True }
# skip: 0
# limit: 1
# sort: [ [ "startTime" , "ASCENDING" ] , [ "age", "DESCENDING" ] ]
# extra_connection_parameters = { }
# dictionary with extra parameters like ssl, ssl_keyfile, maxPoolSize etc...
# the full list is available here. It varies from PyMongo version
# https://api.mongodb.org/python/current/api/pymongo/mongo_client.html#pymongo.mongo_client.MongoClient
tasks:
- debug: msg="Mongo has already started with the following PID [{{ item.pid }}] - full_data {{ item }} "
with_items:
- "{{ lookup('mongodb', mongodb_parameters) }}"
-------------------------------------------------------------------------------
'''
connection_string = term.get(u'connection_string', u"mongodb://localhost")
database = term[u"database"]
collection = term[u'collection']
extra_connection_parameters = term.get(u'extra_connection_parameters', {})
if u"extra_connection_parameters" in term:
del term[u"extra_connection_parameters"]
if u"connection_string" in term:
del term[u"connection_string"]
del term[u"database"]
del term[u"collection"]
if u"sort" in term:
term[u"sort"] = self._fix_sort_parameter(term[u"sort"])
            # all remaining parameters are passed straight to Mongo, keeping the lookup both future- and backward-compatible
try:
client = MongoClient(connection_string, **extra_connection_parameters)
results = client[database][collection].find(**term)
for result in results:
result = self.convert_mongo_result_to_valid_json(result)
ret.append(result)
except ConnectionFailure as e:
raise AnsibleError(u'unable to connect to database: %s' % str(e))
return ret
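# Hedged illustration (not part of the plugin): the datetime-to-epoch rule
# applied by convert_mongo_result_to_valid_json above, shown standalone and
# never called by Ansible itself.
def _epoch_seconds_example():
    ts = datetime.datetime(2016, 1, 1, 12, 0, 0)
    # identical arithmetic to the lookup: seconds since the Unix epoch
    return (ts - datetime.datetime(1970, 1, 1)).total_seconds()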
|
jdramani/servo
|
refs/heads/master
|
components/script/dom/bindings/codegen/parser/tests/test_dictionary.py
|
134
|
def WebIDLTest(parser, harness):
parser.parse("""
dictionary Dict2 : Dict1 {
long child = 5;
Dict1 aaandAnother;
};
dictionary Dict1 {
long parent;
double otherParent;
};
""")
results = parser.finish()
    dict1 = results[1]
    dict2 = results[0]
harness.check(len(dict1.members), 2, "Dict1 has two members")
harness.check(len(dict2.members), 2, "Dict2 has four members")
harness.check(dict1.members[0].identifier.name, "otherParent",
"'o' comes before 'p'")
harness.check(dict1.members[1].identifier.name, "parent",
"'o' really comes before 'p'")
harness.check(dict2.members[0].identifier.name, "aaandAnother",
"'a' comes before 'c'")
harness.check(dict2.members[1].identifier.name, "child",
"'a' really comes before 'c'")
# Now reset our parser
parser = parser.reset()
threw = False
try:
parser.parse("""
dictionary Dict {
long prop = 5;
long prop;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow name duplication in a dictionary")
# Now reset our parser again
parser = parser.reset()
threw = False
try:
parser.parse("""
dictionary Dict1 : Dict2 {
long prop = 5;
};
dictionary Dict2 : Dict3 {
long prop2;
};
dictionary Dict3 {
double prop;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow name duplication in a dictionary and "
"its ancestor")
# More reset
parser = parser.reset()
threw = False
try:
parser.parse("""
interface Iface {};
dictionary Dict : Iface {
long prop;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow non-dictionary parents for dictionaries")
# Even more reset
parser = parser.reset()
threw = False
try:
parser.parse("""
dictionary A : B {};
dictionary B : A {};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow cycles in dictionary inheritance chains")
parser = parser.reset()
threw = False
try:
parser.parse("""
dictionary A {
[TreatNullAs=EmptyString] DOMString foo;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow [TreatNullAs] on dictionary members");
parser = parser.reset()
threw = False
try:
parser.parse("""
dictionary A {
[TreatUndefinedAs=EmptyString] DOMString foo;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should not allow [TreatUndefinedAs] on dictionary members");
parser = parser.reset()
threw = False
try:
parser.parse("""
dictionary A {
};
interface X {
void doFoo(A arg);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Trailing dictionary arg must be optional")
parser = parser.reset()
threw = False
try:
parser.parse("""
dictionary A {
};
interface X {
void doFoo(A arg1, optional long arg2);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Dictionary arg followed by optional arg must be optional")
parser = parser.reset()
parser.parse("""
dictionary A {
};
interface X {
void doFoo(A arg1, long arg2);
};
""")
results = parser.finish()
harness.ok(True, "Dictionary arg followed by required arg can be required")
parser = parser.reset()
threw = False
try:
parser.parse("""
dictionary A {
};
interface X {
void doFoo(optional A? arg1);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Dictionary arg must not be nullable")
parser = parser.reset()
threw = False
try:
parser.parse("""
dictionary A {
};
interface X {
void doFoo((A or long)? arg1);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Dictionary arg must not be in a nullable union")
|
julot/sphinxcontrib-dd
|
refs/heads/master
|
tests/database-diagram/relationship.py
|
1
|
from sphinxcontrib.dd.database_diagram import Relationship
def test():
relations = {
'E1 |--| E2': (
'"E1" -> "E2" '
'[dir="both", arrowhead="nonetee", arrowtail="nonetee"]'
),
'E1 ||--|| E2': (
'"E1" -> "E2" '
'[dir="both", arrowhead="noneteetee", arrowtail="noneteetee"]'
),
'E1 |0--0| E2': (
'"E1" -> "E2" '
'[dir="both", arrowhead="noneteeodot", arrowtail="noneteeodot"]'
),
'E1 >--< E2': (
'"E1" -> "E2" '
'[dir="both", arrowhead="crow", arrowtail="crow"]'
),
'E1 >0--0< E2': (
'"E1" -> "E2" '
'[dir="both", arrowhead="crowodot", arrowtail="crowodot"]'
),
'E1 >|--|< E2': (
'"E1" -> "E2" '
'[dir="both", arrowhead="crowtee", arrowtail="crowtee"]'
),
}
for relation, dot in relations.items():
assert Relationship(relation).dot() == dot
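# Quick illustration of the mapping under test, using one of the keys from
# the table above; running this module directly prints the generated DOT edge.
if __name__ == '__main__':
    print(Relationship('E1 >|--|< E2').dot())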
|
ktan2020/legacy-automation
|
refs/heads/master
|
win/Lib/site-packages/nose-1.2.1-py2.7.egg/nose/plugins/base.py
|
8
|
import os
import textwrap
from optparse import OptionConflictError
from warnings import warn
from nose.util import tolist
class Plugin(object):
"""Base class for nose plugins. It's recommended but not *necessary* to
    subclass this class to create a plugin; all plugins, however, *must* implement
`options(self, parser, env)` and `configure(self, options, conf)`, and
must have the attributes `enabled`, `name` and `score`. The `name`
attribute may contain hyphens ('-').
Plugins should not be enabled by default.
Subclassing Plugin (and calling the superclass methods in
__init__, configure, and options, if you override them) will give
your plugin some friendly default behavior:
* A --with-$name option will be added to the command line interface
to enable the plugin, and a corresponding environment variable
will be used as the default value. The plugin class's docstring
will be used as the help for this option.
* The plugin will not be enabled unless this option is selected by
the user.
"""
can_configure = False
enabled = False
enableOpt = None
name = None
score = 100
def __init__(self):
if self.name is None:
self.name = self.__class__.__name__.lower()
if self.enableOpt is None:
self.enableOpt = "enable_plugin_%s" % self.name.replace('-', '_')
def addOptions(self, parser, env=None):
"""Add command-line options for this plugin.
The base plugin class adds --with-$name by default, used to enable the
plugin.
.. warning :: Don't implement addOptions unless you want to override
all default option handling behavior, including
warnings for conflicting options. Implement
:meth:`options
<nose.plugins.base.IPluginInterface.options>`
instead.
"""
self.add_options(parser, env)
def add_options(self, parser, env=None):
"""Non-camel-case version of func name for backwards compatibility.
.. warning ::
DEPRECATED: Do not use this method,
use :meth:`options <nose.plugins.base.IPluginInterface.options>`
instead.
"""
# FIXME raise deprecation warning if wasn't called by wrapper
if env is None:
env = os.environ
try:
self.options(parser, env)
self.can_configure = True
except OptionConflictError, e:
warn("Plugin %s has conflicting option string: %s and will "
"be disabled" % (self, e), RuntimeWarning)
self.enabled = False
self.can_configure = False
def options(self, parser, env):
"""Register commandline options.
Implement this method for normal options behavior with protection from
OptionConflictErrors. If you override this method and want the default
--with-$name option to be registered, be sure to call super().
"""
env_opt = 'NOSE_WITH_%s' % self.name.upper()
env_opt = env_opt.replace('-', '_')
parser.add_option("--with-%s" % self.name,
action="store_true",
dest=self.enableOpt,
default=env.get(env_opt),
help="Enable plugin %s: %s [%s]" %
(self.__class__.__name__, self.help(), env_opt))
def configure(self, options, conf):
"""Configure the plugin and system, based on selected options.
The base plugin class sets the plugin to enabled if the enable option
for the plugin (self.enableOpt) is true.
"""
if not self.can_configure:
return
self.conf = conf
if hasattr(options, self.enableOpt):
self.enabled = getattr(options, self.enableOpt)
def help(self):
"""Return help for this plugin. This will be output as the help
section of the --with-$name option that enables the plugin.
"""
if self.__class__.__doc__:
# doc sections are often indented; compress the spaces
return textwrap.dedent(self.__class__.__doc__)
return "(no help available)"
    # Compatibility shim
def tolist(self, val):
warn("Plugin.tolist is deprecated. Use nose.util.tolist instead",
DeprecationWarning)
return tolist(val)
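# Minimal example of the subclassing contract described above -- a sketch,
# not part of nose itself. Subclassing Plugin derives the --with-exampletimer
# switch and the NOSE_WITH_EXAMPLETIMER environment default automatically;
# begin() and finalize() are hooks documented on IPluginInterface below.
import time

class ExampleTimerPlugin(Plugin):
    """Report how long the whole test run took."""
    name = 'exampletimer'

    def begin(self):
        self._start = time.time()

    def finalize(self, result):
        print "test run took %.2fs" % (time.time() - self._start)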
class IPluginInterface(object):
"""
IPluginInterface describes the plugin API. Do not subclass or use this
class directly.
"""
def __new__(cls, *arg, **kw):
raise TypeError("IPluginInterface class is for documentation only")
def addOptions(self, parser, env):
"""Called to allow plugin to register command-line options with the
parser. DO NOT return a value from this method unless you want to stop
all other plugins from setting their options.
.. warning ::
DEPRECATED -- implement
:meth:`options <nose.plugins.base.IPluginInterface.options>` instead.
"""
pass
add_options = addOptions
add_options.deprecated = True
def addDeprecated(self, test):
"""Called when a deprecated test is seen. DO NOT return a value
unless you want to stop other plugins from seeing the deprecated
test.
.. warning :: DEPRECATED -- check error class in addError instead
"""
pass
addDeprecated.deprecated = True
def addError(self, test, err):
"""Called when a test raises an uncaught exception. DO NOT return a
value unless you want to stop other plugins from seeing that the
test has raised an error.
:param test: the test case
:type test: :class:`nose.case.Test`
:param err: sys.exc_info() tuple
:type err: 3-tuple
"""
pass
addError.changed = True
def addFailure(self, test, err):
"""Called when a test fails. DO NOT return a value unless you
want to stop other plugins from seeing that the test has failed.
:param test: the test case
:type test: :class:`nose.case.Test`
        :param err: sys.exc_info() tuple
        :type err: 3-tuple
"""
pass
addFailure.changed = True
def addSkip(self, test):
"""Called when a test is skipped. DO NOT return a value unless
you want to stop other plugins from seeing the skipped test.
.. warning:: DEPRECATED -- check error class in addError instead
"""
pass
addSkip.deprecated = True
def addSuccess(self, test):
"""Called when a test passes. DO NOT return a value unless you
want to stop other plugins from seeing the passing test.
:param test: the test case
:type test: :class:`nose.case.Test`
"""
pass
addSuccess.changed = True
def afterContext(self):
"""Called after a context (generally a module) has been
lazy-loaded, imported, setup, had its tests loaded and
executed, and torn down.
"""
pass
afterContext._new = True
def afterDirectory(self, path):
"""Called after all tests have been loaded from directory at path
and run.
:param path: the directory that has finished processing
:type path: string
"""
pass
afterDirectory._new = True
def afterImport(self, filename, module):
"""Called after module is imported from filename. afterImport
is called even if the import failed.
:param filename: The file that was loaded
:type filename: string
        :param module: The name of the module
:type module: string
"""
pass
afterImport._new = True
def afterTest(self, test):
"""Called after the test has been run and the result recorded
(after stopTest).
:param test: the test case
:type test: :class:`nose.case.Test`
"""
pass
afterTest._new = True
def beforeContext(self):
"""Called before a context (generally a module) is
examined. Because the context is not yet loaded, plugins don't
get to know what the context is; so any context operations
should use a stack that is pushed in `beforeContext` and popped
in `afterContext` to ensure they operate symmetrically.
`beforeContext` and `afterContext` are mainly useful for tracking
and restoring global state around possible changes from within a
context, whatever the context may be. If you need to operate on
contexts themselves, see `startContext` and `stopContext`, which
are passed the context in question, but are called after
it has been loaded (imported in the module case).
"""
pass
beforeContext._new = True
def beforeDirectory(self, path):
"""Called before tests are loaded from directory at path.
:param path: the directory that is about to be processed
"""
pass
beforeDirectory._new = True
def beforeImport(self, filename, module):
"""Called before module is imported from filename.
:param filename: The file that will be loaded
:param module: The name of the module found in file
:type module: string
"""
beforeImport._new = True
def beforeTest(self, test):
"""Called before the test is run (before startTest).
:param test: the test case
:type test: :class:`nose.case.Test`
"""
pass
beforeTest._new = True
def begin(self):
"""Called before any tests are collected or run. Use this to
perform any setup needed before testing begins.
"""
pass
def configure(self, options, conf):
"""Called after the command line has been parsed, with the
parsed options and the config container. Here, implement any
config storage or changes to state or operation that are set
by command line options.
DO NOT return a value from this method unless you want to
stop all other plugins from being configured.
"""
pass
def finalize(self, result):
"""Called after all report output, including output from all
plugins, has been sent to the stream. Use this to print final
test results or perform final cleanup. Return None to allow
other plugins to continue printing, or any other value to stop
them.
:param result: test result object
.. Note:: When tests are run under a test runner other than
:class:`nose.core.TextTestRunner`, such as
via ``python setup.py test``, this method may be called
**before** the default report output is sent.
"""
pass
def describeTest(self, test):
"""Return a test description.
Called by :meth:`nose.case.Test.shortDescription`.
:param test: the test case
:type test: :class:`nose.case.Test`
"""
pass
describeTest._new = True
def formatError(self, test, err):
"""Called in result.addError, before plugin.addError. If you
want to replace or modify the error tuple, return a new error
tuple.
:param test: the test case
:type test: :class:`nose.case.Test`
:param err: sys.exc_info() tuple
:type err: 3-tuple
"""
pass
formatError._new = True
formatError.chainable = True
# test arg is not chainable
formatError.static_args = (True, False)
def formatFailure(self, test, err):
"""Called in result.addFailure, before plugin.addFailure. If you
want to replace or modify the error tuple, return a new error
tuple. Because this method is chainable, you must return the
test as well, so you'll return something like::
return (test, err)
:param test: the test case
:type test: :class:`nose.case.Test`
:param err: sys.exc_info() tuple
:type err: 3-tuple
"""
pass
formatFailure._new = True
formatFailure.chainable = True
# test arg is not chainable
formatFailure.static_args = (True, False)
def handleError(self, test, err):
"""Called on addError. To handle the error yourself and prevent normal
error processing, return a true value.
:param test: the test case
:type test: :class:`nose.case.Test`
:param err: sys.exc_info() tuple
:type err: 3-tuple
"""
pass
handleError._new = True
def handleFailure(self, test, err):
"""Called on addFailure. To handle the failure yourself and
prevent normal failure processing, return a true value.
:param test: the test case
:type test: :class:`nose.case.Test`
:param err: sys.exc_info() tuple
:type err: 3-tuple
"""
pass
handleFailure._new = True
def loadTestsFromDir(self, path):
"""Return iterable of tests from a directory. May be a
generator. Each item returned must be a runnable
unittest.TestCase (or subclass) instance or suite instance.
Return None if your plugin cannot collect any tests from
directory.
:param path: The path to the directory.
"""
pass
loadTestsFromDir.generative = True
loadTestsFromDir._new = True
def loadTestsFromModule(self, module, path=None):
"""Return iterable of tests in a module. May be a
generator. Each item returned must be a runnable
unittest.TestCase (or subclass) instance.
Return None if your plugin cannot
collect any tests from module.
:param module: The module object
:type module: python module
:param path: the path of the module to search, to distinguish from
namespace package modules
.. note::
NEW. The ``path`` parameter will only be passed by nose 0.11
or above.
"""
pass
loadTestsFromModule.generative = True
def loadTestsFromName(self, name, module=None, importPath=None):
"""Return tests in this file or module. Return None if you are not able
to load any tests, or an iterable if you are. May be a
generator.
:param name: The test name. May be a file or module name plus a test
callable. Use split_test_name to split into parts. Or it might
be some crazy name of your own devising, in which case, do
whatever you want.
:param module: Module from which the name is to be loaded
:param importPath: Path from which file (must be a python module) was
found
.. warning:: DEPRECATED: this argument will NOT be passed.
"""
pass
loadTestsFromName.generative = True
def loadTestsFromNames(self, names, module=None):
"""Return a tuple of (tests loaded, remaining names). Return
None if you are not able to load any tests. Multiple plugins
may implement loadTestsFromNames; the remaining name list from
each will be passed to the next as input.
:param names: List of test names.
:type names: iterable
:param module: Module from which the names are to be loaded
"""
pass
loadTestsFromNames._new = True
loadTestsFromNames.chainable = True
def loadTestsFromFile(self, filename):
"""Return tests in this file. Return None if you are not
interested in loading any tests, or an iterable if you are and
can load some. May be a generator. *If you are interested in
loading tests from the file and encounter no errors, but find
no tests, yield False or return [False].*
.. Note:: This method replaces loadTestsFromPath from the 0.9
API.
:param filename: The full path to the file or directory.
"""
pass
loadTestsFromFile.generative = True
loadTestsFromFile._new = True
def loadTestsFromPath(self, path):
"""
.. warning:: DEPRECATED -- use loadTestsFromFile instead
"""
pass
loadTestsFromPath.deprecated = True
def loadTestsFromTestCase(self, cls):
"""Return tests in this test case class. Return None if you are
not able to load any tests, or an iterable if you are. May be a
generator.
:param cls: The test case class. Must be subclass of
:class:`unittest.TestCase`.
"""
pass
loadTestsFromTestCase.generative = True
def loadTestsFromTestClass(self, cls):
"""Return tests in this test class. Class will *not* be a
unittest.TestCase subclass. Return None if you are not able to
        load any tests, or an iterable if you are. May be a generator.
        :param cls: The test class. Must **not** be a subclass of
:class:`unittest.TestCase`.
"""
pass
loadTestsFromTestClass._new = True
loadTestsFromTestClass.generative = True
def makeTest(self, obj, parent):
"""Given an object and its parent, return or yield one or more
test cases. Each test must be a unittest.TestCase (or subclass)
instance. This is called before default test loading to allow
plugins to load an alternate test case or cases for an
object. May be a generator.
:param obj: The object to be made into a test
:param parent: The parent of obj (eg, for a method, the class)
"""
pass
makeTest._new = True
makeTest.generative = True
def options(self, parser, env):
"""Called to allow plugin to register command line
options with the parser.
DO NOT return a value from this method unless you want to stop
all other plugins from setting their options.
:param parser: options parser instance
:type parser: :class:`ConfigParser.ConfigParser`
:param env: environment, default is os.environ
"""
pass
options._new = True
def prepareTest(self, test):
"""Called before the test is run by the test runner. Please
note the article *the* in the previous sentence: prepareTest
is called *only once*, and is passed the test case or test
suite that the test runner will execute. It is *not* called
for each individual test case. If you return a non-None value,
that return value will be run as the test. Use this hook to
wrap or decorate the test with another function. If you need
to modify or wrap individual test cases, use `prepareTestCase`
instead.
:param test: the test case
:type test: :class:`nose.case.Test`
"""
pass
def prepareTestCase(self, test):
"""Prepare or wrap an individual test case. Called before
execution of the test. The test passed here is a
nose.case.Test instance; the case to be executed is in the
test attribute of the passed case. To modify the test to be
run, you should return a callable that takes one argument (the
test result object) -- it is recommended that you *do not*
side-effect the nose.case.Test instance you have been passed.
Keep in mind that when you replace the test callable you are
replacing the run() method of the test case -- including the
exception handling and result calls, etc.
:param test: the test case
:type test: :class:`nose.case.Test`
"""
pass
prepareTestCase._new = True
def prepareTestLoader(self, loader):
"""Called before tests are loaded. To replace the test loader,
return a test loader. To allow other plugins to process the
test loader, return None. Only one plugin may replace the test
loader. Only valid when using nose.TestProgram.
:param loader: :class:`nose.loader.TestLoader`
(or other loader) instance
"""
pass
prepareTestLoader._new = True
def prepareTestResult(self, result):
"""Called before the first test is run. To use a different
test result handler for all tests than the given result,
return a test result handler. NOTE however that this handler
will only be seen by tests, that is, inside of the result
proxy system. The TestRunner and TestProgram -- whether nose's
or other -- will continue to see the original result
handler. For this reason, it is usually better to monkeypatch
the result (for instance, if you want to handle some
exceptions in a unique way). Only one plugin may replace the
result, but many may monkeypatch it. If you want to
monkeypatch and stop other plugins from doing so, monkeypatch
and return the patched result.
:param result: :class:`nose.result.TextTestResult`
(or other result) instance
"""
pass
prepareTestResult._new = True
def prepareTestRunner(self, runner):
"""Called before tests are run. To replace the test runner,
return a test runner. To allow other plugins to process the
test runner, return None. Only valid when using nose.TestProgram.
:param runner: :class:`nose.core.TextTestRunner`
(or other runner) instance
"""
pass
prepareTestRunner._new = True
def report(self, stream):
"""Called after all error output has been printed. Print your
plugin's report to the provided stream. Return None to allow
other plugins to print reports, any other value to stop them.
:param stream: stream object; send your output here
:type stream: file-like object
"""
pass
def setOutputStream(self, stream):
"""Called before test output begins. To direct test output to a
new stream, return a stream object, which must implement a
`write(msg)` method. If you only want to note the stream, not
capture or redirect it, then return None.
:param stream: stream object; send your output here
:type stream: file-like object
"""
def startContext(self, context):
"""Called before context setup and the running of tests in the
context. Note that tests have already been *loaded* from the
context before this call.
:param context: the context about to be setup. May be a module or
class, or any other object that contains tests.
"""
pass
startContext._new = True
def startTest(self, test):
"""Called before each test is run. DO NOT return a value unless
you want to stop other plugins from seeing the test start.
        :param test: the test case
        :type test: :class:`nose.case.Test`
"""
pass
def stopContext(self, context):
"""Called after the tests in a context have run and the
context has been torn down.
:param context: the context about to be setup. May be a module or
class, or any other object that contains tests.
"""
pass
stopContext._new = True
def stopTest(self, test):
"""Called after each test is run. DO NOT return a value unless
you want to stop other plugins from seeing that the test has stopped.
        :param test: the test case
        :type test: :class:`nose.case.Test`
"""
pass
def testName(self, test):
"""Return a short test name. Called by `nose.case.Test.__str__`.
        :param test: the test case
        :type test: :class:`nose.case.Test`
"""
pass
testName._new = True
def wantClass(self, cls):
"""Return true if you want the main test selector to collect
tests from this class, false if you don't, and None if you don't
care.
:param cls: The class being examined by the selector
"""
pass
def wantDirectory(self, dirname):
"""Return true if you want test collection to descend into this
directory, false if you do not, and None if you don't care.
:param dirname: Full path to directory being examined by the selector
"""
pass
def wantFile(self, file):
"""Return true if you want to collect tests from this file,
false if you do not and None if you don't care.
Change from 0.9: The optional package parameter is no longer passed.
:param file: Full path to file being examined by the selector
"""
pass
def wantFunction(self, function):
"""Return true to collect this function as a test, false to
prevent it from being collected, and None if you don't care.
:param function: The function object being examined by the selector
"""
pass
def wantMethod(self, method):
"""Return true to collect this method as a test, false to
prevent it from being collected, and None if you don't care.
:param method: The method object being examined by the selector
:type method: unbound method
"""
pass
def wantModule(self, module):
"""Return true if you want to collection to descend into this
module, false to prevent the collector from descending into the
module, and None if you don't care.
:param module: The module object being examined by the selector
:type module: python module
"""
pass
def wantModuleTests(self, module):
"""
.. warning:: DEPRECATED -- this method will not be called, it has
been folded into wantModule.
"""
pass
wantModuleTests.deprecated = True
|
rocopartners/django-oscar
|
refs/heads/master
|
src/oscar/apps/dashboard/reviews/views.py
|
23
|
import datetime
from django.views import generic
from django.conf import settings
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from oscar.views.generic import BulkEditMixin
from oscar.apps.dashboard.reviews import forms
from oscar.core.utils import format_datetime
from oscar.core.loading import get_model
from oscar.views import sort_queryset
ProductReview = get_model('reviews', 'productreview')
class ReviewListView(BulkEditMixin, generic.ListView):
model = ProductReview
template_name = 'dashboard/reviews/review_list.html'
context_object_name = 'review_list'
form_class = forms.ProductReviewSearchForm
review_form_class = forms.DashboardProductReviewForm
paginate_by = settings.OSCAR_DASHBOARD_ITEMS_PER_PAGE
actions = ('update_selected_review_status',)
checkbox_object_name = 'review'
desc_template = _("%(main_filter)s %(date_filter)s %(status_filter)s"
"%(kw_filter)s %(name_filter)s")
def get(self, request, *args, **kwargs):
        response = super(ReviewListView, self).get(request, *args, **kwargs)
self.form = self.form_class()
return response
def get_date_from_to_queryset(self, date_from, date_to, queryset=None):
"""
Get a ``QuerySet`` of ``ProductReview`` items that match the time
frame specified by *date_from* and *date_to*. Both parameters are
expected to be in ``datetime`` format with *date_from* < *date_to*.
If *queryset* is specified, it will be filtered according to the
given dates. Otherwise, a new queryset for all ``ProductReview``
items is created.
"""
        if queryset is None:
            queryset = self.model.objects.all()
if date_from and date_to:
# Add 24 hours to make search inclusive
date_to = date_to + datetime.timedelta(days=1)
queryset = queryset.filter(
date_created__gte=date_from
).filter(
date_created__lt=date_to
)
self.desc_ctx['date_filter'] \
= _(" created between %(start_date)s and %(end_date)s") % {
'start_date': format_datetime(date_from),
'end_date': format_datetime(date_to)}
elif date_from:
queryset = queryset.filter(date_created__gte=date_from)
self.desc_ctx['date_filter'] \
= _(" created after %s") % format_datetime(date_from)
elif date_to:
# Add 24 hours to make search inclusive
date_to = date_to + datetime.timedelta(days=1)
queryset = queryset.filter(date_created__lt=date_to)
self.desc_ctx['date_filter'] \
= _(" created before %s") % format_datetime(date_to)
return queryset
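    # Illustrative use of the helper above (mirrors the call in
    # get_queryset() below; the one-day padding makes date_to inclusive):
    #
    #     week_ago = datetime.date.today() - datetime.timedelta(days=7)
    #     queryset = self.get_date_from_to_queryset(
    #         week_ago, datetime.date.today(), ProductReview.objects.all())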
def get_queryset(self):
queryset = self.model.objects.all()
queryset = sort_queryset(queryset, self.request,
['score', 'total_votes', 'date_created'])
self.desc_ctx = {
'main_filter': _('All reviews'),
'date_filter': '',
'status_filter': '',
'kw_filter': '',
'name_filter': '',
}
self.form = self.form_class(self.request.GET)
if not self.form.is_valid():
return queryset
data = self.form.cleaned_data
        # checking for empty string rather than True is required
# as zero is a valid value for 'status' but would be
# evaluated to False
if data['status'] != '':
queryset = queryset.filter(status=data['status']).distinct()
display_status = self.form.get_friendly_status()
self.desc_ctx['status_filter'] \
= _(" with status matching '%s'") % display_status
if data['keyword']:
queryset = queryset.filter(
Q(title__icontains=data['keyword']) |
Q(body__icontains=data['keyword'])
).distinct()
self.desc_ctx['kw_filter'] \
= _(" with keyword matching '%s'") % data['keyword']
queryset = self.get_date_from_to_queryset(data['date_from'],
data['date_to'], queryset)
if data['name']:
# If the value is two words, then assume they are first name and
# last name
parts = data['name'].split()
if len(parts) >= 2:
queryset = queryset.filter(
user__first_name__istartswith=parts[0],
user__last_name__istartswith=parts[1]
).distinct()
else:
queryset = queryset.filter(
Q(user__first_name__istartswith=parts[0]) |
Q(user__last_name__istartswith=parts[-1])
).distinct()
self.desc_ctx['name_filter'] \
= _(" with customer name matching '%s'") % data['name']
return queryset
def get_context_data(self, **kwargs):
        context = super(ReviewListView, self).get_context_data(**kwargs)
context['review_form'] = self.review_form_class()
context['form'] = self.form
context['description'] = self.desc_template % self.desc_ctx
return context
def update_selected_review_status(self, request, reviews):
"""
Update the status of the selected *reviews* with the new
status in the *request* POST data. Redirects back to the
list view of reviews.
"""
new_status = int(request.POST.get('status'))
for review in reviews:
review.status = new_status
review.save()
return HttpResponseRedirect(reverse('dashboard:reviews-list'))
class ReviewUpdateView(generic.UpdateView):
model = ProductReview
template_name = 'dashboard/reviews/review_update.html'
form_class = forms.DashboardProductReviewForm
context_object_name = 'review'
def get_success_url(self):
return reverse('dashboard:reviews-list')
class ReviewDeleteView(generic.DeleteView):
model = ProductReview
template_name = 'dashboard/reviews/review_delete.html'
context_object_name = 'review'
def get_success_url(self):
return reverse('dashboard:reviews-list')
|
kawamon/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.11.29/tests/staticfiles_tests/test_management.py
|
5
|
from __future__ import unicode_literals
import codecs
import datetime
import os
import shutil
import tempfile
import unittest
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.contrib.staticfiles import storage
from django.contrib.staticfiles.management.commands import collectstatic
from django.core.exceptions import ImproperlyConfigured
from django.core.management import call_command
from django.test import mock, override_settings
from django.test.utils import extend_sys_path
from django.utils import six, timezone
from django.utils._os import symlinks_supported
from django.utils.encoding import force_text
from django.utils.functional import empty
from .cases import CollectionTestCase, StaticFilesTestCase, TestDefaults
from .settings import TEST_ROOT, TEST_SETTINGS
from .storage import DummyStorage
class TestNoFilesCreated(object):
def test_no_files_created(self):
"""
        Make sure no files were created in the destination directory.
"""
self.assertEqual(os.listdir(settings.STATIC_ROOT), [])
class TestFindStatic(TestDefaults, CollectionTestCase):
"""
Test ``findstatic`` management command.
"""
def _get_file(self, filepath):
path = call_command('findstatic', filepath, all=False, verbosity=0, stdout=six.StringIO())
with codecs.open(force_text(path), "r", "utf-8") as f:
return f.read()
def test_all_files(self):
"""
findstatic returns all candidate files if run without --first and -v1.
"""
result = call_command('findstatic', 'test/file.txt', verbosity=1, stdout=six.StringIO())
lines = [l.strip() for l in result.split('\n')]
self.assertEqual(len(lines), 3) # three because there is also the "Found <file> here" line
self.assertIn('project', force_text(lines[1]))
self.assertIn('apps', force_text(lines[2]))
def test_all_files_less_verbose(self):
"""
findstatic returns all candidate files if run without --first and -v0.
"""
result = call_command('findstatic', 'test/file.txt', verbosity=0, stdout=six.StringIO())
lines = [l.strip() for l in result.split('\n')]
self.assertEqual(len(lines), 2)
self.assertIn('project', force_text(lines[0]))
self.assertIn('apps', force_text(lines[1]))
def test_all_files_more_verbose(self):
"""
findstatic returns all candidate files if run without --first and -v2.
Also, test that findstatic returns the searched locations with -v2.
"""
result = call_command('findstatic', 'test/file.txt', verbosity=2, stdout=six.StringIO())
lines = [l.strip() for l in result.split('\n')]
self.assertIn('project', force_text(lines[1]))
self.assertIn('apps', force_text(lines[2]))
self.assertIn("Looking in the following locations:", force_text(lines[3]))
searched_locations = ', '.join(force_text(x) for x in lines[4:])
# AppDirectoriesFinder searched locations
self.assertIn(os.path.join('staticfiles_tests', 'apps', 'test', 'static'), searched_locations)
self.assertIn(os.path.join('staticfiles_tests', 'apps', 'no_label', 'static'), searched_locations)
# FileSystemFinder searched locations
self.assertIn(TEST_SETTINGS['STATICFILES_DIRS'][1][1], searched_locations)
self.assertIn(TEST_SETTINGS['STATICFILES_DIRS'][0], searched_locations)
# DefaultStorageFinder searched locations
self.assertIn(
os.path.join('staticfiles_tests', 'project', 'site_media', 'media'),
searched_locations
)
class TestConfiguration(StaticFilesTestCase):
def test_location_empty(self):
msg = 'without having set the STATIC_ROOT setting to a filesystem path'
err = six.StringIO()
for root in ['', None]:
with override_settings(STATIC_ROOT=root):
with self.assertRaisesMessage(ImproperlyConfigured, msg):
call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
def test_local_storage_detection_helper(self):
staticfiles_storage = storage.staticfiles_storage
try:
storage.staticfiles_storage._wrapped = empty
with self.settings(STATICFILES_STORAGE='django.contrib.staticfiles.storage.StaticFilesStorage'):
command = collectstatic.Command()
self.assertTrue(command.is_local_storage())
storage.staticfiles_storage._wrapped = empty
with self.settings(STATICFILES_STORAGE='staticfiles_tests.storage.DummyStorage'):
command = collectstatic.Command()
self.assertFalse(command.is_local_storage())
collectstatic.staticfiles_storage = storage.FileSystemStorage()
command = collectstatic.Command()
self.assertTrue(command.is_local_storage())
collectstatic.staticfiles_storage = DummyStorage()
command = collectstatic.Command()
self.assertFalse(command.is_local_storage())
finally:
staticfiles_storage._wrapped = empty
collectstatic.staticfiles_storage = staticfiles_storage
storage.staticfiles_storage = staticfiles_storage
class TestCollectionHelpSubcommand(AdminScriptTestCase):
@override_settings(STATIC_ROOT=None)
def test_missing_settings_dont_prevent_help(self):
"""
Even if the STATIC_ROOT setting is not set, one can still call the
`manage.py help collectstatic` command.
"""
self.write_settings('settings.py', apps=['django.contrib.staticfiles'])
out, err = self.run_manage(['help', 'collectstatic'])
self.assertNoOutput(err)
class TestCollection(TestDefaults, CollectionTestCase):
"""
Test ``collectstatic`` management command.
"""
def test_ignore(self):
"""
-i patterns are ignored.
"""
self.assertFileNotFound('test/test.ignoreme')
def test_common_ignore_patterns(self):
"""
Common ignore patterns (*~, .*, CVS) are ignored.
"""
self.assertFileNotFound('test/.hidden')
self.assertFileNotFound('test/backup~')
self.assertFileNotFound('test/CVS')
class TestCollectionClear(CollectionTestCase):
"""
Test the ``--clear`` option of the ``collectstatic`` management command.
"""
def run_collectstatic(self, **kwargs):
clear_filepath = os.path.join(settings.STATIC_ROOT, 'cleared.txt')
with open(clear_filepath, 'w') as f:
f.write('should be cleared')
super(TestCollectionClear, self).run_collectstatic(clear=True)
def test_cleared_not_found(self):
self.assertFileNotFound('cleared.txt')
def test_dir_not_exists(self, **kwargs):
shutil.rmtree(six.text_type(settings.STATIC_ROOT))
super(TestCollectionClear, self).run_collectstatic(clear=True)
@override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.PathNotImplementedStorage')
def test_handle_path_notimplemented(self):
self.run_collectstatic()
self.assertFileNotFound('cleared.txt')
class TestInteractiveMessages(CollectionTestCase):
overwrite_warning_msg = "This will overwrite existing files!"
delete_warning_msg = "This will DELETE ALL FILES in this location!"
files_copied_msg = "static files copied"
@staticmethod
def mock_input(stdout):
def _input(msg):
# Python 2 reads bytes from the console output, use bytes for the StringIO
stdout.write(msg.encode('utf-8') if six.PY2 else msg)
return 'yes'
return _input
def test_warning_when_clearing_staticdir(self):
stdout = six.StringIO()
self.run_collectstatic()
with mock.patch('django.contrib.staticfiles.management.commands.collectstatic.input',
side_effect=self.mock_input(stdout)):
call_command('collectstatic', interactive=True, clear=True, stdout=stdout)
output = force_text(stdout.getvalue())
self.assertNotIn(self.overwrite_warning_msg, output)
self.assertIn(self.delete_warning_msg, output)
def test_warning_when_overwriting_files_in_staticdir(self):
stdout = six.StringIO()
self.run_collectstatic()
with mock.patch('django.contrib.staticfiles.management.commands.collectstatic.input',
side_effect=self.mock_input(stdout)):
call_command('collectstatic', interactive=True, stdout=stdout)
output = force_text(stdout.getvalue())
self.assertIn(self.overwrite_warning_msg, output)
self.assertNotIn(self.delete_warning_msg, output)
def test_no_warning_when_staticdir_does_not_exist(self):
stdout = six.StringIO()
shutil.rmtree(six.text_type(settings.STATIC_ROOT))
call_command('collectstatic', interactive=True, stdout=stdout)
output = force_text(stdout.getvalue())
self.assertNotIn(self.overwrite_warning_msg, output)
self.assertNotIn(self.delete_warning_msg, output)
self.assertIn(self.files_copied_msg, output)
def test_no_warning_for_empty_staticdir(self):
stdout = six.StringIO()
static_dir = tempfile.mkdtemp(prefix='collectstatic_empty_staticdir_test')
with override_settings(STATIC_ROOT=static_dir):
call_command('collectstatic', interactive=True, stdout=stdout)
shutil.rmtree(six.text_type(static_dir))
output = force_text(stdout.getvalue())
self.assertNotIn(self.overwrite_warning_msg, output)
self.assertNotIn(self.delete_warning_msg, output)
self.assertIn(self.files_copied_msg, output)
class TestCollectionExcludeNoDefaultIgnore(TestDefaults, CollectionTestCase):
"""
Test ``--exclude-dirs`` and ``--no-default-ignore`` options of the
``collectstatic`` management command.
"""
def run_collectstatic(self):
super(TestCollectionExcludeNoDefaultIgnore, self).run_collectstatic(
use_default_ignore_patterns=False)
def test_no_common_ignore_patterns(self):
"""
With --no-default-ignore, common ignore patterns (*~, .*, CVS)
are not ignored.
"""
self.assertFileContains('test/.hidden', 'should be ignored')
self.assertFileContains('test/backup~', 'should be ignored')
self.assertFileContains('test/CVS', 'should be ignored')
@override_settings(INSTALLED_APPS=[
'staticfiles_tests.apps.staticfiles_config.IgnorePatternsAppConfig',
'staticfiles_tests.apps.test',
])
class TestCollectionCustomIgnorePatterns(CollectionTestCase):
def test_custom_ignore_patterns(self):
"""
A custom ignore_patterns list, ['*.css'] in this case, can be specified
in an AppConfig definition.
"""
self.assertFileNotFound('test/nonascii.css')
self.assertFileContains('test/.hidden', 'should be ignored')
class TestCollectionDryRun(TestNoFilesCreated, CollectionTestCase):
"""
Test ``--dry-run`` option for ``collectstatic`` management command.
"""
def run_collectstatic(self):
super(TestCollectionDryRun, self).run_collectstatic(dry_run=True)
class TestCollectionFilesOverride(CollectionTestCase):
"""
Test overriding duplicated files by ``collectstatic`` management command.
Check for proper handling of apps order in installed apps even if file modification
dates are in different order:
'staticfiles_test_app',
'staticfiles_tests.apps.no_label',
"""
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.temp_dir)
# get modification and access times for no_label/static/file2.txt
self.orig_path = os.path.join(TEST_ROOT, 'apps', 'no_label', 'static', 'file2.txt')
self.orig_mtime = os.path.getmtime(self.orig_path)
self.orig_atime = os.path.getatime(self.orig_path)
# prepare duplicate of file2.txt from a temporary app
# this file will have modification time older than no_label/static/file2.txt
# anyway it should be taken to STATIC_ROOT because the temporary app is before
# 'no_label' app in installed apps
self.temp_app_path = os.path.join(self.temp_dir, 'staticfiles_test_app')
self.testfile_path = os.path.join(self.temp_app_path, 'static', 'file2.txt')
os.makedirs(self.temp_app_path)
with open(os.path.join(self.temp_app_path, '__init__.py'), 'w+'):
pass
os.makedirs(os.path.dirname(self.testfile_path))
with open(self.testfile_path, 'w+') as f:
f.write('duplicate of file2.txt')
os.utime(self.testfile_path, (self.orig_atime - 1, self.orig_mtime - 1))
self.settings_with_test_app = self.modify_settings(
INSTALLED_APPS={'prepend': 'staticfiles_test_app'})
with extend_sys_path(self.temp_dir):
self.settings_with_test_app.enable()
super(TestCollectionFilesOverride, self).setUp()
def tearDown(self):
super(TestCollectionFilesOverride, self).tearDown()
self.settings_with_test_app.disable()
def test_ordering_override(self):
"""
Test if collectstatic takes files in proper order
"""
self.assertFileContains('file2.txt', 'duplicate of file2.txt')
# run collectstatic again
self.run_collectstatic()
self.assertFileContains('file2.txt', 'duplicate of file2.txt')
# The collectstatic test suite already has conflicting files since both
# project/test/file.txt and apps/test/static/test/file.txt are collected. To
# properly test for the warning not happening unless we tell it to explicitly,
# we remove the project directory and will add back a conflicting file later.
@override_settings(STATICFILES_DIRS=[])
class TestCollectionOverwriteWarning(CollectionTestCase):
"""
Test warning in ``collectstatic`` output when a file is skipped because a
previous file was already written to the same path.
"""
# If this string is in the collectstatic output, it means the warning we're
# looking for was emitted.
warning_string = 'Found another file'
def _collectstatic_output(self, **kwargs):
"""
Run collectstatic, and capture and return the output. We want to run
the command at highest verbosity, which is why we can't
just call e.g. BaseCollectionTestCase.run_collectstatic()
"""
out = six.StringIO()
call_command('collectstatic', interactive=False, verbosity=3, stdout=out, **kwargs)
return force_text(out.getvalue())
def test_no_warning(self):
"""
There isn't a warning if there isn't a duplicate destination.
"""
output = self._collectstatic_output(clear=True)
self.assertNotIn(self.warning_string, output)
def test_warning(self):
"""
There is a warning when there are duplicate destinations.
"""
static_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, static_dir)
duplicate = os.path.join(static_dir, 'test', 'file.txt')
os.mkdir(os.path.dirname(duplicate))
with open(duplicate, 'w+') as f:
f.write('duplicate of file.txt')
with self.settings(STATICFILES_DIRS=[static_dir]):
output = self._collectstatic_output(clear=True)
self.assertIn(self.warning_string, output)
os.remove(duplicate)
# Make sure the warning went away again.
with self.settings(STATICFILES_DIRS=[static_dir]):
output = self._collectstatic_output(clear=True)
self.assertNotIn(self.warning_string, output)
@override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.DummyStorage')
class TestCollectionNonLocalStorage(TestNoFilesCreated, CollectionTestCase):
"""
Tests for a Storage that implements get_modified_time() but not path()
(#15035).
"""
def test_storage_properties(self):
# Properties of the Storage as described in the ticket.
storage = DummyStorage()
self.assertEqual(storage.get_modified_time('name'), datetime.datetime(1970, 1, 1, tzinfo=timezone.utc))
with self.assertRaisesMessage(NotImplementedError, "This backend doesn't support absolute paths."):
storage.path('name')
class TestCollectionNeverCopyStorage(CollectionTestCase):
@override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.NeverCopyRemoteStorage')
def test_skips_newer_files_in_remote_storage(self):
"""
collectstatic skips newer files in a remote storage.
run_collectstatic() in setUp() copies the static files, then files are
always skipped after NeverCopyRemoteStorage is activated since
NeverCopyRemoteStorage.get_modified_time() returns a datetime in the
future to simulate an unmodified file.
"""
stdout = six.StringIO()
self.run_collectstatic(stdout=stdout, verbosity=2)
output = force_text(stdout.getvalue())
self.assertIn("Skipping 'test.txt' (not modified)", output)
@unittest.skipUnless(symlinks_supported(), "Must be able to symlink to run this test.")
class TestCollectionLinks(TestDefaults, CollectionTestCase):
"""
Test ``--link`` option for ``collectstatic`` management command.
Note that by inheriting ``TestDefaults`` we repeat all
the standard file resolving tests here, to make sure using
``--link`` does not change the file-selection semantics.
"""
def run_collectstatic(self, clear=False, link=True, **kwargs):
super(TestCollectionLinks, self).run_collectstatic(link=link, clear=clear, **kwargs)
def test_links_created(self):
"""
With ``--link``, symbolic links are created.
"""
self.assertTrue(os.path.islink(os.path.join(settings.STATIC_ROOT, 'test.txt')))
def test_broken_symlink(self):
"""
Test broken symlink gets deleted.
"""
path = os.path.join(settings.STATIC_ROOT, 'test.txt')
os.unlink(path)
self.run_collectstatic()
self.assertTrue(os.path.islink(path))
def test_symlinks_and_files_replaced(self):
"""
Running collectstatic in non-symlink mode replaces symlinks with files,
while symlink mode replaces files with symlinks.
"""
path = os.path.join(settings.STATIC_ROOT, 'test.txt')
self.assertTrue(os.path.islink(path))
self.run_collectstatic(link=False)
self.assertFalse(os.path.islink(path))
self.run_collectstatic(link=True)
self.assertTrue(os.path.islink(path))
def test_clear_broken_symlink(self):
"""
With ``--clear``, broken symbolic links are deleted.
"""
nonexistent_file_path = os.path.join(settings.STATIC_ROOT, 'nonexistent.txt')
broken_symlink_path = os.path.join(settings.STATIC_ROOT, 'symlink.txt')
os.symlink(nonexistent_file_path, broken_symlink_path)
self.run_collectstatic(clear=True)
self.assertFalse(os.path.lexists(broken_symlink_path))
|
sznekol/django-cms
|
refs/heads/develop
|
cms/test_utils/project/placeholderapp/migrations/0004_auto_20150503_1749.py
|
50
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('placeholderapp', '0003_example1_publish'),
]
operations = [
migrations.AlterModelOptions(
name='multilingualexample1translation',
options={'managed': True},
),
]
|
ErcOne/kernel-3-4-projek-n7000
|
refs/heads/android-exynos-3.4
|
Documentation/target/tcm_mod_builder.py
|
4981
|
#!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
import os, sys
import subprocess as sub
import string
import re
import optparse
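# Typical invocation (the exact flags come from the optparse setup further
# down in this script; shown here for orientation only):
#
#     ./tcm_mod_builder.py -m tcm_nab5000 -p iSCSI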
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
    if os.path.isdir(fabric_mod_dir_var):
        return 1
    print "Creating fabric_mod_dir: " + fabric_mod_dir_var
    # os.mkdir() returns None and signals failure with OSError, so trap
    # the exception instead of testing a return value
    try:
        os.mkdir(fabric_mod_dir_var)
    except OSError, e:
        tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var + ": " + str(e))
    return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
    # open() signals failure with IOError rather than a false return value
    try:
        p = open(f, 'w')
    except IOError:
        tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
    # file.write() returns None; failures surface as IOError
    try:
        p.write(buf)
    except IOError:
        tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
    try:
        p = open(f, 'w')
    except IOError:
        tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
    try:
        p.write(buf)
    except IOError:
        tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
    try:
        p = open(f, 'w')
    except IOError:
        tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
    try:
        p.write(buf)
    except IOError:
        tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
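# Example dispatch (hypothetical module name and output directory):
#
#     tcm_mod_build_base_includes("SAS", "/tmp/tcm_demo", "tcm_demo")
#
# writes /tmp/tcm_demo/tcm_demo_base.h containing the SAS flavour of the
# nacl/tpg/tport structs generated above.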
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
    try:
        p = open(f, 'w')
    except IOError:
        tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!se_nacl_new)\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!tpg) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!" + fabric_mod_port + ") {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
	buf += "		\" on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n"
buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
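	# fabric_mod_name[4:] drops the assumed "tcm_" prefix so only the bare
	# fabric name is registered with configfs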
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (IS_ERR(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return PTR_ERR(fabric);\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
	buf += "}\n\n"
buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
	buf += "}\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
	buf += "}\n\n"
buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
	buf += "}\n\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
	process_fo = 0
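	# process_fo marks that scanning has begun; once set, every header line
	# containing '(*' is treated as a function-pointer member of
	# struct target_core_fabric_ops, e.g. a line like:
	#   int (*tpg_check_demo_mode)(struct se_portal_group *);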
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
			process_fo = 1
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
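	# buf accumulates the generated C source for <mod>_fabric.c, while bufi
	# accumulates the matching extern prototypes written to <mod>_fabric.h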
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('get_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n"
if re.search('set_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
ret = pi.write(bufi)
if ret:
tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
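	# e.g. for a hypothetical fabric_mod_name "tcm_nab5000" this appends:
	#   obj-$(CONFIG_TCM_NAB5000) += tcm_nab5000/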
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
	tcm_dir = os.getcwd()
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
	input = raw_input("Would you like to add " + fabric_mod_name + " to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
	input = raw_input("Would you like to add " + fabric_mod_name + " to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
		print "mandatory option '" + m + "' is missing\n"
parser.print_help()
exit(-1)
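# Illustrative invocation (the script and module names here are examples only):
#   python tcm_mod_builder.py -m tcm_nab5000 -p iSCSI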
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
|
midnightradio/gensim
|
refs/heads/develop
|
gensim/test/test_nmf.py
|
2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2018 Timofey Yefimov <anotherbugmaster@gmail.com>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Automated tests for checking transformation algorithms (the models package).
"""
import unittest
import copy
import logging
import numbers
import numpy as np
from gensim import matutils
from gensim.models import nmf
from gensim.test import basetmtests
from gensim.test.utils import datapath, get_tmpfile, common_corpus, common_dictionary
class TestNmf(unittest.TestCase, basetmtests.TestBaseTopicModel):
def setUp(self):
self.model = nmf.Nmf(
common_corpus,
id2word=common_dictionary,
chunksize=1,
num_topics=2,
passes=100,
random_state=42,
)
def testGenerator(self):
model_1 = nmf.Nmf(
iter(common_corpus * 100),
id2word=common_dictionary,
chunksize=1,
num_topics=2,
passes=1,
random_state=42,
)
model_2 = nmf.Nmf(
common_corpus * 100,
id2word=common_dictionary,
chunksize=1,
num_topics=2,
passes=1,
random_state=42,
)
self.assertTrue(np.allclose(model_1.get_topics(), model_2.get_topics()))
def testUpdate(self):
model = copy.deepcopy(self.model)
model.update(common_corpus)
self.assertFalse(np.allclose(self.model.get_topics(), model.get_topics()))
def testRandomState(self):
model_1 = nmf.Nmf(
common_corpus,
id2word=common_dictionary,
chunksize=1,
num_topics=2,
passes=100,
random_state=42,
)
model_2 = nmf.Nmf(
common_corpus,
id2word=common_dictionary,
chunksize=1,
num_topics=2,
passes=100,
random_state=0,
)
self.assertTrue(np.allclose(self.model.get_topics(), model_1.get_topics()))
self.assertFalse(np.allclose(self.model.get_topics(), model_2.get_topics()))
def testTransform(self):
# transform one document
doc = list(common_corpus)[0]
transformed = self.model[doc]
vec = matutils.sparse2full(transformed, 2) # convert to dense vector, for easier equality tests
expected = [0., 1.]
# must contain the same values, up to re-ordering
self.assertTrue(np.allclose(sorted(vec), sorted(expected)))
# transform one word
word = 5
transformed = self.model.get_term_topics(word)
vec = matutils.sparse2full(transformed, 2)
expected = [0.35023746, 0.64976251]
# must contain the same values, up to re-ordering
self.assertTrue(np.allclose(sorted(vec), sorted(expected), rtol=1e-3))
def testTopTopics(self):
top_topics = self.model.top_topics(common_corpus)
for topic, score in top_topics:
self.assertTrue(isinstance(topic, list))
self.assertTrue(isinstance(score, float))
for v, k in topic:
self.assertTrue(isinstance(k, str))
self.assertTrue(np.issubdtype(v, float))
def testGetTopicTerms(self):
topic_terms = self.model.get_topic_terms(1)
for k, v in topic_terms:
self.assertTrue(isinstance(k, numbers.Integral))
self.assertTrue(np.issubdtype(v, float))
def testGetDocumentTopics(self):
doc_topics = self.model.get_document_topics(common_corpus)
for topic in doc_topics:
self.assertTrue(isinstance(topic, list))
for k, v in topic:
self.assertTrue(isinstance(k, numbers.Integral))
self.assertTrue(np.issubdtype(v, float))
        # Test case to use the get_document_topics function for the whole corpus
all_topics = self.model.get_document_topics(common_corpus)
for topic in all_topics:
self.assertTrue(isinstance(topic, list))
for k, v in topic: # list of doc_topics
self.assertTrue(isinstance(k, numbers.Integral))
self.assertTrue(np.issubdtype(v, float))
def testTermTopics(self):
# check with word_type
result = self.model.get_term_topics(2)
for topic_no, probability in result:
self.assertTrue(isinstance(topic_no, int))
self.assertTrue(np.issubdtype(probability, float))
# if user has entered word instead, check with word
result = self.model.get_term_topics(str(self.model.id2word[2]))
for topic_no, probability in result:
self.assertTrue(isinstance(topic_no, int))
self.assertTrue(np.issubdtype(probability, float))
def testPersistence(self):
fname = get_tmpfile('gensim_models_nmf.tst')
self.model.save(fname)
model2 = nmf.Nmf.load(fname)
tstvec = []
self.assertTrue(np.allclose(self.model[tstvec], model2[tstvec])) # try projecting an empty vector
def testLargeMmap(self):
fname = get_tmpfile('gensim_models_nmf.tst')
# simulate storing large arrays separately
self.model.save(fname, sep_limit=0)
# test loading the large model arrays with mmap
model2 = nmf.Nmf.load(fname, mmap='r')
self.assertEqual(self.model.num_topics, model2.num_topics)
tstvec = []
self.assertTrue(np.allclose(self.model[tstvec], model2[tstvec])) # try projecting an empty vector
def testLargeMmapCompressed(self):
fname = get_tmpfile('gensim_models_nmf.tst.gz')
# simulate storing large arrays separately
self.model.save(fname, sep_limit=0)
# test loading the large model arrays with mmap
self.assertRaises(IOError, nmf.Nmf.load, fname, mmap='r')
def testDtypeBackwardCompatibility(self):
nmf_fname = datapath('nmf_model')
test_doc = [(0, 1), (1, 1), (2, 1)]
expected_topics = [(1, 1.0)]
# save model to use in test
# self.model.save(nmf_fname)
# load a model saved using the latest version of Gensim
model = nmf.Nmf.load(nmf_fname)
# and test it on a predefined document
topics = model[test_doc]
self.assertTrue(np.allclose(expected_topics, topics))
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
unittest.main()
|
widowild/messcripts
|
refs/heads/master
|
exercice/python2/solutions/exercice_7_15.py
|
1
|
#! /usr/bin/env python
# -*- coding: Latin-1 -*-
def volBoite(x1=-1, x2=-1, x3=-1):
    "Volume of a box (rectangular parallelepiped)"
    if x1 == -1:
        return x1           # no argument was provided
    elif x2 == -1:
        return x1**3        # a single argument -> cubic box
    elif x3 == -1:
        return x1*x1*x2     # two arguments -> prismatic box
    else:
        return x1*x2*x3
# test :
print volBoite()
print volBoite(5.2)
print volBoite(5.2, 3)
print volBoite(5.2, 3, 7.4)
|
mabelcalim/tide-app
|
refs/heads/master
|
kivy/test8/.buildozer/venv/lib/python2.7/site-packages/setuptools/tests/test_packageindex.py
|
377
|
"""Package Index Tests
"""
import sys
import os
import unittest
import pkg_resources
from setuptools.compat import urllib2, httplib, HTTPError, unicode, pathname2url
import distutils.errors
import setuptools.package_index
from setuptools.tests.server import IndexServer
class TestPackageIndex(unittest.TestCase):
def test_bad_url_bad_port(self):
index = setuptools.package_index.PackageIndex()
url = 'http://127.0.0.1:0/nonesuch/test_package_index'
try:
v = index.open_url(url)
except Exception:
v = sys.exc_info()[1]
self.assertTrue(url in str(v))
else:
self.assertTrue(isinstance(v, HTTPError))
def test_bad_url_typo(self):
# issue 16
# easy_install inquant.contentmirror.plone breaks because of a typo
# in its home URL
index = setuptools.package_index.PackageIndex(
hosts=('www.example.com',)
)
url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
try:
v = index.open_url(url)
except Exception:
v = sys.exc_info()[1]
self.assertTrue(url in str(v))
else:
self.assertTrue(isinstance(v, HTTPError))
def test_bad_url_bad_status_line(self):
index = setuptools.package_index.PackageIndex(
hosts=('www.example.com',)
)
def _urlopen(*args):
raise httplib.BadStatusLine('line')
index.opener = _urlopen
url = 'http://example.com'
try:
v = index.open_url(url)
except Exception:
v = sys.exc_info()[1]
self.assertTrue('line' in str(v))
else:
            raise AssertionError('Should have raised here!')
def test_bad_url_double_scheme(self):
"""
A bad URL with a double scheme should raise a DistutilsError.
"""
index = setuptools.package_index.PackageIndex(
hosts=('www.example.com',)
)
# issue 20
url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
try:
index.open_url(url)
except distutils.errors.DistutilsError:
error = sys.exc_info()[1]
msg = unicode(error)
assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg
return
raise RuntimeError("Did not raise")
def test_bad_url_screwy_href(self):
index = setuptools.package_index.PackageIndex(
hosts=('www.example.com',)
)
# issue #160
if sys.version_info[0] == 2 and sys.version_info[1] == 7:
# this should not fail
url = 'http://example.com'
page = ('<a href="http://www.famfamfam.com]('
'http://www.famfamfam.com/">')
index.process_index(url, page)
def test_url_ok(self):
index = setuptools.package_index.PackageIndex(
hosts=('www.example.com',)
)
url = 'file:///tmp/test_package_index'
self.assertTrue(index.url_ok(url, True))
def test_links_priority(self):
"""
Download links from the pypi simple index should be used before
external download links.
https://bitbucket.org/tarek/distribute/issue/163
        Use case:
- someone uploads a package on pypi, a md5 is generated
- someone manually copies this link (with the md5 in the url) onto an
external page accessible from the package page.
- someone reuploads the package (with a different md5)
- while easy_installing, an MD5 error occurs because the external link
is used
-> Setuptools should use the link from pypi, not the external one.
"""
if sys.platform.startswith('java'):
# Skip this test on jython because binding to :0 fails
return
# start an index server
server = IndexServer()
server.start()
index_url = server.base_url() + 'test_links_priority/simple/'
# scan a test index
pi = setuptools.package_index.PackageIndex(index_url)
requirement = pkg_resources.Requirement.parse('foobar')
pi.find_packages(requirement)
server.stop()
# the distribution has been found
self.assertTrue('foobar' in pi)
# we have only one link, because links are compared without md5
self.assertTrue(len(pi['foobar'])==1)
# the link should be from the index
self.assertTrue('correct_md5' in pi['foobar'][0].location)
def test_parse_bdist_wininst(self):
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32'))
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32'))
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64'))
self.assertEqual(setuptools.package_index.parse_bdist_wininst(
'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64'))
def test__vcs_split_rev_from_url(self):
"""
Test the basic usage of _vcs_split_rev_from_url
"""
vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url
url, rev = vsrfu('https://example.com/bar@2995')
self.assertEqual(url, 'https://example.com/bar')
self.assertEqual(rev, '2995')
def test_local_index(self):
"""
local_open should be able to read an index from the file system.
"""
f = open('index.html', 'w')
f.write('<div>content</div>')
f.close()
try:
url = 'file:' + pathname2url(os.getcwd()) + '/'
res = setuptools.package_index.local_open(url)
finally:
os.remove('index.html')
assert 'content' in res.read()
class TestContentCheckers(unittest.TestCase):
def test_md5(self):
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
checker.feed('You should probably not be using MD5'.encode('ascii'))
self.assertEqual(checker.hash.hexdigest(),
'f12895fdffbd45007040d2e44df98478')
self.assertTrue(checker.is_valid())
def test_other_fragment(self):
"Content checks should succeed silently if no hash is present"
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#something%20completely%20different')
checker.feed('anything'.encode('ascii'))
self.assertTrue(checker.is_valid())
def test_blank_md5(self):
"Content checks should succeed if a hash is empty"
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#md5=')
checker.feed('anything'.encode('ascii'))
self.assertTrue(checker.is_valid())
def test_get_hash_name_md5(self):
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
self.assertEqual(checker.hash_name, 'md5')
def test_report(self):
checker = setuptools.package_index.HashChecker.from_url(
'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478')
rep = checker.report(lambda x: x, 'My message about %s')
self.assertEqual(rep, 'My message about md5')
|
renegelinas/mi-instrument
|
refs/heads/master
|
mi/instrument/wetlabs/fluorometer/flort_d/test/sample_data.py
|
11
|
from mi.instrument.wetlabs.fluorometer.flort_d.driver import NEWLINE
SAMPLE_MNU_RESPONSE = "Ser BBFL2W-993" + NEWLINE + \
"Ver Triplet5.20" + NEWLINE + \
"Ave 1" + NEWLINE + \
"Pkt 0" + NEWLINE + \
"M1d 0" + NEWLINE + \
"M2d 0" + NEWLINE + \
"M3d 0" + NEWLINE + \
"M1s 1.000E+00" + NEWLINE + \
"M2s 1.000E+00" + NEWLINE + \
"M3s 1.000E+00" + NEWLINE + \
"Seq 0" + NEWLINE + \
"Rat 19200" + NEWLINE + \
"Set 0" + NEWLINE + \
"Rec 1" + NEWLINE + \
"Man 0" + NEWLINE + \
"Int 00:00:10" + NEWLINE + \
"Dat 07/11/13" + NEWLINE + \
"Clk 12:48:34" + NEWLINE + \
"Mst 12:48:31" + NEWLINE + \
"Mem 4095"
SAMPLE_SAMPLE_RESPONSE = "07/16/13\t09:33:06\t700\t4130\t695\t1018\t460\t4130\t525" + NEWLINE
SAMPLE_MET_RESPONSE = "Sig_1,counts,,SO,3.000E+00,3" + NEWLINE + \
"5,Ref_2,Emission_WL," + NEWLINE + \
"6,Sig_2,counts,,SO,2.000E+00,2" + NEWLINE + \
"7,Ref_3,Emission_WL," + NEWLINE + \
"8,Sig_3,counts,,SO,1.000E+00,1"
|
stack-of-tasks/rbdlpy
|
refs/heads/master
|
tutorial/lib/python2.7/site-packages/OpenGLContext/passes/_flat.py
|
1
|
"""Flat rendering passes (base implementation)
"""
from OpenGLContext.scenegraph import nodepath,switch,boundingvolume
from OpenGL.GL import *
from OpenGLContext.arrays import array, dot, allclose
from OpenGLContext import frustum
from OpenGLContext.debug.logs import getTraceback
from vrml.vrml97 import nodetypes
from vrml import olist
from OpenGLContext.scenegraph import shaders
import sys
from pydispatch.dispatcher import connect
import logging
log = logging.getLogger( __name__ )
__all__ = (
'SGObserver',
'FlatPass',
'get_inv_modelproj',
'get_inv_modelview',
'get_modelproj',
'get_modelview',
'get_projection',
)
class SGObserver( object ):
"""Observer of a scenegraph that creates a flat set of paths
Uses dispatcher watches to observe any changes to the (rendering)
structure of a scenegraph and uses it to update an internal set
of paths for all renderable objects in the scenegraph.
"""
INTERESTING_TYPES = []
def __init__( self, scene, contexts ):
"""Initialize the FlatPass for this scene and set of contexts
scene -- the scenegraph to manage as a flattened hierarchy
contexts -- set of (weakrefs to) contexts to be serviced,
normally is a reference to Context.allContexts
"""
self.scene = scene
self.contexts = contexts
self.paths = {
}
self.nodePaths = {}
if scene:
self.integrate( scene )
connect(
self.onChildAdd,
signal = olist.OList.NEW_CHILD_EVT,
)
connect(
self.onChildRemove,
signal = olist.OList.DEL_CHILD_EVT,
)
connect(
self.onSwitchChange,
signal = switch.SWITCH_CHANGE_SIGNAL,
)
def integrate( self, node, parentPath=None ):
"""Integrate any children of node which are of interest"""
if parentPath is None:
parentPath = nodepath.NodePath( [] )
todo = [ (node,parentPath) ]
while todo:
next,parents = todo.pop(0)
path = parents + next
np = self.npFor( next )
np.append( path )
if hasattr( next, 'bind' ):
for context in self.contexts:
context = context()
if context is not None:
next.bind( context )
_ = self.npFor(next)
for typ in self.INTERESTING_TYPES:
if isinstance( next, typ ):
self.paths.setdefault( typ, []).append( path )
if hasattr(next, 'renderedChildren'):
# watch for next's changes...
for child in next.renderedChildren( ):
todo.append( (child,path) )
def npFor( self, node ):
"""For some reason setdefault isn't working for the weakkeydict"""
current = self.nodePaths.get( id(node) )
if current is None:
self.nodePaths[id(node)] = current = []
return current
def onSwitchChange( self, sender, value ):
for path in self.npFor( sender ):
for childPath in path.iterchildren():
if childPath[-1] is not value:
childPath.invalidate()
self.integrate( value, path )
self.purge()
def onChildAdd( self, sender, value ):
"""Sender has a new child named value"""
if hasattr( sender, 'renderedChildren' ):
children = sender.renderedChildren()
if value in children:
for path in self.npFor( sender ):
self.integrate( value, path )
def onChildRemove( self, sender, value ):
"""Invalidate all paths where sender has value as its child IFF child no longer in renderedChildren"""
if hasattr( sender, 'renderedChildren' ):
children = sender.renderedChildren()
if value not in children:
for path in self.npFor( sender ):
for childPath in path.iterchildren():
if childPath[-1] is value:
childPath.invalidate()
self.purge()
def purge( self ):
"""Purge all references to path"""
for key,values in self.paths.items():
filtered = []
for v in values:
if not v.broken:
filtered.append( v )
else:
np = self.npFor( v )
while v in np:
np.remove( v )
if not np:
try:
del self.nodePaths[id(v)]
except KeyError, err:
pass
self.paths[key][:] = filtered
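# Minimal usage sketch for SGObserver (illustrative; per the __init__
# docstring, `contexts` is normally Context.allContexts, and `scene` here
# stands in for an assumed scenegraph instance):
#   observer = SGObserver( scene, Context.allContexts )
#   renderable_paths = observer.paths.get( nodetypes.Rendering, () )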
def get_modelview( shader, mode ):
return mode.matrix
def get_projection( shader, mode ):
return mode.projection
def get_modelproj( shader, mode ):
return dot( mode.matrix, mode.projection )
def get_inv_modelview( shader, mode ):
return dot( mode.viewPlatform.modelMatrix(inverse=True), mode.renderPath.transformMatrix( inverse=True ) )
def get_inv_projection( shader, mode ):
return mode.viewPlatform.viewMatrix( mode.maxDepth, inverse=True )
def get_inv_modelproj( shader, mode ):
mv = get_inv_modelview( shader, mode )
proj = get_inv_projection( shader, mode )
return dot( proj, mv )
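# Note on conventions: the forward composition is dot( modelview, projection )
# (row-vector convention, see get_modelproj), so the combined inverse composes
# in the opposite order, dot( inv_projection, inv_modelview ), as above.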
class FlatPass( SGObserver ):
"""Flat rendering pass with a single function to render scenegraph
Uses structural scenegraph observations to allow the actual
    rendering pass to be a simple iteration over the paths known
to be active in the scenegraph.
Rendering Attributes:
visible -- whether we are currently rendering a visible pass
transparent -- whether we are currently doing a transparent pass
lighting -- whether we currently are rendering a lit pass
context -- context for which we are rendering
cache -- cache of the context for which we are rendering
projection -- projection matrix of current view platform
modelView -- model-view matrix of current view platform
viewport -- 4-component viewport definition for current context
frustum -- viewing-frustum definition for current view platform
MAX_LIGHTS -- queried maximum number of lights
passCount -- not used, always set to 0 for code that expects
a passCount to be available.
transform -- ignored, legacy code only
"""
passCount = 0
visible = True
transparent = False
transform = True
lighting = True
lightingAmbient = True
lightingDiffuse = True
    # these are now obsolete...
selectNames = False
selectForced = False
cache = None
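    # Naming scheme for the uniforms generated below: mat_ = plain matrix,
    # inv_ = inverse, tps_ = transpose, itp_ = inverse transpose; the suffix
    # names the matrix (modelview, projection, or combined modelproj).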
_UNIFORM_NAMES = '''mat_modelview inv_modelview tps_modelview itp_modelview
mat_projection inv_projection tps_projection itp_projection
mat_modelproj inv_modelproj tps_modelproj itp_modelproj'''.split()
_uniforms = None
@property
def uniforms( self ):
if self._uniforms is None:
self._uniforms = []
for name in self._UNIFORM_NAMES:
attr = 'uniform_%s'%(name,)
uniform = shaders.FloatUniformm4( name=name )
self._uniforms.append( uniform )
setattr( self, attr, uniform )
if name.startswith( 'tps_' ) or name.startswith( 'itp_' ):
uniform.NEED_TRANSPOSE = True
else:
uniform.NEED_TRANSPOSE = False
if name.startswith( 'inv_' ) or name.startswith( 'itp_' ):
uniform.NEED_INVERSE = True
else:
uniform.NEED_INVERSE = False
# now set up the lazy calculation operations
self.uniform_mat_modelview.currentValue = get_modelview
self.uniform_mat_projection.currentValue = get_projection
self.uniform_mat_modelproj.currentValue = get_modelproj
self.uniform_tps_modelview.currentValue = get_modelview
self.uniform_tps_projection.currentValue = get_projection
self.uniform_tps_modelproj.currentValue = get_modelproj
self.uniform_inv_modelview.currentValue = get_inv_modelview
self.uniform_inv_projection.currentValue = get_inv_projection
self.uniform_inv_modelproj.currentValue = get_inv_modelproj
self.uniform_itp_modelview.currentValue = get_inv_modelview
self.uniform_itp_projection.currentValue = get_inv_projection
self.uniform_itp_modelproj.currentValue = get_inv_modelproj
return self._uniforms
def applyUniforms( self, shader ):
"""Apply our uniforms to the shader as appropriate"""
for uniform in self.uniforms:
uniform.render( shader, self )
INTERESTING_TYPES = [
nodetypes.Rendering,
nodetypes.Bindable,
nodetypes.Light,
nodetypes.Traversable,
nodetypes.Background,
nodetypes.TimeDependent,
nodetypes.Fog,
nodetypes.Viewpoint,
nodetypes.NavigationInfo,
]
def currentBackground( self ):
"""Find our current background node"""
paths = self.paths.get( nodetypes.Background, () )
for background in paths:
if background[-1].bound:
return background
if paths:
current = paths[0]
current[-1].bound = 1
return current
return None
def renderSet( self, matrix ):
"""Calculate ordered rendering set to display"""
# ordered set of things to work with...
toRender = []
for path in self.paths.get( nodetypes.Rendering, ()):
tmatrix = path.transformMatrix()
mvmatrix = dot(tmatrix,matrix)
sortKey = path[-1].sortKey( self, tmatrix )
if hasattr( path[-1], 'boundingVolume' ):
bvolume = path[-1].boundingVolume( self )
else:
bvolume = None
toRender.append( (sortKey, mvmatrix,tmatrix,bvolume, path ) )
toRender = self.frustumVisibilityFilter( toRender )
toRender.sort( key = lambda x: x[0])
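        # sortKey's first element appears to act as a transparency flag:
        # renderOpaque draws entries whose key[0] is false and
        # renderTransparent the rest, so sorting on it yields
        # opaque-before-transparent ordering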
return toRender
def greatestDepth( self, toRender ):
# experimental: adjust our frustum to smaller depth based on
# the projected z-depth of bbox points...
maxDepth = 0
for (key,mv,tm,bv,path) in toRender:
try:
points = bv.getPoints()
except (AttributeError,boundingvolume.UnboundedObject), err:
return 0
else:
translated = dot( points, mv )
maxDepth = min((maxDepth, min( translated[:,2] )))
        # eye-space z is negative in front of the camera, so negate to get
        # the distance to the farthest bounding-box point (with a 1% margin)
        maxDepth = -(maxDepth*1.01)
        # 101 is to allow the 100 unit background to show... sigh
        return max((maxDepth,101))
def frustumVisibilityFilter( self, records ):
"""Filter records for visibility using frustum planes
This does per-object culling based on frustum lookups
rather than object query values. It should be fast
*if* the frustcullaccel module is available, if not
it will be dog-slow.
"""
result = []
for record in records:
(key,mv,tm,bv,path) = record
if bv is not None:
visible = bv.visible(
self.frustum, tm.astype('f'),
occlusion=False,
mode=self
)
if visible:
result.append( record )
else:
result.append( record )
return result
def Render( self, context, mode ):
"""Render the geometry attached to this flat-renderer's scenegraph"""
# clear the projection matrix set up by legacy sg
matrix = self.getModelView()
self.matrix = matrix
toRender = self.renderSet( matrix )
maxDepth = self.maxDepth = self.greatestDepth( toRender )
vp = context.getViewPlatform()
if maxDepth:
self.projection = vp.viewMatrix(maxDepth)
# Load our projection matrix for all legacy rendering operations...
# do we need to do a selection-render pass?
events = context.getPickEvents()
debugSelection = mode.context.contextDefinition.debugSelection
if events or debugSelection:
self.selectRender( mode, toRender, events )
events.clear()
# Load the root
if not debugSelection:
self.matrix = matrix
self.visible = True
self.transparent = False
self.lighting = True
self.textured = True
self.legacyBackgroundRender( vp,matrix )
# Set up generic "geometric" rendering parameters
glFrontFace( GL_CCW )
glEnable(GL_DEPTH_TEST)
        glDepthFunc( GL_LESS )
glEnable(GL_CULL_FACE)
glCullFace(GL_BACK)
self.legacyLightRender( matrix )
self.renderOpaque( toRender )
self.renderTransparent( toRender )
if context.frameCounter and context.frameCounter.display:
context.frameCounter.Render( context )
context.SwapBuffers()
self.matrix = matrix
def legacyBackgroundRender( self, vp, matrix ):
"""Do legacy background rendering"""
bPath = self.currentBackground( )
if bPath is not None:
# legacy...
self.matrix = dot(
vp.quaternion.matrix( dtype='f'),
bPath.transformMatrix(translate=0,scale=0, rotate=1 )
)
bPath[-1].Render( mode=self, clear=True )
else:
### default VRML background is black
glClearColor(0.0,0.0,0.0,1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT )
def legacyLightRender( self, matrix ):
"""Do legacy light-rendering operation"""
id = 0
for path in self.paths.get( nodetypes.Light, ()):
tmatrix = path.transformMatrix()
localMatrix = dot(tmatrix,matrix)
self.matrix = localMatrix
self.renderPath = path
#glLoadMatrixf( localMatrix )
#path[-1].Light( GL_LIGHT0+id, mode=self )
id += 1
if id >= (self.MAX_LIGHTS-1):
break
if not id:
# default VRML lighting...
from OpenGLContext.scenegraph import light
l = light.DirectionalLight( direction = (0,0,-1.0))
# glLoadMatrixf( matrix )
# l.Light( GL_LIGHT0, mode = self )
self.matrix = matrix
def renderGeometry( self, mvmatrix ):
"""Render geometry present in the given mvmatrix
Intended for use in e.g. setting up a light texture, this
operation *just* does a query to find the visible objects
and then passes them (all) to renderOpaque
"""
toRender = self.renderSet( mvmatrix )
self.renderOpaque( toRender )
self.renderTransparent( toRender )
def renderOpaque( self, toRender ):
"""Render the opaque geometry from toRender (in reverse order)"""
self.transparent = False
debugFrustum = self.context.contextDefinition.debugBBox
for key,mvmatrix,tmatrix,bvolume,path in toRender:
if not key[0]:
self.matrix = mvmatrix
self.renderPath = path
# glMatrixMode(GL_MODELVIEW)
# glLoadMatrixf( mvmatrix )
try:
path[-1].Render( mode = self )
if debugFrustum:
bvolume.debugRender( )
except Exception, err:
log.error(
"""Failure in opaque render: %s""",
getTraceback( err ),
)
import os
os._exit(1)
def renderTransparent( self, toRender ):
"""Render the transparent geometry from toRender (in forward order)"""
self.transparent = True
setup = False
debugFrustum = self.context.contextDefinition.debugBBox
try:
for key,mvmatrix,tmatrix,bvolume,path in toRender:
if key[0]:
if not setup:
setup = True
glEnable(GL_BLEND)
                        # standard back-to-front alpha blending
                        glBlendFunc( GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA )
glDepthMask( 0 )
glDepthFunc( GL_LEQUAL )
self.matrix = mvmatrix
self.renderPath = path
glLoadMatrixf( mvmatrix )
try:
path[-1].RenderTransparent( mode = self )
if debugFrustum:
bvolume.debugRender( )
except Exception, err:
log.error(
"""Failure in %s: %s""",
path[-1].Render,
getTraceback( err ),
)
finally:
self.transparent = False
if setup:
glDisable( GL_BLEND )
glDepthMask( 1 )
glDepthFunc( GL_LEQUAL )
glEnable( GL_DEPTH_TEST )
def addTransparent( self, other ):
pass
def selectRender( self, mode, toRender, events ):
"""Render each path to color buffer
We render all geometry as non-transparent geometry with
unique colour values for each object. We should be able
to handle up to 2**24 objects before that starts failing.
"""
# TODO: allow context to signal that it is "captured" by a
# movement manager that doesn't need select rendering...
# e.g. for an examine manager there's no reason to do select
# render passes...
# TODO: do line-box intersection tests for bounding boxes to
# only render the geometry which is under the cursor
# TODO: render to an FBO instead of the back buffer
# (when available)
# TODO: render at 1/2 size compared to context to create a
# 2x2 selection square and reduce overhead.
glClearColor( 0,0,0, 0 )
glClear( GL_DEPTH_BUFFER_BIT|GL_COLOR_BUFFER_BIT )
self.visible = False
self.transparent = False
self.lighting = False
self.textured = False
matrix = self.matrix
map = {}
pickPoints = {}
# TODO: this could be faster, and we could do further filtering
# using a frustum a-la select render mode approach...
min_x,min_y = self.getViewport()[2:]
max_x,max_y = 0,0
pickSize = 2
offset = pickSize//2
for event in events.values():
x,y = key = tuple(event.getPickPoint())
pickPoints.setdefault( key, []).append( event )
min_x = min((x-offset,min_x))
max_x = max((x+offset,max_x))
min_y = min((y-offset,min_y))
max_y = max((y+offset,max_y))
min_x = int(max((0,min_x)))
min_y = int(max((0,min_y)))
if max_x < min_x or max_y < min_y:
# no pick points were found
return
debugSelection = mode.context.contextDefinition.debugSelection
if not debugSelection:
glScissor( min_x,min_y,int(max_x)-min_x,int(max_y)-min_y)
glEnable( GL_SCISSOR_TEST )
glMatrixMode( GL_MODELVIEW )
try:
idHolder = array( [0,0,0,0], 'B' )
idSetter = idHolder.view( '<I' )
for id,(key,mvmatrix,tmatrix,bvolume,path) in enumerate(toRender):
id = (id+1) << 12
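                # shifting by 12 leaves the low bits of the colour id clear,
                # presumably to tolerate low-order rounding when the colour
                # is read back from the framebuffer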
idSetter[0] = id
glColor4ubv( idHolder )
self.matrix = mvmatrix
self.renderPath = path
glLoadMatrixf( mvmatrix )
path[-1].Render( mode=self )
map[id] = path
pixel = array([0,0,0,0],'B')
depth_pixel = array([[0]],'f')
            for point,eventSet in pickPoints.items():
                # get the pixel colour (id) under the cursor.
                glReadPixels( point[0],point[1],1,1,GL_RGBA,GL_UNSIGNED_BYTE, pixel )
                lpixel = long( pixel.view( '<I' )[0] )
                paths = map.get( lpixel, [] )
                # get the depth value under the cursor...
                glReadPixels(
                    point[0],point[1],1,1,GL_DEPTH_COMPONENT,GL_FLOAT,depth_pixel
                )
                # deliver the result to every event queued for this pick point
                for event in eventSet:
                    event.setObjectPaths( [paths] )
                    event.viewCoordinate = point[0],point[1],depth_pixel[0][0]
                    event.modelViewMatrix = matrix
                    event.projectionMatrix = self.projection
                    event.viewport = self.viewport
                    if hasattr( mode.context, 'ProcessEvent'):
                        mode.context.ProcessEvent( event )
finally:
glColor4f( 1.0,1.0,1.0, 1.0)
glDisable( GL_COLOR_MATERIAL )
glEnable( GL_LIGHTING )
glDisable( GL_SCISSOR_TEST )
MAX_LIGHTS = -1
def __call__( self, context ):
"""Overall rendering pass interface for the context client"""
vp = context.getViewPlatform()
self.setViewPlatform( vp )
# These values are temporarily stored locally, we are
# in the context lock, so we're not causing conflicts
if self.MAX_LIGHTS == -1:
self.MAX_LIGHTS = 8 #glGetIntegerv( GL_MAX_LIGHTS )
self.context = context
self.cache = context.cache
self.viewport = (0,0) + context.getViewPort()
self.calculateFrustum()
self.Render( context, self )
return True # flip yes, for now we always flip...
def calculateFrustum( self ):
"""Construct our Frustum instance (currently by extracting from mv matrix)"""
# TODO: calculate from view platform instead
self.frustum = frustum.Frustum.fromViewingMatrix(
self.modelproj,
normalize = 1
)
return self.frustum
def getProjection (self):
"""Retrieve the projection matrix for the rendering pass"""
return self.projection
def getViewport (self):
"""Retrieve the viewport parameters for the rendering pass"""
return self.viewport
def getModelView( self ):
"""Retrieve the base model-view matrix for the rendering pass"""
return self.modelView
def setViewPlatform( self, vp ):
"""Set our view platform"""
self.viewPlatform = vp
self.projection = vp.viewMatrix().astype('f')
self.modelView = vp.modelMatrix().astype('f')
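        # combined model-view-projection matrix; calculateFrustum extracts
        # the culling planes from this product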
self.modelproj = dot( self.modelView, self.projection )
self.matrix = None
|
b-me/django
|
refs/heads/master
|
tests/template_backends/test_dummy.py
|
306
|
# coding: utf-8
from __future__ import unicode_literals
from django.forms import CharField, Form, Media
from django.http import HttpRequest
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.template import TemplateDoesNotExist, TemplateSyntaxError
from django.template.backends.dummy import TemplateStrings
from django.test import SimpleTestCase
class TemplateStringsTests(SimpleTestCase):
engine_class = TemplateStrings
backend_name = 'dummy'
options = {}
@classmethod
def setUpClass(cls):
super(TemplateStringsTests, cls).setUpClass()
params = {
'DIRS': [],
'APP_DIRS': True,
'NAME': cls.backend_name,
'OPTIONS': cls.options,
}
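        # params mirror a single TEMPLATES entry (BACKEND is implied by the
        # engine class under test)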
cls.engine = cls.engine_class(params)
def test_from_string(self):
template = self.engine.from_string("Hello!\n")
content = template.render()
self.assertEqual(content, "Hello!\n")
def test_get_template(self):
template = self.engine.get_template('template_backends/hello.html')
content = template.render({'name': 'world'})
self.assertEqual(content, "Hello world!\n")
def test_get_template_non_existing(self):
with self.assertRaises(TemplateDoesNotExist) as e:
self.engine.get_template('template_backends/non_existing.html')
self.assertEqual(e.exception.backend, self.engine)
def test_get_template_syntax_error(self):
# There's no way to trigger a syntax error with the dummy backend.
# The test still lives here to factor it between other backends.
if self.backend_name == 'dummy':
self.skipTest("test doesn't apply to dummy backend")
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('template_backends/syntax_error.html')
def test_html_escaping(self):
template = self.engine.get_template('template_backends/hello.html')
context = {'name': '<script>alert("XSS!");</script>'}
content = template.render(context)
        self.assertIn('&lt;script&gt;', content)
self.assertNotIn('<script>', content)
def test_django_html_escaping(self):
if self.backend_name == 'dummy':
self.skipTest("test doesn't apply to dummy backend")
class TestForm(Form):
test_field = CharField()
media = Media(js=['my-script.js'])
form = TestForm()
template = self.engine.get_template('template_backends/django_escaping.html')
content = template.render({'media': media, 'test_form': form})
expected = '{}\n\n{}\n\n{}'.format(media, form, form['test_field'])
self.assertHTMLEqual(content, expected)
def test_csrf_token(self):
request = HttpRequest()
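        # run the CSRF middleware so the request carries the state that
        # get_token/csrf_token rendering relies on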
CsrfViewMiddleware().process_view(request, lambda r: None, (), {})
template = self.engine.get_template('template_backends/csrf.html')
content = template.render(request=request)
expected = (
'<input type="hidden" name="csrfmiddlewaretoken" '
'value="{}" />'.format(get_token(request)))
self.assertHTMLEqual(content, expected)
def test_no_directory_traversal(self):
with self.assertRaises(TemplateDoesNotExist):
self.engine.get_template('../forbidden/template_backends/hello.html')
def test_non_ascii_characters(self):
template = self.engine.get_template('template_backends/hello.html')
content = template.render({'name': 'Jérôme'})
self.assertEqual(content, "Hello Jérôme!\n")
|
vanpact/scipy
|
refs/heads/master
|
scipy/odr/setup.py
|
128
|
#!/usr/bin/env python
from __future__ import division, print_function, absolute_import
from os.path import join
def configuration(parent_package='', top_path=None):
import warnings
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info, BlasNotFoundError
config = Configuration('odr', parent_package, top_path)
libodr_files = ['d_odr.f',
'd_mprec.f',
'dlunoc.f']
blas_info = get_info('blas_opt')
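    # if an optimized BLAS is available, d_lpk.f links against it; otherwise
    # fall back to d_lpkbls.f, which bundles the BLAS routines ODRPACK needs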
if blas_info:
libodr_files.append('d_lpk.f')
else:
warnings.warn(BlasNotFoundError.__doc__)
libodr_files.append('d_lpkbls.f')
odrpack_src = [join('odrpack', x) for x in libodr_files]
config.add_library('odrpack', sources=odrpack_src)
sources = ['__odrpack.c']
libraries = ['odrpack'] + blas_info.pop('libraries', [])
include_dirs = ['.'] + blas_info.pop('include_dirs', [])
config.add_extension('__odrpack',
sources=sources,
libraries=libraries,
include_dirs=include_dirs,
depends=(['odrpack.h'] + odrpack_src),
**blas_info
)
config.add_data_dir('tests')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|