| code (string) | repo_name (string) | path (string) | language (string) | license (string) | size (int32) |
|---|---|---|---|---|---|
"""Common algebra of "quantum" objects
Quantum objects have an associated Hilbert space, and they (at least partially)
support summation, products, multiplication with a scalar, and adjoints.
The algebra defined in this module is the superset of the Hilbert space algebra
of states (augmented by the tensor product), and the C* algebras of operators
and superoperators.
"""
import re
from abc import ABCMeta, abstractmethod
from collections import defaultdict, OrderedDict
from itertools import product as cartesian_product
import sympy
from sympy import Symbol, sympify
from .hilbert_space_algebra import ProductSpace, LocalSpace, TrivialSpace
from .abstract_algebra import Operation, Expression, substitute
from .algebraic_properties import derivative_via_diff
from .indexed_operations import IndexedSum
from ...utils.ordering import (
DisjunctCommutativeHSOrder, FullCommutativeHSOrder, KeyTuple, )
from ...utils.indices import (
SymbolicLabelBase, IndexOverList, IndexOverFockSpace, IndexOverRange)
__all__ = [
'ScalarTimesQuantumExpression', 'QuantumExpression', 'QuantumOperation',
'QuantumPlus', 'QuantumTimes', 'SingleQuantumOperation', 'QuantumAdjoint',
'QuantumSymbol', 'QuantumIndexedSum', 'QuantumDerivative', 'Sum']
__private__ = [
'ensure_local_space']
_sympyOne = sympify(1)
class QuantumExpression(Expression, metaclass=ABCMeta):
"""Base class for expressions associated with a Hilbert space"""
_zero = None # The neutral element for addition
_one = None # The neutral element for multiplication
_base_cls = None # The most general class we can add / multiply
_scalar_times_expr_cls = None # class for multiplication with scalar
_plus_cls = None # class for internal addition
_times_cls = None # class for internal multiplication
_adjoint_cls = None # class for the adjoint
_indexed_sum_cls = None # class for indexed sum
_default_hs_cls = None # class for implicit Hilbert spaces (str, int)
# _default_hs_cls is set by `init_algebra`
_order_index = 0 # index of "order group": things that should go together
_order_coeff = 1 # scalar prefactor
_order_name = None
def __init__(self, *args, **kwargs):
self._order_args = KeyTuple([
arg._order_key if hasattr(arg, '_order_key') else arg
for arg in args])
self._order_kwargs = KeyTuple([
KeyTuple([
key, val._order_key if hasattr(val, '_order_key') else val])
for (key, val) in sorted(kwargs.items())])
super().__init__(*args, **kwargs)
@property
def is_zero(self):
"""Check whether the expression is equal to zero.
Specifically, this checks whether the expression is equal to the
neutral element for the addition within the algebra. This does not
generally imply equality with a scalar zero:
>>> ZeroOperator.is_zero
True
>>> ZeroOperator == 0
False
"""
return self == self._zero
@property
def _order_key(self):
return KeyTuple([
self._order_index, self._order_name or self.__class__.__name__,
self._order_coeff, self._order_args, self._order_kwargs])
@property
@abstractmethod
def space(self):
"""The :class:`.HilbertSpace` on which the operator acts
non-trivially"""
raise NotImplementedError(self.__class__.__name__)
def adjoint(self):
"""The Hermitian adjoint of the Expression"""
return self._adjoint()
def dag(self):
"""Alias for :meth:`adjoint`"""
return self._adjoint()
@abstractmethod
def _adjoint(self):
raise NotImplementedError(self.__class__.__name__)
def expand(self):
"""Expand out distributively all products of sums.
Note:
This does not expand out sums of scalar coefficients. You may use
:meth:`simplify_scalar` for this purpose.
"""
return self._expand()
def _expand(self):
return self
def simplify_scalar(self, func=sympy.simplify):
"""Simplify all scalar symbolic (SymPy) coefficients by appyling `func`
to them"""
return self._simplify_scalar(func=func)
def _simplify_scalar(self, func):
return self
def diff(self, sym: Symbol, n: int = 1, expand_simplify: bool = True):
"""Differentiate by scalar parameter `sym`.
Args:
sym: What to differentiate by.
n: How often to differentiate
expand_simplify: Whether to simplify the result.
Returns:
The n-th derivative.
"""
if not isinstance(sym, sympy.Basic):
raise TypeError("%s needs to be a Sympy symbol" % sym)
if sym.free_symbols.issubset(self.free_symbols):
# QuantumDerivative.create delegates internally to _diff (the
# explicit non-trivial derivative). Using `create` gives us free
# caching
deriv = QuantumDerivative.create(self, derivs={sym: n}, vals=None)
if not deriv.is_zero and expand_simplify:
deriv = deriv.expand().simplify_scalar()
return deriv
else:
# the "issubset" of free symbols is a sufficient, but not a
# necessary condition; if `sym` is non-atomic, determining whether
# `self` depends on `sym` is not completely trivial (you'd have to
# substitute with a Dummy)
return self.__class__._zero
@abstractmethod
def _diff(self, sym):
raise NotImplementedError()
def series_expand(
self, param: Symbol, about, order: int) -> tuple:
r"""Expand the expression as a truncated power series in a
scalar parameter.
When expanding an expr for a parameter $x$ about the point $x_0$ up to
order $N$, the resulting coefficients $(c_0, \dots, c_N)$ fulfill
.. math::
\text{expr} = \sum_{n=0}^{N} c_n (x - x_0)^n + O\left((x - x_0)^{N+1}\right)
Args:
param: Expansion parameter $x$
about (Scalar): Point $x_0$ about which to expand
order: Maximum order $N$ of expansion (>= 0)
Returns:
tuple of length ``order + 1``, where the entries are the
expansion coefficients, $(c_0, \dots, c_N)$.
Note:
The expansion coefficients are
"type-stable", in that they share a common base class with the
original expression. In particular, this applies to "zero"
coefficients::
>>> expr = KetSymbol("Psi", hs=0)
>>> t = sympy.symbols("t")
>>> assert expr.series_expand(t, 0, 1) == (expr, ZeroKet)
"""
expansion = self._series_expand(param, about, order)
# _series_expand is generally not "type-stable", so we post-process
# the result here to restore type-stability
res = []
for v in expansion:
if v == 0 or v.is_zero:
v = self._zero
elif v == 1:
v = self._one
assert isinstance(v, self._base_cls)
res.append(v)
return tuple(res)
def _series_expand(self, param, about, order):
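# Abstract Taylor expansion: the n-th coefficient is the n-th
# derivative of `expr`, evaluated at `about` and divided by n!,
#     c_n = (d^n expr / d param^n)|_{param=about} / n!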
expr = self
result = [_evaluate_at(expr, param, about)]
for n in range(1, order+1):
if not expr.is_zero:
expr = expr.diff(param)
result.append(
_evaluate_at(expr, param, about) / sympy.factorial(n))
else:
result.append(expr)
return tuple(result)
def __add__(self, other):
if not isinstance(other, self._base_cls):
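# try to lift a scalar `other` into the algebra by multiplying it
# with the neutral element; if the coercion fails, the isinstance
# check below falls through to NotImplemented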
try:
other = self.__class__._one * other
except TypeError:
pass
if isinstance(other, self.__class__._base_cls):
return self.__class__._plus_cls.create(self, other)
else:
return NotImplemented
def __radd__(self, other):
# addition is assumed to be commutative
return self.__add__(other)
def __mul__(self, other):
from qnet.algebra.core.scalar_algebra import is_scalar, ScalarValue
if not isinstance(other, self._base_cls):
if is_scalar(other):
other = ScalarValue.create(other)
# if other was a ScalarExpression, the conversion above leaves
# it unchanged
return self.__class__._scalar_times_expr_cls.create(
other, self)
if isinstance(other, self.__class__._base_cls):
return self.__class__._times_cls.create(self, other)
else:
return NotImplemented
def __rmul__(self, other):
# multiplication with scalar is assumed to be commutative, but any
# other multiplication is not
from qnet.algebra.core.scalar_algebra import is_scalar
if is_scalar(other):
return self.__mul__(other)
else:
return NotImplemented
def __sub__(self, other):
return self + (-1) * other
def __rsub__(self, other):
return (-1) * self + other
def __neg__(self):
return (-1) * self
def __truediv__(self, other):
try:
factor = _sympyOne / other
return self * factor
except TypeError:
try:
return super().__rmul__(other)
except AttributeError:
return NotImplemented
def __pow__(self, other):
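# integer powers unfold into repeated products; negative integer
# powers are rewritten as 1 / self**(-other)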
if other == 0:
return self._one
elif other == 1:
return self
else:
try:
other_is_int = (other == int(other))
except TypeError:
other_is_int = False
if other_is_int:
if other > 1:
return self.__class__._times_cls.create(
*[self for _ in range(other)])
elif other < 1:
return 1 / self**(-other)
else:
raise ValueError("Invalid exponent %r" % other)
else:
return NotImplemented
class QuantumSymbol(QuantumExpression, metaclass=ABCMeta):
"""Symbolic element of an algebra
Args:
label (str or SymbolicLabelBase): Label for the symbol
sym_args (Scalar): optional scalar arguments. With zero `sym_args`, the
resulting symbol is a constant. With one or more `sym_args`, it
becomes a function.
hs (HilbertSpace, str, int, tuple): the Hilbert space associated with
the symbol. If a `str` or an `int`, an implicit (sub-)instance of
:class:`LocalSpace` with a corresponding label will be created, or,
for a tuple of `str` or `int`, a :class:`ProductSpace`. The type of
the implicit Hilbert space is set by :func:`.init_algebra`.
"""
_rx_label = re.compile('^[A-Za-z][A-Za-z0-9]*(_[A-Za-z0-9().+-=]+)?$')
def __init__(self, label, *sym_args, hs):
from qnet.algebra.core.scalar_algebra import ScalarValue
self._label = label
sym_args = [ScalarValue.create(arg) for arg in sym_args]
self._sym_args = tuple(sym_args)
if isinstance(label, str):
if not self._rx_label.match(label):
raise ValueError(
"label '%s' does not match pattern '%s'"
% (label, self._rx_label.pattern))
elif isinstance(label, SymbolicLabelBase):
self._label = label
else:
raise TypeError(
"type of label must be str or SymbolicLabelBase, not %s"
% type(label))
if isinstance(hs, (str, int)):
hs = self._default_hs_cls(hs)
elif isinstance(hs, tuple):
hs = ProductSpace.create(*[self._default_hs_cls(h) for h in hs])
self._hs = hs
super().__init__(label, *sym_args, hs=hs)
@property
def label(self):
"""Label of the symbol"""
return self._label
@property
def args(self):
"""Tuple of positional arguments, consisting of the label and possible
`sym_args`"""
return (self.label, ) + self._sym_args
@property
def kwargs(self):
"""Dict of keyword arguments, containing only `hs`"""
return {'hs': self._hs}
@property
def sym_args(self):
"""Tuple of scalar arguments of the symbol"""
return self._sym_args
@property
def space(self):
return self._hs
def _diff(self, sym):
if all([arg.diff(sym).is_zero for arg in self.sym_args]):
# This includes the case where sym_args is empty
return self.__class__._zero
else:
return self.__class__._derivative_cls(self, derivs={sym: 1})
def _series_expand(self, param, about, order):
if len(self._sym_args) == 0:
return (self, ) + (0, ) * order
else:
# QuantumExpression._series_expand will return the abstract Taylor
# series
return super()._series_expand(param, about, order)
def _simplify_scalar(self, func):
simplified_sym_args = [func(sym) for sym in self._sym_args]
return self.create(self.label, *simplified_sym_args, hs=self.space)
def _expand(self):
return self
@property
def free_symbols(self):
try:
res = self.label.free_symbols
# TODO: anywhere else there are symbolic labels, symbols from the
# labels should be included in the free_symbols, too
except AttributeError:
res = set()
res.update(self._hs.free_symbols)
return res.union(*[sym.free_symbols for sym in self.sym_args])
def _adjoint(self):
return self.__class__._adjoint_cls(self)
class QuantumOperation(QuantumExpression, Operation, metaclass=ABCMeta):
"""Base class for operations on quantum expression
These are operations on quantum expressions within the same fundamental
set."""
# "same fundamental set" means all operandas are instances of _base_cls
# Operations that involve objects from different sets (e.g.,
# OperatorTimesKet) should directly subclass from QuantumExpression and
# Operation
_order_index = 1 # Operations are printed after "atomic" Expressions
def __init__(self, *operands, **kwargs):
for o in operands:
assert isinstance(o, self.__class__._base_cls)
op_spaces = [o.space for o in operands]
self._space = ProductSpace.create(*op_spaces)
super().__init__(*operands, **kwargs)
@property
def space(self):
"""Hilbert space of the operation result"""
return self._space
def _simplify_scalar(self, func):
simplified_operands = []
operands_have_changed = False
for op in self.operands:
new_op = op.simplify_scalar(func=func)
simplified_operands.append(new_op)
if new_op is not op:
operands_have_changed = True
if operands_have_changed:
return self.create(*simplified_operands, **self.kwargs)
else:
return self
class SingleQuantumOperation(QuantumOperation, metaclass=ABCMeta):
"""Base class for operations on a single quantum expression"""
def __init__(self, op, **kwargs):
if not isinstance(op, self._base_cls):
try:
op = op * self.__class__._one
except TypeError:
pass
super().__init__(op, **kwargs)
@property
def operand(self):
"""The operator that the operation acts on"""
return self.operands[0]
def _diff(self, sym):
# most single-quantum-operations are linear, i.e. they commute with the
# derivative. Those that are not must override _diff
return self.__class__.create(self.operand.diff(sym))
def _series_expand(self, param, about, order):
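# the operation is assumed linear, so it distributes over the
# expansion coefficients of the operand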
ope = self.operand.series_expand(param, about, order)
return tuple(self.__class__.create(opet) for opet in ope)
class QuantumAdjoint(SingleQuantumOperation, metaclass=ABCMeta):
"""Base class for adjoints of quantum expressions"""
def _expand(self):
eo = self.operand.expand()
if isinstance(eo, self.__class__._plus_cls):
summands = [eoo.adjoint() for eoo in eo.operands]
return self.__class__._plus_cls.create(*summands)
return eo.adjoint()
def _diff(self, sym):
return self.__class__.create(self.operands[0].diff(sym))
def _adjoint(self):
return self.operand
class QuantumPlus(QuantumOperation, metaclass=ABCMeta):
"""General implementation of addition of quantum expressions"""
order_key = FullCommutativeHSOrder
_neutral_element = None
def __init__(self, *operands, **kwargs):
if len(operands) <= 1:
raise TypeError(
"%s requires at least two operands" % self.__class__.__name__)
super().__init__(*operands, **kwargs)
def _expand(self):
summands = [o.expand() for o in self.operands]
return self.__class__._plus_cls.create(*summands)
def _series_expand(self, param, about, order):
tuples = (o.series_expand(param, about, order) for o in self.operands)
res = (self.__class__._plus_cls.create(*tels) for tels in zip(*tuples))
return res
def _diff(self, sym):
return sum([o.diff(sym) for o in self.operands], self.__class__._zero)
def _adjoint(self):
return self.__class__._plus_cls(*[o.adjoint() for o in self.operands])
class QuantumTimes(QuantumOperation, metaclass=ABCMeta):
"""General implementation of product of quantum expressions"""
order_key = DisjunctCommutativeHSOrder
_neutral_element = None
def __init__(self, *operands, **kwargs):
if len(operands) <= 1:
raise TypeError(
"%s requires at least two operands" % self.__class__.__name__)
super().__init__(*operands, **kwargs)
def factor_for_space(self, spc):
"""Return a tuple of two products, where the first product contains the
given Hilbert space, and the second product is disjunct from it."""
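# e.g., factoring a product A_1 * B_2 (hypothetical factors on spaces
# H_1, H_2) by H_1 yields (A_1, B_2): the factors acting on the given
# space, and the disjoint rest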
if spc == TrivialSpace:
ops_on_spc = [
o for o in self.operands if o.space is TrivialSpace]
ops_not_on_spc = [
o for o in self.operands if o.space > TrivialSpace]
else:
ops_on_spc = [
o for o in self.operands if (o.space & spc) > TrivialSpace]
ops_not_on_spc = [
o for o in self.operands if (o.space & spc) is TrivialSpace]
return (
self.__class__._times_cls.create(*ops_on_spc),
self.__class__._times_cls.create(*ops_not_on_spc))
def _expand(self):
return _expand_product(self.operands)
def _series_expand(self, param, about, order):
assert len(self.operands) > 1
cfirst = self.operands[0].series_expand(param, about, order)
if len(self.operands) > 2:
crest = (
self.__class__(*self.operands[1:])
.series_expand(param, about, order))
else:
# with exactly one remaining operand we must expand it directly:
# the constructor used for `crest` above (which bypasses `create`,
# for efficiency) requires at least two operands
crest = self.operands[1].series_expand(param, about, order)
return _series_expand_combine_prod(cfirst, crest, order)
def _diff(self, sym):
assert len(self.operands) > 1
first = self.operands[0]
rest = self.__class__._times_cls.create(*self.operands[1:])
return first.diff(sym) * rest + first * rest.diff(sym)
def _adjoint(self):
return self.__class__._times_cls.create(
*[o.adjoint() for o in reversed(self.operands)])
class ScalarTimesQuantumExpression(
QuantumExpression, Operation, metaclass=ABCMeta):
"""Product of a :class:`.Scalar` and a :class:`QuantumExpression`"""
@classmethod
def create(cls, coeff, term):
from qnet.algebra.core.scalar_algebra import Scalar, ScalarValue
if not isinstance(coeff, Scalar):
coeff = ScalarValue.create(coeff)
return super().create(coeff, term)
def __init__(self, coeff, term):
from qnet.algebra.core.scalar_algebra import Scalar, ScalarValue
if not isinstance(coeff, Scalar):
coeff = ScalarValue.create(coeff)
self._order_coeff = coeff
self._order_args = KeyTuple([term._order_key])
super().__init__(coeff, term)
@property
def coeff(self):
return self.operands[0]
@property
def term(self):
return self.operands[1]
def _substitute(self, var_map, safe=False):
st = self.term.substitute(var_map)
if isinstance(self.coeff, sympy.Basic):
svar_map = {k: v for k, v in var_map.items()
if not isinstance(k, Expression)}
sc = self.coeff.subs(svar_map)
else:
sc = substitute(self.coeff, var_map)
if safe:
return self.__class__(sc, st)
else:
return sc * st
@property
def free_symbols(self):
return self.coeff.free_symbols | self.term.free_symbols
def _adjoint(self):
return self.coeff.conjugate() * self.term.adjoint()
@property
def _order_key(self):
from qnet.printing.asciiprinter import QnetAsciiDefaultPrinter
ascii = QnetAsciiDefaultPrinter().doprint
t = self.term._order_key
try:
c = abs(float(self.coeff)) # smallest coefficients first
except (ValueError, TypeError):
c = float('inf')
return KeyTuple(t[:2] + (c,) + t[3:] + (ascii(self.coeff),))
@property
def space(self):
return self.operands[1].space
def _expand(self):
return _expand_product(self.operands)
def _series_expand(self, param, about, order):
ce = self.coeff.series_expand(param, about, order)
te = self.term.series_expand(param, about, order)
return _series_expand_combine_prod(ce, te, order)
def _diff(self, sym):
c, t = self.operands
return c.diff(sym) * t + c * t.diff(sym)
def _simplify_scalar(self, func):
coeff, term = self.operands
try:
if isinstance(coeff.val, sympy.Basic):
coeff = func(coeff)
except AttributeError:
# coeff is not a SymPy ScalarValue; leave it unchanged
pass
return coeff * term.simplify_scalar(func=func)
def __complex__(self):
if self.term is self.__class__._one:
return complex(self.coeff)
return NotImplemented
def __float__(self):
if self.term is self.__class__._one:
return float(self.coeff)
return NotImplemented
class QuantumDerivative(SingleQuantumOperation):
r"""Symbolic partial derivative
.. math::
\frac{\partial^n}{\partial x_1^{n_1} \dots \partial x_N^{n_N}}
A(x_1, \dots, x_N); \qquad
\text{with} \quad n = \sum_i n_i
Alternatively, if `vals` is given, a symbolic representation of the
derivative (partially) evaluated at a specific point.
.. math::
\left.\frac{\partial^n}{\partial x_1^{n_1} \dots \partial x_N^{n_N}}
A(x_1, \dots, x_N) \right\vert_{x_1=v_1, \dots}
Args:
op (QuantumExpression): the expression $A(x_1, \dots, x_N)$ that is
being differentiated
derivs (dict): a map of symbols $x_i$ to the order $n_i$ of the
derivative with respect to that symbol
vals (dict or None): If not ``None``, a map of symbols $x_i$ to values
$v_i$ for the point at which the derivative should be evaluated.
Note:
:class:`QuantumDerivative` is intended to be instantiated only inside
the :meth:`_diff` method of a :class:`QuantumExpression`, for
expressions that depend on scalar arguments in an unspecified way.
Generally, if a derivative can be calculated explicitly, the explicit
form is preferred over the abstract :class:`QuantumDerivative`.
"""
simplifications = [derivative_via_diff, ] # create -> ._diff
# *Any* invocations of `create` will directly return the result of
# `derivative_via_diff` (but with caching)
@classmethod
def create(cls, op, *, derivs, vals=None):
"""Instantiate the derivative by repeatedly calling
the :meth:`~QuantumExpression._diff` method of `op` and evaluating the
result at the given `vals`.
"""
# To ensure stable ordering in Expression._get_instance_key, we explicitly
# convert `derivs` and `vals` to a tuple structure with a custom sorting key.
if not isinstance(derivs, tuple):
derivs = cls._dict_to_ordered_tuple(dict(derivs))
if not (isinstance(vals, tuple) or vals is None):
vals = cls._dict_to_ordered_tuple(dict(vals))
return super().create(op, derivs=derivs, vals=vals)
def __init__(self, op, *, derivs, vals=None):
self._derivs = defaultdict(int)
self._n = 0
syms = []
for sym, n in dict(derivs).items():
if not isinstance(sym, sympy.Basic):
raise TypeError("%s needs to be a Sympy symbol" % sym)
syms.append(sym)
n = int(n)
if n <= 0:
raise ValueError(
"Derivative wrt symbol %s must be be for an order "
"greater than zero, not %s" % (sym, n))
assert n > 0
self._n += n
self._derivs[sym] += n
self._syms = set(syms)
self._vals = {}
if vals is not None:
for sym, val in dict(vals).items():
if sym not in self._syms:
raise ValueError(
"Derivative can only be evaluated for a symbol if "
"the derivative is with respect to that symbol; "
"not %s" % sym)
self._vals[sym] = val
self._ordered_derivs = self._dict_to_ordered_tuple(self._derivs)
self._ordered_vals = self._dict_to_ordered_tuple(self._vals)
# Expression._get_instance_key wouldn't work with mutable dicts
super().__init__(
op, derivs=self._ordered_derivs, vals=self._ordered_vals)
@property
def kwargs(self):
"""Keyword arguments for the instantiation of the derivative"""
return OrderedDict([
('derivs', self._ordered_derivs),
('vals', self._ordered_vals or None)])
@property
def minimal_kwargs(self):
"""Minimal keyword arguments for the instantiation of the derivative
(excluding defaults)"""
if len(self._vals) == 0:
return {'derivs': self._ordered_derivs}
else:
return self.kwargs
@staticmethod
def _dict_to_ordered_tuple(d):
from qnet.printing.asciiprinter import QnetAsciiDefaultPrinter
sort_key = QnetAsciiDefaultPrinter().doprint # arbitrary, but stable
return tuple([(s, d[s]) for s in sorted(d.keys(), key=sort_key)])
def evaluate_at(self, vals):
"""Evaluate the derivative at a specific point"""
new_vals = self._vals.copy()
new_vals.update(vals)
return self.__class__(self.operand, derivs=self._derivs, vals=new_vals)
@property
def derivs(self):
"""Mapping of symbols to the order of the derivative with respect to
that symbol. Keys are ordered alphanumerically."""
return OrderedDict(self._ordered_derivs)
@property
def syms(self):
"""Set of symbols with respect to which the derivative is taken"""
return set(self._syms)
@property
def vals(self):
"""Mapping of symbols to values for which the derivative is to be
evaluated. Keys are ordered alphanumerically."""
return OrderedDict(self._ordered_vals)
@property
def free_symbols(self):
"""Set of free SymPy symbols contained within the expression."""
if self._free_symbols is None:
if len(self._vals) == 0:
self._free_symbols = self.operand.free_symbols
else:
dummy_map = {}
for sym in self._vals.keys():
dummy_map[sym] = sympy.Dummy()
# bound symbols may not be atomic, so we have to replace them
# with dummies
self._free_symbols = {
sym for sym
in self.operand.substitute(dummy_map).free_symbols
if not isinstance(sym, sympy.Dummy)}
for val in self._vals.values():
self._free_symbols.update(val.free_symbols)
return self._free_symbols
@property
def bound_symbols(self):
"""Set of Sympy symbols that are eliminated by evaluation."""
if self._bound_symbols is None:
res = set()
self._bound_symbols = res.union(
*[sym.free_symbols for sym in self._vals.keys()])
return self._bound_symbols
@property
def n(self):
"""The total order of the derivative.
This is the sum of the order values in :attr:`derivs`
"""
return self._n
def _diff(self, sym):
if sym in self._vals.keys():
return self.__class__._zero
else:
if not isinstance(sym, sympy.Basic):
raise TypeError("%s must be a Sympy symbol" % sym)
if sym in self._vals.values():
return self.__class__(self, derivs={sym: 1})
else:
derivs = self._derivs.copy()
derivs[sym] += 1
return self.__class__(
self.operand, derivs=derivs, vals=self._vals)
def _adjoint(self):
return self.__class__(
self.operand.adjoint(), derivs=self._derivs, vals=self._vals)
class QuantumIndexedSum(IndexedSum, SingleQuantumOperation, metaclass=ABCMeta):
"""Base class for indexed sums"""
@property
def space(self):
"""The Hilbert space of the sum's term"""
return self.term.space
def _expand(self):
return self.__class__.create(self.term.expand(), *self.ranges)
def _adjoint(self):
return self.__class__.create(self.term.adjoint(), *self.ranges)
def _diff(self, sym):
return self.__class__.create(self.term.diff(sym), *self.ranges)
def __mul__(self, other):
from qnet.algebra.core.scalar_algebra import is_scalar
if isinstance(other, IndexedSum):
other = other.make_disjunct_indices(self)
new_ranges = self.ranges + other.ranges
new_term = self.term * other.term
# note that class may change, depending on type of new_term
return new_term.__class__._indexed_sum_cls.create(
new_term, *new_ranges)
elif is_scalar(other):
return self.__class__._scalar_times_expr_cls(other, self)
elif isinstance(other, ScalarTimesQuantumExpression):
return self.__class__._scalar_times_expr_cls(
other.coeff, self * other.term)
else:
sum = self.make_disjunct_indices(*other.bound_symbols)
new_term = sum.term * other
return new_term.__class__._indexed_sum_cls.create(
new_term, *sum.ranges)
def __rmul__(self, other):
from qnet.algebra.core.scalar_algebra import is_scalar
if isinstance(other, IndexedSum):
self_new = self.make_disjunct_indices(other)
new_ranges = other.ranges + self_new.ranges
new_term = other.term * self_new.term
# note that class may change, depending on type of new_term
return new_term.__class__._indexed_sum_cls.create(
new_term, *new_ranges)
elif is_scalar(other):
return self.__class__._scalar_times_expr_cls.create(other, self)
elif isinstance(other, ScalarTimesQuantumExpression):
return self.__class__._scalar_times_expr_cls(
other.coeff, other.term * self)
else:
sum = self.make_disjunct_indices(*other.bound_symbols)
new_term = other * sum.term
return new_term.__class__._indexed_sum_cls.create(
new_term, *sum.ranges)
def __add__(self, other):
raise NotImplementedError()
def __radd__(self, other):
raise NotImplementedError()
def __sub__(self, other):
raise NotImplementedError()
def __rsub__(self, other):
raise NotImplementedError()
def _sum_over_list(term, idx, values):
return IndexOverList(idx, values)
def _sum_over_range(term, idx, start_from, to, step=1):
return IndexOverRange(idx, start_from=start_from, to=to, step=step)
def _sum_over_fockspace(term, idx, hs=None):
if hs is None:
return IndexOverFockSpace(idx, hs=term.space)
else:
return IndexOverFockSpace(idx, hs=hs)
def Sum(idx, *args, **kwargs):
"""Instantiator for an arbitrary indexed sum.
This returns a function that instantiates the appropriate
:class:`QuantumIndexedSum` subclass for a given term expression. It is the
preferred way to "manually" create indexed sum expressions, closely
resembling the normal mathematical notation for sums.
Args:
idx (IdxSym): The index symbol over which the sum runs
args: arguments that describe the values over which `idx` runs,
kwargs: keyword-arguments, used in addition to `args`
Returns:
callable: an instantiator function that takes an
arbitrary `term` that should generally contain the `idx` symbol, and
returns an indexed sum over that `term` with the index range specified
by the original `args` and `kwargs`.
There is considerable flexibility to specify concise `args` for a variety
of index ranges.
Assume the following setup::
>>> i = IdxSym('i'); j = IdxSym('j')
>>> ket_i = BasisKet(FockIndex(i), hs=0)
>>> ket_j = BasisKet(FockIndex(j), hs=0)
>>> hs0 = LocalSpace('0')
Giving `i` as the only argument will sum over the indices of the basis
states of the Hilbert space of `term`::
>>> s = Sum(i)(ket_i)
>>> unicode(s)
'∑_{i ∈ ℌ₀} |i⟩⁽⁰⁾'
You may also specify a Hilbert space manually::
>>> Sum(i, hs0)(ket_i) == Sum(i, hs=hs0)(ket_i) == s
True
Note that using :func:`Sum` is vastly more readable than the equivalent
"manual" instantiation::
>>> s == KetIndexedSum.create(ket_i, IndexOverFockSpace(i, hs=hs0))
True
By nesting calls to `Sum`, you can instantiate sums running over multiple
indices::
>>> unicode( Sum(i)(Sum(j)(ket_i * ket_j.dag())) )
'∑_{i,j ∈ ℌ₀} |i⟩⟨j|⁽⁰⁾'
Giving two integers in addition to the index `i` in `args`, the index will
run between the two values::
>>> unicode( Sum(i, 1, 10)(ket_i) )
'∑_{i=1}^{10} |i⟩⁽⁰⁾'
>>> Sum(i, 1, 10)(ket_i) == Sum(i, 1, to=10)(ket_i)
True
You may also include an optional step width, either as a third integer or
using the `step` keyword argument.
>>> #unicode( Sum(i, 1, 10, step=2)(ket_i) ) # TODO
Lastly, by passing a tuple or list of values, the index will run over all
the elements in that tuple or list::
>>> unicode( Sum(i, (1, 2, 3))(ket_i))
'∑_{i ∈ {1,2,3}} |i⟩⁽⁰⁾'
"""
from qnet.algebra.core.hilbert_space_algebra import LocalSpace
from qnet.algebra.core.scalar_algebra import ScalarValue
from qnet.algebra.library.spin_algebra import SpinSpace
dispatch_table = {
tuple(): _sum_over_fockspace,
(LocalSpace, ): _sum_over_fockspace,
(SpinSpace, ): _sum_over_fockspace,
(list, ): _sum_over_list,
(tuple, ): _sum_over_list,
(int, ): _sum_over_range,
(int, int): _sum_over_range,
(int, int, int): _sum_over_range,
}
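# dispatch on the types of *args: no args or a Hilbert space -> sum
# over Fock-space indices; a list/tuple -> sum over its values;
# one to three ints -> sum over a range (start, stop[, step])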
key = tuple((type(arg) for arg in args))
try:
idx_range_func = dispatch_table[key]
except KeyError:
raise TypeError("No implementation for args of type %s" % str(key))
def sum(term):
if isinstance(term, ScalarValue._val_types):
term = ScalarValue.create(term)
idx_range = idx_range_func(term, idx, *args, **kwargs)
return term._indexed_sum_cls.create(term, idx_range)
return sum
def ensure_local_space(hs, cls=LocalSpace):
"""Ensure that the given `hs` is an instance of :class:`LocalSpace`.
If `hs` an instance of :class:`str` or :class:`int`, it will be converted
to a `cls` (if possible). If it already is an instance of `cls`, `hs`
will be returned unchanged.
Args:
hs (HilbertSpace or str or int): The Hilbert space (or label) to
convert/check
cls (type): The class to which an int/str label for a Hilbert space
should be converted. Must be a subclass of :class:`LocalSpace`.
Raises:
TypeError: If `hs` is not a :class:`.LocalSpace`, :class:`str`, or
:class:`int`.
Returns:
LocalSpace: original or converted `hs`
Examples:
>>> srepr(ensure_local_space(0))
"LocalSpace('0')"
>>> srepr(ensure_local_space('tls'))
"LocalSpace('tls')"
>>> srepr(ensure_local_space(0, cls=LocalSpace))
"LocalSpace('0')"
>>> srepr(ensure_local_space(LocalSpace(0)))
"LocalSpace('0')"
>>> srepr(ensure_local_space(LocalSpace(0) * LocalSpace(1)))
Traceback (most recent call last):
...
TypeError: hs must be an instance of LocalSpace
"""
if isinstance(hs, (str, int)):
try:
hs = cls(hs)
except TypeError as exc_info:
raise TypeError(
"Cannot convert %s '%s' into a %s instance: %s"
% (hs.__class__.__name__, hs, cls.__name__, str(exc_info)))
if not isinstance(hs, LocalSpace):
raise TypeError("hs must be an instance of LocalSpace")
return hs
def _series_expand_combine_prod(c1, c2, order):
"""Given the result of the ``c1._series_expand(...)`` and
``c2._series_expand(...)``, construct the result of
``(c1*c2)._series_expand(...)``
"""
from qnet.algebra.core.scalar_algebra import Zero
res = []
c1 = list(c1)
c2 = list(c2)
for n in range(order + 1):
summands = []
for k in range(n + 1):
if c1[k].is_zero or c2[n-k].is_zero:
summands.append(Zero)
else:
summands.append(c1[k] * c2[n - k])
sum = summands[0]
for summand in summands[1:]:
if summand != 0:
sum += summand
res.append(sum)
return tuple(res)
def _evaluate_at(expr, sym, val):
try:
# for QuantumDerivative instance
return expr.evaluate_at({sym: val})
except AttributeError:
# for explicit Expression
return expr.substitute({sym: val})
def _expand_product(factors):
eops = [o.expand() for o in factors]
# store tuples of summands of all expanded factors
eopssummands = [
eo.operands if isinstance(eo, eo.__class__._plus_cls) else (eo,)
for eo in eops]
# iterate over a cartesian product of all factor summands, form product
# of each tuple and sum over result
summands = []
for combo in cartesian_product(*eopssummands):
summand = combo[0]
for c in combo[1:]:
summand *= c
summands.append(summand)
ret = summands[0]
for summand in summands[1:]:
ret += summand
if isinstance(ret, ret.__class__._plus_cls):
return ret.expand()
else:
return ret
|
mabuchilab/QNET
|
src/qnet/algebra/core/abstract_quantum_algebra.py
|
Python
|
mit
| 40,354
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 16 18:26:39 2017
@author: prabhu
"""
#from con_re_scipy import congrid
from scipy.io import readsav
from matplotlib import pyplot as plt
import numpy as np
from astropy.io import fits
def rebin(a, shape):
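# Block-average `a` down to `shape`: fold each axis into
# (n_blocks, block_size) and average over the block-size axes.
# e.g. rebin(np.arange(16.).reshape(4, 4), (2, 2)) gives the 2x2
# array of means over each non-overlapping 2x2 block.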
sh = shape[0],a.shape[0]//shape[0],shape[1],a.shape[1]//shape[1]
return a.reshape(sh).mean(-1).mean(1)
data = readsav("/scratch/prabhu/backup_workstation/sunrise_holly/movie_data/mk_magneto_tr2_reduc_rnr_300_22.sav",python_dict=True)
iid = data['iid'] #demodulated and restored
iidn = data['iidn'] #demodulated and not restored
bi = 2
stok,wvln,dimy,dimx = iid.shape
binr = np.zeros(shape = (stok,wvln,int(dimy/bi),int(dimx/bi))) # (y, x) order, matching rebin's output below
binnr = np.zeros(shape = (stok,wvln,int(dimy/bi),int(dimx/bi)))
#binning bi x bi for restored data
for i in range(stok):
for j in range(wvln):
ima = iid[i,j,:,:]
binr[i,j,:,:] = rebin(ima,(int(dimy/bi),int(dimx/bi)))
#binning bi x bi for non-restored data
for i in range(stok):
for j in range(wvln):
iman = iidn[i,j,:,:]
binnr[i,j,:,:] = rebin(iman,(int(dimy/bi),int(dimx/bi)))
# plt.imshow(binr[0,4,:,:],cmap='gray')
# plt.gca().invert_yaxis()
# plt.show()
#creating primary HDU for binned restored data (primary HDU is mandatory in writing fits file)
hdu1 = fits.PrimaryHDU(data=binr)
#creating Image HDU for binned non-restored data
hdu2 = fits.ImageHDU(data=binnr,name="not restored")
#making hdulist to write into a fits file
hdulist = fits.HDUList([hdu1,hdu2])
hdulist.writeto('/scratch/prabhu/backup_workstation/sunrise_holly/binned_cycles/binned_tr2_mk_restor_300_22_' + str(bi) +'.fits')
|
ameya30/IMaX_pole_data_scripts
|
my_scripts/bin_data_pulpo.py
|
Python
|
mit
| 1,645
|
import json
from functools import wraps
from django.conf import settings
from django.http import HttpResponseForbidden
from django.utils.decorators import available_attrs
from betty.authtoken.models import ApiToken
def forbidden():
response_text = json.dumps({'message': 'Not authorized'})
return HttpResponseForbidden(response_text, content_type="application/json")
def betty_token_auth(permissions):
"""
Decorator to make a view only accept particular request methods. Usage::
@require_http_methods(["GET", "POST"])
def my_view(request):
# I can assume now that only GET or POST requests make it this far
# ...
Note that request methods should be in uppercase.
"""
def decorator(func):
@wraps(func, assigned=available_attrs(func))
def inner(request, *args, **kwargs):
if "betty.authtoken" in settings.INSTALLED_APPS:
if request.user.is_anonymous():
if "HTTP_X_BETTY_API_KEY" not in request.META:
return forbidden()
api_key = request.META["HTTP_X_BETTY_API_KEY"]
try:
token = ApiToken.objects.get(public_token=api_key)
except ApiToken.DoesNotExist:
return forbidden()
request.user = token.get_user()
if not request.user.has_perms(permissions):
return forbidden()
return func(request, *args, **kwargs)
return inner
return decorator
|
theonion/betty-cropper
|
betty/cropper/api/decorators.py
|
Python
|
mit
| 1,588
|
##
# @license
# Copyright Neekware Inc. All Rights Reserved.
#
# Use of this source code is governed by an MIT-style license that can be
# found in the LICENSE file at http://neekware.com/license/MIT.html
###
import logging
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext as _
from rest_framework import serializers
from ..utils.common import get_url_name
log = logging.getLogger(__name__)
User = get_user_model()
|
un33k/djangoware
|
api/api_v1/generics/serializers.py
|
Python
|
mit
| 497
|
'''
.. module:: schema
Stores schema configurations, both for unclean and clean schemas
.. moduleauthor:: Christopher Phillippi <c_phillippi@mfe.berkeley.edu>
'''
import filers as filers
import settings as settings
# Filing name constants
SOURCE = "source"
YEAR = "year"
MONTH = "month"
DAY = "day"
PAPER = "paper"
STORE_ORDER = [ SOURCE, YEAR, MONTH, DAY, PAPER ]
#======================================
# Clean Schema
#======================================
def getFilePath( source, paper, month, day, year ):
"""Configures cleaned file system schema
"""
attributes = { SOURCE : source,
PAPER : paper,
MONTH : month,
DAY : day,
YEAR : year
}
return "\\".join( [ settings.CLEAN_STORE ] + [ attributes[ key ] for key in STORE_ORDER ] )
#======================================
# Dirty Schemas
#======================================
def getSchema( sourceDirectory ):
"""Given a sourceDirectory, returns the registered schema.
MUST Register schema here!
Example Usage:
>>> getSchema( 'LexisNexis' )
<__main__.LexisNexisSchema object at 0x022816F0>
"""
if( sourceDirectory == settings.LEXISNEXIS_FILETAG ): return LexisNexisSchema()
raise Exception( "Filer for source <%s> is not registered in getSchema( source )." % ( sourceDirectory ) )
class LexisNexisSchema( object ):
'''API to normalize IO from uncleaned data to cleaned data
'''
class LexisNexisArticleFiler( filers.ArticleFilerBase ):
'''API to store a LexisNexis Article according to afp.settings
'''
paperDateTitleRegex = settings.LEXISNEXIS_REGEX_PAPER_DATE_TITLE
dateRegex = settings.LEXISNEXIS_REGEX_DATE
removeFromTitleRegex = settings.LEXISNEXIS_REGEX_EXCLUDE_FROM_TITLE
schemaName = settings.LEXISNEXIS_FILETAG
sectionDelimiter = settings.LEXISNEXIS_SECTION_DELIMTER
removeFromArticleRegex = settings.LEXISNEXIS_REMOVE_FROM_ARTICLE
def getArticleDelimiter( self ):
return settings.LEXISNEXIS_ARTICLE_DELIMITER
def getArticleFiler( self ):
return self.LexisNexisArticleFiler()
|
ccphillippi/AFP
|
afp/cleaner/schema.py
|
Python
|
mit
| 2,317
|
# repeat_keyword test
def draw_square():
repeat 4:
print('move()')
print('turn_left()')
|
aroberge/python_experiments
|
version1/repeat_sample1.py
|
Python
|
cc0-1.0
| 110
|
import os
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'MODEL1012090002.xml')
with open(sbmlFilePath,'r') as f:
sbmlString = f.read()
def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
if module_exists('libsbml'):
import libsbml
sbml = libsbml.readSBMLFromString(sbmlString)
|
biomodels/MODEL1012090002
|
MODEL1012090002/model.py
|
Python
|
cc0-1.0
| 427
|
#!/usr/bin/python
'''
NPR 2017-12-17
https://www.npr.org/2017/12/17/571421849/sunday-puzzle-capital-letters
Think of a convenience introduced in the 19th century that is still around today.
Its name has two words. Take the first three letters of the first word and the
first letter of its second word, in order, to get a convenience introduced in the
21st century that serves a similar purpose. Their names are otherwise unrelated.
What two conveniences are these?
'''
from nltk.corpus import wordnet as wn
from nltk.tokenize import word_tokenize
from nltk import FreqDist
import sys
sys.path.append('..')
from nprcommontools import get_hypernyms
#%%
# "common" words from Wordnet
# We use words in example sentences
def_corpus = []
for s in wn.all_synsets():
for ex in s.examples():
for w in word_tokenize(ex):
if w.isalpha():
def_corpus.append(w.lower())
fdist = FreqDist(def_corpus)
common_words = set()
NUM_WORDS = 50000
for word, frequency in fdist.most_common(NUM_WORDS):
# Keep only the best short words
if len(word) > 2 or word in ('a','i') or fdist[word] > 100:
common_words.add(word)
#%%
four = set()
two_words = set()
for s in wn.all_synsets():
if s.pos() == 'n':
for l in s.lemma_names():
if l[0] != l[0].upper():
if len(l) == 4 and '_' not in l and l in common_words:
four.add(l)
elif l.count('_') == 1 and l.split('_')[0] in common_words and l.split('_')[1] in common_words:
if 'product' in get_hypernyms(l):
two_words.add(l)
#%%
for t in sorted(two_words):
w1,w2 = t.split('_')
if w1[:3] + w2[0] in four:
print t,w1[:3] + w2[0]
|
boisvert42/npr-puzzle-python
|
2017/1217_conveniences.py
|
Python
|
cc0-1.0
| 1,769
|
#trapSerial.py
#example to run: python trapSerial.py 0.0 1.0 10000
import numpy
import sys
#takes in command-line arguments [a,b,n]
a = float(sys.argv[1])
b = float(sys.argv[2])
n = int(sys.argv[3])
def f(x):
return x*x
def integrateRange(a, b, n):
'''Numerically integrate with the trapezoid rule on the interval from
a to b with n trapezoids.
'''
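# Composite trapezoid rule with h = (b-a)/n:
#   integral ~ h*(f(a)/2 + f(x_1) + ... + f(x_{n-1}) + f(b)/2)
# The loop below adds f at all n+1 endpoints with full weight, so we
# start from -(f(a)+f(b))/2 to give the two endpoints half weight.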
integral = -(f(a) + f(b))/2.0
# n+1 endpoints, but n trapezoids
for x in numpy.linspace(a,b,n+1):
integral = integral + f(x)
integral = integral* (b-a)/n
return integral
integral = integrateRange(a, b, n)
print "With n =", n, "trapezoids, our estimate of the integral\
from", a, "to", b, "is", integral
|
resbaz/hpc
|
trapezoids/trapSerial.py
|
Python
|
cc0-1.0
| 746
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# µScript documentation build configuration file, created by
# sphinx-quickstart on Fri Oct 10 07:28:23 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'µScript'
copyright = '2014, J.David Luque'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinxdoc'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Scriptdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Script.tex', 'µScript Documentation',
'J.David Luque', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'script', 'µScript Documentation',
['J.David Luque'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Script', 'µScript Documentation',
'J.David Luque', 'Script', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
jdavidls/uScript
|
docs/source/conf.py
|
Python
|
cc0-1.0
| 8,292
|
# list type
pedidos = []
# defining functions
def criarPedido(nome, sabor, observacao='sem observacoes'):
# dictionary type
pedido = {}
# adding keys to the dictionary
pedido['nome'] = nome
pedido['sabor'] = sabor
pedido['observacao'] = observacao
return pedido
pedidos.append(criarPedido('mario', 'peperoni'))
pedidos.append(criarPedido('luigi', 'presunto', 'dobro de queijo'))
for pedido in pedidos:
template = 'Nome: {nome}\nSabor: {sabor}'
print(template.format(**pedido))
if pedido['observacao']:
print('Observacao: {}'.format(pedido['observacao']))
|
wesjrock/django-pizza
|
pyexamples/funcoes.py
|
Python
|
cc0-1.0
| 604
|
#!/usr/bin/python
'''
NPR 2017-11-12
https://www.npr.org/2017/11/12/563367879/sunday-puzzle-move-around-to-find-new-meaning
Take the name of a U.S. state capital. Immediately to the right of it write the name
of a world capital. If you have the right ones, the name of a U.S. state will be
embedded in consecutive letters within that letter string. What three places are these?
'''
from nltk.corpus import wordnet as wn, gazetteers
#%%
# US states
US_STATES = frozenset(gazetteers.words('usstates.txt'))
US_STATES_LOWER = frozenset(x.lower().replace(' ','') for x in US_STATES)
# COUNTRIES
COUNTRIES = frozenset(gazetteers.words('countries.txt'))
# State and world capitals
state_capitals = set(); world_capitals = set()
for s in wn.all_synsets():
d = s.definition()
if 'capital' in d:
for state in US_STATES:
if state in d:
for l in s.lemma_names():
if l[0] == l[0].upper() and 'capital' not in l:
state_capitals.add(l.lower())
for country in COUNTRIES:
if country in d:
for l in s.lemma_names():
if l[0] == l[0].upper() and 'capital' not in l:
world_capitals.add(l.lower())
#%%
for c in state_capitals:
for w in world_capitals:
for s in US_STATES_LOWER:
if s in c+w and s not in c and s not in w:
print c,w,s
|
boisvert42/npr-puzzle-python
|
2017/1112_capital_capital_state.py
|
Python
|
cc0-1.0
| 1,421
|
import hosts
print '''
ulimit -n 4096
java -Dcom.sun.management.jmxremote.port=9990 \\
-Dcom.sun.management.jmxremote.ssl=false \\
-Dcom.sun.management.jmxremote.authenticate=false \\
-Dcom.sun.management.jmxremote.local.only=false \\
-Djava.rmi.server.hostname={feed} \\
-jar /home/{user}/newsfeed-dropwizard-1.0.0-SNAPSHOT.jar server /home/{user}/config.yml
'''.format(feed=hosts.settings['feed'],
user=hosts.settings['user'])
|
gengstrand/clojure-news-feed
|
server/aws/build/run3.py
|
Python
|
epl-1.0
| 465
|
"""Generate Java code from an ASDL description."""
# TO DO
# handle fields that have a type but no name
import os, sys, traceback
import asdl
TABSIZE = 4
MAX_COL = 100
def reflow_lines(s, depth):
"""Reflow the line s indented depth tabs.
Return a sequence of lines where no line extends beyond MAX_COL
when properly indented. The first line is properly indented based
exclusively on depth * TABSIZE. All following lines -- these are
the reflowed lines generated by this function -- start at the same
column as the first character beyond the opening { in the first
line.
"""
size = MAX_COL - depth * TABSIZE
if len(s) < size:
return [s]
lines = []
cur = s
padding = ""
while len(cur) > size:
i = cur.rfind(' ', 0, size)
assert i != -1, "Impossible line to reflow: %s" % `s`
lines.append(padding + cur[:i])
if len(lines) == 1:
# find new size based on brace
j = cur.find('{', 0, i)
if j >= 0:
j += 2 # account for the brace and the space after it
size -= j
padding = " " * j
cur = cur[i + 1:]
else:
lines.append(padding + cur)
return lines
class EmitVisitor(asdl.VisitorBase):
"""Visit that emits lines"""
def __init__(self):
super(EmitVisitor, self).__init__()
def open(self, name, refersToSimpleNode=1, useDataOutput=0):
self.file = open("%s.java" % name, "wb")
self.file.write("// Autogenerated AST node\n")
self.file.write('package org.python.pydev.parser.jython.ast;\n')
self.emit("", 0)
if refersToSimpleNode:
self.file.write('import org.python.pydev.parser.jython.SimpleNode;\n')
self.file.write('import java.util.Arrays;\n')
# if useDataOutput:
# print >> self.file, 'import java.io.DataOutputStream;'
# print >> self.file, 'import java.io.IOException;'
self.file.write('\n')
def close(self):
self.file.close()
def emit(self, s, depth):
# XXX reflow long lines?
lines = reflow_lines(s, depth)
for line in lines:
line = (" " * TABSIZE * depth) + line + "\n"
self.file.write(line)
# This step will add a 'simple' boolean attribute to all Sum and Product
# nodes and add a 'typedef' link to each Field node that points to the
# Sum or Product node that defines the field.
class AnalyzeVisitor(EmitVisitor):
index = 0
def makeIndex(self):
self.index += 1
return self.index
def visitModule(self, mod):
self.types = {}
for dfn in mod.dfns:
self.types[str(dfn.name)] = dfn.value
for dfn in mod.dfns:
self.visit(dfn)
def visitType(self, type, depth=0):
self.visit(type.value, type.name, depth)
def visitSum(self, sum, name, depth):
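# a sum is "simple" iff none of its constructor types carry fields;
# simple sums are later emitted as Java interfaces of int constants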
sum.simple = 1
for t in sum.types:
if t.fields:
sum.simple = 0
break
for t in sum.types:
if not sum.simple:
t.index = self.makeIndex()
self.visit(t, name, depth)
def visitProduct(self, product, name, depth):
product.simple = 0
product.index = self.makeIndex()
for f in product.fields:
self.visit(f, depth + 1)
def visitConstructor(self, cons, name, depth):
for f in cons.fields:
self.visit(f, depth + 1)
def visitField(self, field, depth):
field.typedef = self.types.get(str(field.type))
# The code generator itself.
#
class JavaVisitor(EmitVisitor):
def visitModule(self, mod):
for dfn in mod.dfns:
self.visit(dfn)
def visitType(self, type, depth=0):
self.visit(type.value, type.name, depth)
def visitSum(self, sum, name, depth):
if sum.simple:
self.simple_sum(sum, name, depth)
else:
self.sum_with_constructor(sum, name, depth)
def simple_sum(self, sum, name, depth):
self.open("%sType" % name, refersToSimpleNode=0)
self.emit("public interface %(name)sType {" % locals(), depth)
for i in range(len(sum.types)):
type = sum.types[i]
self.emit("public static final int %s = %d;" % (type.name, i + 1),
depth + 1)
self.emit("", 0)
self.emit("public static final String[] %sTypeNames = new String[] {" %
name, depth + 1)
self.emit('"<undef>",', depth + 2)
for type in sum.types:
self.emit('"%s",' % type.name, depth + 2)
self.emit("};", depth + 1)
self.emit("}", depth)
self.close()
def sum_with_constructor(self, sum, name, depth):
self.open("%sType" % name)
self.emit("public abstract class %(name)sType extends SimpleNode {" %
locals(), depth)
#fabioz: HACK WARNING: Moved the suite body to suiteType!
if str(name) == 'suite':
self.emit("public stmtType[] body;", depth + 1)
#HACK WARNING: Moved the suite body to suiteType!
self.emit("}", depth)
self.close()
for t in sum.types:
self.visit(t, name, depth)
def visitProduct(self, product, name, depth):
self.open("%sType" % name, useDataOutput=1)
self.emit("public final class %(name)sType extends SimpleNode {" % locals(), depth)
for f in product.fields:
self.visit(f, depth + 1)
self.emit("", depth)
self.javaMethods(product, name, "%sType" % name, product.fields,
depth + 1)
self.emit("}", depth)
self.close()
def visitConstructor(self, cons, name, depth):
self.open(cons.name, useDataOutput=1)
enums = []
for f in cons.fields:
if f.typedef and f.typedef.simple:
enums.append("%sType" % f.type)
if enums:
s = "implements %s " % ", ".join(enums)
else:
s = ""
self.emit("public final class %s extends %sType %s{" %
(cons.name, name, s), depth)
#fabioz: HACK WARNING: Moved the suite body to suiteType!
if str(name) != 'suite':
for f in cons.fields:
self.visit(f, depth + 1)
#HACK WARNING: Moved the suite body to suiteType!
self.emit("", depth)
self.javaMethods(cons, cons.name, cons.name, cons.fields, depth + 1)
self.emit("}", depth)
self.close()
def javaMethods(self, type, clsname, ctorname, fields, depth):
# The java ctors
fpargs = ", ".join([self.fieldDef(f) for f in fields])
self.emit("public %s(%s) {" % (ctorname, fpargs), depth)
for f in fields:
self.emit("this.%s = %s;" % (f.name, f.name), depth + 1)
if str(ctorname) == 'Suite':
self.emit("if(body != null && body.length > 0){", depth + 1)
self.emit("beginColumn = body[0].beginColumn;", depth + 2)
self.emit("beginLine = body[0].beginLine;", depth + 2)
self.emit("}", depth + 1)
self.emit("}", depth)
self.emit("", 0)
if fpargs:
fpargs += ", "
        #fabioz: Removed the constructor with the parent that set the beginLine/Col, as this wasn't used and added some
#confusion because the parent wasn't properly set -- if a parent is actually set, it's set later in the parsing (because
#the parent is resolved after the child).
        # It generated code like:
# public Attribute(exprType value, NameTokType attr, int ctx, SimpleNode
# parent) {
# this(value, attr, ctx);
# this.beginLine = parent.beginLine;
# this.beginColumn = parent.beginColumn;
# }
# self.emit("public %s(%sSimpleNode parent) {" % (ctorname, fpargs), depth)
# self.emit("this(%s);" %
# ", ".join([str(f.name) for f in fields]), depth+1)
# self.emit("this.beginLine = parent.beginLine;", depth+1);
# self.emit("this.beginColumn = parent.beginColumn;", depth+1);
# self.emit("}", depth)
self.emit("@Override", depth)
self.emit("public int hashCode() {", depth)
self.emit("final int prime = 31;", depth + 1)
self.emit("int result = 1;", depth + 1)
for f in fields:
jType = self.jType(f)
if f.seq:
self.emit("result = prime * result + Arrays.hashCode(%s);" % (f.name,), depth + 1)
elif jType == 'int':
self.emit("result = prime * result + %s;" % (f.name,), depth + 1)
elif jType == 'boolean':
self.emit("result = prime * result + (%s ? 17 : 137);" % (f.name,), depth + 1)
else:
self.emit("result = prime * result + ((%s == null) ? 0 : %s.hashCode());" % (f.name, f.name), depth + 1)
self.emit("return result;", depth+1)
self.emit("}", depth)
#equals()
self.emit("", 0)
self.emit("@Override", depth)
self.emit("public boolean equals(Object obj) {", depth)
self.emit("if (this == obj) return true;", depth + 1)
self.emit("if (obj == null) return false;", depth + 1)
self.emit("if (getClass() != obj.getClass()) return false;", depth + 1)
self.emit("%s other = (%s) obj;" % (ctorname, ctorname,), depth + 1)
for f in fields:
jType = self.jType(f)
if f.seq:
self.emit('if (!Arrays.equals(%s, other.%s)) return false;' % (f.name, f.name,), depth + 1)
elif jType in ('int', 'boolean'):
self.emit('if(this.%s != other.%s) return false;' % (f.name, f.name,), depth + 1)
else:
self.emit('if (%s == null) { if (other.%s != null) return false;}' % (f.name, f.name,), depth + 1)
self.emit('else if (!%s.equals(other.%s)) return false;' % (f.name, f.name,), depth + 1)
self.emit("return true;", depth + 1)
self.emit("}", depth)
#createCopy()
self.emit("@Override", depth)
self.emit("public %s createCopy() {" % (ctorname,), depth)
self.emit("return createCopy(true);", depth + 1)
self.emit("}", depth)
self.emit("@Override", depth)
self.emit("public %s createCopy(boolean copyComments) {" % (ctorname,), depth)
params = []
copy_i = 0
for f in fields:
jType = self.jType(f)
if jType in ('int', 'boolean', 'String', 'Object'):
if f.seq:
self.emit('%s[] new%s;' % (jType, copy_i), depth + 1)
self.emit('if(this.%s != null){' % (f.name,), depth + 1)
#int[] new0 = new int[this.ops.length];
#System.arraycopy(this.ops, 0, new0, 0, this.ops.length);
self.emit('new%s = new %s[this.%s.length];' % (copy_i, jType, f.name), depth + 2)
self.emit('System.arraycopy(this.%s, 0, new%s, 0, this.%s.length);' % (f.name, copy_i, f.name), depth + 2)
self.emit('}else{', depth + 1)
self.emit('new%s = this.%s;' % (copy_i, f.name), depth + 2)
self.emit('}', depth + 1)
params.append('new%s' % (copy_i,))
copy_i += 1
else:
params.append(str(f.name))
else:
if f.seq:
#comprehensionType[] new0 = new comprehensionType[this.generators.length];
#for(int i=0;i<this.generators.length;i++){
# new0[i] = (comprehensionType) this.generators[i] != null?this.generators[i].createCopy():null;
#}
self.emit('%s[] new%s;' % (jType, copy_i), depth + 1)
self.emit('if(this.%s != null){' % (f.name,), depth + 1)
self.emit('new%s = new %s[this.%s.length];' % (copy_i, jType, f.name), depth + 1)
self.emit('for(int i=0;i<this.%s.length;i++){' % (f.name), depth + 1)
self.emit('new%s[i] = (%s) (this.%s[i] != null? this.%s[i].createCopy(copyComments):null);' % (copy_i, jType, f.name, f.name), depth + 2)
self.emit('}', depth + 1)
self.emit('}else{', depth + 1)
self.emit('new%s = this.%s;' % (copy_i, f.name), depth + 2)
self.emit('}', depth + 1)
params.append('new%s' % (copy_i,))
copy_i += 1
else:
params.append('%s!=null?(%s)%s.createCopy(copyComments):null' % (f.name, jType, f.name))
params = ", ".join(params)
self.emit("%s temp = new %s(%s);" %
(ctorname, ctorname, params), depth + 1)
self.emit("temp.beginLine = this.beginLine;", depth + 1);
self.emit("temp.beginColumn = this.beginColumn;", depth + 1);
def EmitSpecials(s):
self.emit('if(this.specials%s != null && copyComments){' % s, depth + 1)
self.emit(' for(Object o:this.specials%s){' % s, depth + 1)
self.emit(' if(o instanceof commentType){', depth + 1)
self.emit(' commentType commentType = (commentType) o;', depth + 1)
self.emit(' temp.getSpecials%s().add(commentType.createCopy(copyComments));' % s, depth + 1)
self.emit(' }', depth + 1)
self.emit(' }', depth + 1)
self.emit('}', depth + 1)
EmitSpecials('Before')
EmitSpecials('After')
self.emit("return temp;", depth + 1);
self.emit("}", depth)
self.emit("", 0)
# The toString() method
self.emit("@Override", depth)
self.emit("public String toString() {", depth)
self.emit('StringBuffer sb = new StringBuffer("%s[");' % clsname,
depth + 1)
for f in fields:
self.emit('sb.append("%s=");' % f.name, depth + 1)
if not self.bltinnames.has_key(str(f.type)) and f.typedef.simple:
self.emit("sb.append(dumpThis(this.%s, %sType.%sTypeNames));" %
(f.name, f.type, f.type), depth + 1)
else:
self.emit("sb.append(dumpThis(this.%s));" % f.name, depth + 1)
if f != fields[-1]:
self.emit('sb.append(", ");', depth + 1)
self.emit('sb.append("]");', depth + 1)
self.emit("return sb.toString();", depth + 1)
self.emit("}", depth)
self.emit("", 0)
# # The pickle() method -- commented out, as it's not used within Pydev
# self.emit("public void pickle(DataOutputStream ostream) throws IOException {", depth)
# self.emit("pickleThis(%s, ostream);" % type.index, depth+1);
# for f in fields:
# self.emit("pickleThis(this.%s, ostream);" % f.name, depth+1)
# self.emit("}", depth)
# self.emit("", 0)
# The accept() method
self.emit("@Override", depth)
self.emit("public Object accept(VisitorIF visitor) throws Exception {", depth)
if clsname == ctorname:
self.emit('return visitor.visit%s(this);' % clsname, depth + 1)
else:
self.emit('if (visitor instanceof VisitorBase) {', depth + 1)
self.emit('((VisitorBase) visitor).traverse(this);', depth + 2)
            self.emit('} else {', depth + 1)
self.emit('traverse(visitor);', depth + 2)
self.emit('}', depth + 1)
            self.emit('return null;', depth + 1)
self.emit("}", depth)
self.emit("", 0)
# The visitChildren() method
self.emit("@Override", depth)
self.emit("public void traverse(VisitorIF visitor) throws Exception {", depth)
for f in fields:
if self.bltinnames.has_key(str(f.type)):
continue
if f.typedef.simple:
continue
if f.seq:
self.emit('if (%s != null) {' % f.name, depth + 1)
self.emit('for (int i = 0; i < %s.length; i++) {' % f.name,
depth + 2)
self.emit('if (%s[i] != null) {' % f.name, depth + 3)
self.emit('%s[i].accept(visitor);' % f.name, depth + 4)
                self.emit('}', depth + 3)
self.emit('}', depth + 2)
self.emit('}', depth + 1)
else:
self.emit('if (%s != null) {' % f.name, depth + 1)
self.emit('%s.accept(visitor);' % f.name, depth + 2)
                self.emit('}', depth + 1)
self.emit('}', depth)
self.emit("", 0)
def visitField(self, field, depth):
self.emit("public %s;" % self.fieldDef(field), depth)
bltinnames = {
'bool' : 'boolean',
'int' : 'int',
'identifier' : 'String',
'string' : 'String',
'object' : 'Object', # was PyObject
}
def jType(self, field):
jtype = str(field.type)
if field.typedef and field.typedef.simple:
jtype = 'int'
else:
jtype = self.bltinnames.get(jtype, jtype + 'Type')
return jtype
def fieldDef(self, field):
jtype = self.jType(field)
name = field.name
seq = field.seq and "[]" or ""
return "%(jtype)s%(seq)s %(name)s" % locals()
class VisitorVisitor(EmitVisitor):
def __init__(self):
EmitVisitor.__init__(self)
self.ctors = []
def visitModule(self, mod):
for dfn in mod.dfns:
self.visit(dfn)
self.open("VisitorIF", refersToSimpleNode=0)
self.emit('public interface VisitorIF {', 0)
for ctor in self.ctors:
self.emit("public Object visit%s(%s node) throws Exception;" %
(ctor, ctor), 1)
self.emit('}', 0)
self.close()
self.open("ISimpleNodeSwitch", refersToSimpleNode=0)
self.emit('public interface ISimpleNodeSwitch {', 0)
for ctor in self.ctors:
self.emit("public void visit(%s node);" %
(ctor,), 1)
self.emit('}', 0)
self.close()
self.open("VisitorBase")
self.emit('public abstract class VisitorBase implements VisitorIF {', 0)
for ctor in self.ctors:
self.emit("public Object visit%s(%s node) throws Exception {" %
(ctor, ctor), 1)
self.emit("Object ret = unhandled_node(node);", 2)
self.emit("traverse(node);", 2)
self.emit("return ret;", 2)
self.emit('}', 1)
self.emit('', 0)
self.emit("abstract protected Object unhandled_node(SimpleNode node) throws Exception;", 1)
self.emit("abstract public void traverse(SimpleNode node) throws Exception;", 1)
self.emit('}', 0)
self.close()
def visitType(self, type, depth=1):
self.visit(type.value, type.name, depth)
def visitSum(self, sum, name, depth):
if not sum.simple:
for t in sum.types:
self.visit(t, name, depth)
def visitProduct(self, product, name, depth):
pass
def visitConstructor(self, cons, name, depth):
self.ctors.append(cons.name)
class ChainOfVisitors:
def __init__(self, *visitors):
self.visitors = visitors
def visit(self, object):
for v in self.visitors:
v.visit(object)
if __name__ == "__main__":
if len(sys.argv) < 2:
sys.argv.append('Python.asdl')
mod = asdl.parse(sys.argv[1])
if not asdl.check(mod):
sys.exit(1)
c = ChainOfVisitors(AnalyzeVisitor(),
JavaVisitor(),
VisitorVisitor())
c.visit(mod)
|
akurtakov/Pydev
|
plugins/org.python.pydev.parser/src/org/python/pydev/parser/jython/ast/asdl_java.py
|
Python
|
epl-1.0
| 20,001
|
class Solution(object):
def combinationSum4(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
        # dp[i] counts the ordered combinations of nums that sum to i.
        dp = [0 for _ in xrange(target + 1)]
        dp[0] = 1  # one way to reach 0: the empty combination
        for i in xrange(1, target + 1):
            for j in nums:
                if j <= i:
                    # every combination reaching i - j extends with j
                    dp[i] += dp[i - j]
        return dp[target]
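# Hypothetical driver, not part of the original file: for nums = [1, 2, 3]
# and target = 4 the recurrence fills dp = [1, 1, 2, 4, 7].
if __name__ == '__main__':
    print Solution().combinationSum4([1, 2, 3], 4)  # expected: 7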
|
Jacy-Wang/MyLeetCode
|
CombinationSumIV377.py
|
Python
|
gpl-2.0
| 391
|
from routersploit.modules.exploits.cameras.honeywell.hicc_1100pt_password_disclosure import Exploit
def test_success(target):
""" Test scenario: successful check """
route_mock = target.get_route_mock("/cgi-bin/readfile.cgi", methods=["GET"])
route_mock.return_value = (
'var Adm_ID="admin";'
'var Adm_Pass1="admin";'
'var Adm_Pass2="admin";'
'var Language="en";'
'var Logoff_Time="0";'
)
exploit = Exploit()
assert exploit.target == ""
assert exploit.port == 80
exploit.target = target.host
exploit.port = target.port
assert exploit.check()
assert exploit.run() is None
|
dasseclab/dasseclab
|
clones/routersploit/tests/exploits/cameras/honeywell/test_hicc_1100pt_password_disclosure.py
|
Python
|
gpl-2.0
| 662
|
from setuptools import setup, find_packages
from os.path import join, dirname
setup(
name='avitopub',
version='1.0.1',
description="Avito auto publish",
author="Denis Epifanov",
author_email="epifanov.denis@gmail.com",
license="MIT",
py_modules=['avitopub'],
    scripts=['avitopub.py'],  # note: 'scripts' (a list) is the valid setup() keyword
long_description=open(join(dirname(__file__), 'README.md')).read(),
entry_points={
'console_scripts':
['avitopub = avitopub:main']
},
install_requires=['grab', 'lxml'],
keywords=['avito', ],
platforms="all",
classifiers=[
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Communications :: Email',
'Topic :: Utilities',],
)
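# Illustrative usage of this setup script (standard setuptools commands):
#   python setup.py sdist      # build a source distribution
#   python setup.py install    # install; exposes the 'avitopub' console script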
|
den-gts/avito-autopub
|
setup.py
|
Python
|
gpl-2.0
| 1,119
|
'''
Created on 11 May 2016
@author: wnm24546
'''
import matplotlib.pyplot as plt
import numpy as np
def makePlot(a=1, b=1.5):
fig = plt.figure()
ax = fig.add_axes([0.12, 0.12, 0.76, 0.76], polar=True)
    # Parametrized by radius: theta = a + b*r is equivalent to the
    # Archimedean spiral r = (theta - a) / b.
    r = np.arange(0, 8 * np.pi, 0.01)
    theta = a + b * r
    ax.plot(theta, r, color='#00c000', lw=2)
plt.show()
fig.savefig(r"ArchimSpiral.svg")
makePlot()
|
mtwharmby/assorted-scripts
|
PythonPlayground/Spirals/Archimedean.py
|
Python
|
gpl-2.0
| 404
|
# Royal Render Plugin script for Nuke 5+
# Author: Royal Render, Holger Schoenberger, Binary Alchemy
# Last change: v 7.0.24
# Copyright (c) Holger Schoenberger - Binary Alchemy
# rrInstall_Copy: \plugins\
# rrInstall_Change_File_delete: \plugins\menu.py, before "# Help menu", "m = menubar.addMenu(\"RRender\");\nm.addCommand(\"Submit Comp\", \"nuke.load('rrSubmit_Nuke_5'), rrSubmit_Nuke_5()\")\n\n"
# rrInstall_Change_File: \plugins\menu.py, before "# Help menu", "m = menubar.addMenu(\"RRender\");\nm.addCommand(\"Submit Comp\", \"nuke.load('rrSubmit_Nuke_5'), rrSubmit_Nuke()\")\nm.addCommand(\"Submit Shotgun Nodes\", \"nuke.load('rrSubmit_Nuke_5'), rrSubmit_Nuke_Shotgun()\")\n\n"
import nuke
import os
import sys
import platform
import random
import string
from xml.etree.ElementTree import ElementTree, Element, SubElement
#####################################################################################
# These functions have to be changed if an app should show info and error dialog boxes  #
#####################################################################################
def writeInfo(msg):
print(msg)
nuke.message(msg)
def writeError(msg):
# print(msg)
nuke.message(msg)
##############################################
# JOB CLASS #
##############################################
class rrJob(object):
def __init__(self):
self.clear()
def clear(self):
self.version = ""
self.software = ""
self.renderer = ""
self.RequiredLicenses = ""
self.sceneName = ""
self.sceneDatabaseDir = ""
self.seqStart = 0
self.seqEnd = 100
self.seqStep = 1
self.seqFileOffset = 0
self.seqFrameSet = ""
self.imageWidth = 99
self.imageHeight = 99
self.imageDir = ""
self.imageFileName = ""
self.imageFramePadding = 4
self.imageExtension = ""
self.imagePreNumberLetter = ""
self.imageSingleOutput = False
self.imageStereoR = ""
self.imageStereoL = ""
self.sceneOS = ""
self.camera = ""
self.layer = ""
self.channel = ""
self.maxChannels = 0
self.channelFileName = []
self.channelExtension = []
self.isActive = False
self.sendAppBit = ""
self.preID = ""
self.waitForPreID = ""
self.CustomA = ""
self.CustomB = ""
self.CustomC = ""
self.LocalTexturesFile = ""
# from infix.se (Filip Solomonsson)
def indent(self, elem, level=0):
i = "\n" + level * ' '
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
for e in elem:
self.indent(e, level + 1)
if not e.tail or not e.tail.strip():
e.tail = i + " "
if not e.tail or not e.tail.strip():
e.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
return True
def subE(self, r, e, text):
sub = SubElement(r, e)
if (type(text) == unicode ):
sub.text = text.encode('utf8')
else:
sub.text = str(text).decode("utf8")
return sub
def writeToXMLstart(self, submitOptions ):
rootElement = Element("rrJob_submitFile")
rootElement.attrib["syntax_version"] = "6.0"
self.subE(rootElement, "DeleteXML", "1")
self.subE(rootElement, "SubmitterParameter", submitOptions)
        # YOU CAN ADD OTHER, NON-SCENE-INFORMATION PARAMETERS USING THIS FORMAT:
        # self.subE(rootElement, "SubmitterParameter", "PARAMETERNAME=" + PARAMETERVALUE_AS_STRING)
return rootElement
def writeToXMLJob(self, rootElement):
jobElement = self.subE(rootElement, "Job", "")
self.subE(jobElement, "Software", self.software)
self.subE(jobElement, "Renderer", self.renderer)
self.subE(jobElement, "RequiredLicenses", self.RequiredLicenses)
self.subE(jobElement, "Version", self.version)
self.subE(jobElement, "SceneName", self.sceneName)
self.subE(jobElement, "SceneDatabaseDir", self.sceneDatabaseDir)
self.subE(jobElement, "IsActive", self.isActive)
self.subE(jobElement, "SeqStart", self.seqStart)
self.subE(jobElement, "SeqEnd", self.seqEnd)
self.subE(jobElement, "SeqStep", self.seqStep)
self.subE(jobElement, "SeqFileOffset", self.seqFileOffset)
self.subE(jobElement, "SeqFrameSet", self.seqFrameSet)
self.subE(jobElement, "ImageWidth", int(self.imageWidth))
self.subE(jobElement, "ImageHeight", int(self.imageHeight))
self.subE(jobElement, "ImageDir", self.imageDir)
self.subE(jobElement, "ImageFilename", self.imageFileName)
self.subE(jobElement, "ImageFramePadding", self.imageFramePadding)
self.subE(jobElement, "ImageExtension", self.imageExtension)
self.subE(jobElement, "ImageSingleOutput", self.imageSingleOutput)
self.subE(jobElement, "ImagePreNumberLetter", self.imagePreNumberLetter)
self.subE(jobElement, "ImageStereoR", self.imageStereoR)
self.subE(jobElement, "ImageStereoL", self.imageStereoL)
self.subE(jobElement, "SceneOS", self.sceneOS)
self.subE(jobElement, "Camera", self.camera)
self.subE(jobElement, "Layer", self.layer)
self.subE(jobElement, "Channel", self.channel)
self.subE(jobElement, "SendAppBit", self.sendAppBit)
self.subE(jobElement, "PreID", self.preID)
self.subE(jobElement, "WaitForPreID", self.waitForPreID)
self.subE(jobElement, "CustomA", self.CustomA)
self.subE(jobElement, "CustomB", self.CustomB)
self.subE(jobElement, "CustomC", self.CustomC)
self.subE(jobElement, "LocalTexturesFile", self.LocalTexturesFile)
for c in range(0,self.maxChannels):
self.subE(jobElement,"ChannelFilename",self.channelFileName[c])
self.subE(jobElement,"ChannelExtension",self.channelExtension[c])
return True
def writeToXMLEnd(self, f,rootElement):
xml = ElementTree(rootElement)
self.indent(xml.getroot())
if not f == None:
xml.write(f)
f.close()
else:
print("No valid file has been passed to the function")
try:
f.close()
except:
pass
return False
return True
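    # Assumed usage of the three writeToXML* steps above (mirrors
    # submitJobsToRR() below):
    #
    #   job = rrJob()
    #   root = job.writeToXMLstart("")       # root element + submit options
    #   job.writeToXMLJob(root)              # one <Job> element per job
    #   job.writeToXMLEnd(open(tmpName, "w"), root)  # pretty-print and write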
##############################################
# Global Functions #
##############################################
def getRR_Root():
if os.environ.has_key('RR_ROOT'):
return os.environ['RR_ROOT']
HCPath="%"
if ((sys.platform.lower() == "win32") or (sys.platform.lower() == "win64")):
HCPath="%RRLocationWin%"
elif (sys.platform.lower() == "darwin"):
HCPath="%RRLocationMac%"
else:
HCPath="%RRLocationLx%"
if HCPath[0]!="%":
return HCPath
writeError("This plugin was not installed via rrWorkstationInstaller!")
def getNewTempFileName():
random.seed()
if ((sys.platform.lower() == "win32") or (sys.platform.lower() == "win64")):
if os.environ.has_key('TEMP'):
nam=os.environ['TEMP']
else:
nam=os.environ['TMP']
nam+="\\"
else:
nam="/tmp/"
nam+="rrSubmitNuke_"
nam+=str(random.randrange(1000,10000,1))
nam+=".xml"
return nam
def getRRSubmitterPath():
''' returns the rrSubmitter filename '''
rrRoot = getRR_Root()
if ((sys.platform.lower() == "win32") or (sys.platform.lower() == "win64")):
rrSubmitter = rrRoot+"\\win__rrSubmitter.bat"
elif (sys.platform.lower() == "darwin"):
rrSubmitter = rrRoot+"/bin/mac/rrSubmitter.app/Contents/MacOS/rrSubmitter"
else:
rrSubmitter = rrRoot+"/lx__rrSubmitter.sh"
return rrSubmitter
def getOSString():
if ((sys.platform.lower() == "win32") or (sys.platform.lower() == "win64")):
return "win"
elif (sys.platform.lower() == "darwin"):
return "osx"
else:
return "lx"
def submitJobsToRR(jobList,submitOptions):
tmpFileName = getNewTempFileName()
tmpFile = open(tmpFileName, "w")
xmlObj= jobList[0].writeToXMLstart(submitOptions)
for submitjob in jobList:
submitjob.writeToXMLJob(xmlObj)
ret = jobList[0].writeToXMLEnd(tmpFile,xmlObj)
if ret:
#writeInfo("Job written to " + tmpFile.name)
pass
else:
writeError("Error - There was a problem writing the job file to " + tmpFile.name)
os.system(getRRSubmitterPath()+" \""+tmpFileName+"\"")
###########################################
# Read Nuke file #
###########################################
def rrSubmit_fillGlobalSceneInfo(newJob):
newJob.version = nuke.NUKE_VERSION_STRING
newJob.software = "Nuke"
newJob.sceneOS = getOSString()
newJob.sceneName = nuke.root().name()
newJob.seqStart = nuke.root().firstFrame()
newJob.seqEnd = nuke.root().lastFrame()
newJob.imageFileName = ""
def isGizmo(node):
gizmo = isinstance(node, nuke.Gizmo)
return gizmo
def isScriptedOutput(pathScripted, gizmo):
pathScripted=pathScripted.lower()
if ( (pathScripted.find("root.name")>=0) or (pathScripted.find("root().name")>=0) ):
return True
if (gizmo and (pathScripted.find("[value")>=0)):
return True
return False
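# Illustrative inputs for isScriptedOutput() (hypothetical paths):
#   "[python root.name()]_comp.####.exr"  -> True  (references root.name)
#   "[value knob.output]/comp.####.exr"   -> True  only when gizmo is set
#   "/renders/shotA/comp.####.exr"        -> False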
def getAllWriteNodes():
allNo=nuke.allNodes()
writeNo=[]
for gz in allNo:
if isGizmo(gz):
with gz:
gList = nuke.allNodes('Write') + nuke.allNodes('DeepWrite')
for gnode in gList:
if (gnode['disable'].value()):
continue
pathScripted=gnode['file'].value()
if ((pathScripted== None) or (len(pathScripted)<3)):
continue
writeNo.append(gz)
break
writeNo=writeNo+ nuke.allNodes('Write') + nuke.allNodes('DeepWrite')
return writeNo
def rrSubmit_CreateAllJob(jobList,noLocalSceneCopy):
newJob= rrJob()
rrSubmit_fillGlobalSceneInfo(newJob)
nList = getAllWriteNodes()
mainNode = True
nViews=nuke.views()
useStereoFlag=False
if (len(nViews)==2):
useStereoFlag=True
newJob.imageStereoR=nViews[0]
newJob.imageStereoL=nViews[1]
for node in nList:
if (node['disable'].value()):
continue
pathScripted=""
writeNode = node
if isGizmo(node):
with node:
gList = nuke.allNodes('Write') + nuke.allNodes('DeepWrite')
for gnode in gList:
if (gnode['disable'].value()):
continue
pathScripted=gnode['file'].value()
if ((pathScripted== None) or (len(pathScripted)<3)):
continue
writeNode = gnode
if (isScriptedOutput(pathScripted,True)):
noLocalSceneCopy[0]=True
else:
pathScripted=writeNode['file'].value()
if ((pathScripted== None) or (len(pathScripted)<3)):
continue
if (mainNode):
if (writeNode['use_limit'].value()):
newJob.seqStart = writeNode['first'].value()
newJob.seqEnd = writeNode['last'].value()
newJob.imageFileName= nuke.filename(writeNode)
if (newJob.seqStart==newJob.seqEnd and (newJob.imageFileName.find("#")<0)):
newJob.imageSingleOutput = True
if (useStereoFlag):
if (newJob.imageFileName.find("%V")>=0):
newJob.imageFileName = string.replace(newJob.imageFileName,"%V","<Stereo>")
elif (newJob.imageFileName.find("%v")>=0):
newJob.imageFileName = string.replace(newJob.imageFileName,"%v","<Stereo>")
newJob.imageStereoR=newJob.imageStereoR[0]
newJob.imageStereoL=newJob.imageStereoL[0]
else:
useStereoFlag=False
mainNode = False
else:
newJob.maxChannels= newJob.maxChannels + 1
if (useStereoFlag):
newJob.channelFileName.append(string.replace(string.replace(nuke.filename(writeNode),"%v","<Stereo>"),"%V","<Stereo>"))
else:
newJob.channelFileName.append(string.replace(string.replace(nuke.filename(writeNode),"%v",nViews[0][0]),"%V",nViews[0]))
newJob.channelExtension.append("")
if (not useStereoFlag):
if ( (newJob.imageFileName.find("%V")>=0) or (newJob.imageFileName.find("%v")>=0)):
for vn in range(1, len(nViews)):
newJob.maxChannels= newJob.maxChannels + 1
newJob.channelFileName.append(string.replace(string.replace(newJob.imageFileName,"%v",nViews[vn][0]),"%V",nViews[vn]))
newJob.channelExtension.append("")
newJob.imageFileName = string.replace(string.replace(newJob.imageFileName,"%v",nViews[0][0]),"%V",nViews[0])
    #if there is an .avi output, place it as the main output so RR knows that this job can only be sent to one client at once
for C in range(0, newJob.maxChannels):
if (newJob.channelFileName[C].endswith(".avi") or newJob.channelFileName[C].endswith(".mov")):
tempName=newJob.channelFileName[C]
newJob.channelFileName[C]=newJob.imageFileName
newJob.imageFileName=tempName
break
newJob.layer= "** All **"
newJob.isActive = True
jobList.append(newJob)
def rrSubmit_CreateSingleJobs_Node(jobList,noLocalSceneCopy, node):
nViews=nuke.views()
if (node['disable'].value()):
return
pathScripted=""
writeNode = node
writeNodeName = writeNode['name'].value()
if isGizmo(node):
with node:
gList = nuke.allNodes('Write') + nuke.allNodes('DeepWrite')
for gnode in gList:
if (gnode['disable'].value()):
continue
pathScripted=gnode['file'].value()
if ((pathScripted== None) or (len(pathScripted)<3)):
continue
writeNode = gnode
if (isScriptedOutput(pathScripted,True)):
noLocalSceneCopy[0]=True
else:
pathScripted=writeNode['file'].value()
if ((pathScripted== None) or (len(pathScripted)<3)):
return
newJob= rrJob()
rrSubmit_fillGlobalSceneInfo(newJob)
useStereoFlag=False
if (len(nViews)==2):
useStereoFlag=True
newJob.imageStereoR=nViews[0]
newJob.imageStereoL=nViews[1]
if (writeNode['use_limit'].value()):
newJob.seqStart = writeNode['first'].value()
newJob.seqEnd = writeNode['last'].value()
newJob.imageFileName= nuke.filename(writeNode)
if ((newJob.imageFileName== None) or (len(newJob.imageFileName)<3)):
return
if (newJob.seqStart==newJob.seqEnd and (newJob.imageFileName.find("#")<0)):
newJob.imageSingleOutput = True
if (useStereoFlag):
if (newJob.imageFileName.find("%V")>=0):
newJob.imageFileName = string.replace(newJob.imageFileName,"%V","<Stereo>")
elif (newJob.imageFileName.find("%v")>=0):
newJob.imageFileName = string.replace(newJob.imageFileName,"%v","<Stereo>")
newJob.imageStereoR=newJob.imageStereoR[0]
newJob.imageStereoL=newJob.imageStereoL[0]
else:
useStereoFlag=False
elif ( (newJob.imageFileName.find("%V")>=0) or (newJob.imageFileName.find("%v")>=0)):
for vn in range(1, len(nViews)):
newJob.maxChannels= newJob.maxChannels + 1
newJob.channelFileName.append(string.replace(string.replace(newJob.imageFileName,"%v",nViews[vn][0]),"%V",nViews[vn]))
newJob.channelExtension.append("")
newJob.imageFileName = string.replace(string.replace(newJob.imageFileName,"%v",nViews[0][0]),"%V",nViews[0])
newJob.layer= writeNodeName
newJob.isActive = False
jobList.append(newJob)
def rrSubmit_CreateSingleJobs(jobList,noLocalSceneCopy):
nList = getAllWriteNodes()
nViews=nuke.views()
for node in nList:
if (node['disable'].value()):
continue
pathScripted=""
writeNode = node
writeNodeName = writeNode['name'].value()
if isGizmo(node):
with node:
gList = nuke.allNodes('Write') + nuke.allNodes('DeepWrite')
for gnode in gList:
if (gnode['disable'].value()):
continue
pathScripted=gnode['file'].value()
if ((pathScripted== None) or (len(pathScripted)<3)):
continue
writeNode = gnode
if (isScriptedOutput(pathScripted,True)):
noLocalSceneCopy[0]=True
else:
pathScripted=writeNode['file'].value()
if ((pathScripted== None) or (len(pathScripted)<3)):
continue
newJob= rrJob()
rrSubmit_fillGlobalSceneInfo(newJob)
useStereoFlag=False
if (len(nViews)==2):
useStereoFlag=True
newJob.imageStereoR=nViews[0]
newJob.imageStereoL=nViews[1]
if (writeNode['use_limit'].value()):
newJob.seqStart = writeNode['first'].value()
newJob.seqEnd = writeNode['last'].value()
newJob.imageFileName= nuke.filename(writeNode)
if ((newJob.imageFileName== None) or (len(newJob.imageFileName)<3)):
continue
if (newJob.seqStart==newJob.seqEnd and (newJob.imageFileName.find("#")<0)):
newJob.imageSingleOutput = True
if (useStereoFlag):
if (newJob.imageFileName.find("%V")>=0):
newJob.imageFileName = string.replace(newJob.imageFileName,"%V","<Stereo>")
elif (newJob.imageFileName.find("%v")>=0):
newJob.imageFileName = string.replace(newJob.imageFileName,"%v","<Stereo>")
newJob.imageStereoR=newJob.imageStereoR[0]
newJob.imageStereoL=newJob.imageStereoL[0]
else:
useStereoFlag=False
elif ( (newJob.imageFileName.find("%V")>=0) or (newJob.imageFileName.find("%v")>=0)):
for vn in range(1, len(nViews)):
newJob.maxChannels= newJob.maxChannels + 1
newJob.channelFileName.append(string.replace(string.replace(newJob.imageFileName,"%v",nViews[vn][0]),"%V",nViews[vn]))
newJob.channelExtension.append("")
newJob.imageFileName = string.replace(string.replace(newJob.imageFileName,"%v",nViews[0][0]),"%V",nViews[0])
newJob.layer= writeNodeName
newJob.isActive = False
jobList.append(newJob)
def rrSubmit_CreateSingleJobs_shotgun(jobList,noLocalSceneCopy):
import sgtk
eng = sgtk.platform.current_engine()
app = eng.apps["tk-nuke-writenode"]
nList = app.get_write_nodes()
nViews=nuke.views()
for nod in nList:
if (nod['disable'].value()):
continue
newJob= rrJob()
rrSubmit_fillGlobalSceneInfo(newJob)
useStereoFlag=False
if (len(nViews)==2):
useStereoFlag=True
newJob.imageStereoR=nViews[0]
newJob.imageStereoL=nViews[1]
newJob.imageFileName= app.get_node_render_path(nod)
if ((newJob.imageFileName== None) or (len(newJob.imageFileName)<3)):
continue
if (newJob.seqStart==newJob.seqEnd and (newJob.imageFileName.find("#")<0)):
newJob.imageSingleOutput = True
if (useStereoFlag):
if (newJob.imageFileName.find("%V")>=0):
newJob.imageFileName = string.replace(newJob.imageFileName,"%V","<Stereo>")
elif (newJob.imageFileName.find("%v")>=0):
newJob.imageFileName = string.replace(newJob.imageFileName,"%v","<Stereo>")
newJob.imageStereoR=newJob.imageStereoR[0]
newJob.imageStereoL=newJob.imageStereoL[0]
else:
useStereoFlag=False
elif ( (newJob.imageFileName.find("%V")>=0) or (newJob.imageFileName.find("%v")>=0)):
for vn in range(1, len(nViews)):
newJob.maxChannels= newJob.maxChannels + 1
newJob.channelFileName.append(string.replace(string.replace(newJob.imageFileName,"%v",nViews[vn][0]),"%V",nViews[vn]))
newJob.channelExtension.append("")
newJob.imageFileName = string.replace(string.replace(newJob.imageFileName,"%v",nViews[0][0]),"%V",nViews[0])
newJob.layer= app.get_node_name(nod)
newJob.renderer= "shotgun"
newJob.isActive = False
jobList.append(newJob)
def rrSubmit_addPluginLicenses(jobList):
n = nuke.allNodes()
plugins=""
for i in n:
if (i.Class().find(".sapphire.")>=0):
plugins="Sapphire"
            break
for i in n:
if (i.Class().find("pgBokeh")>=0):
plugins=plugins+";pgBokeh"
            break
for i in n:
if (i.Class().find(".revisionfx.rsmb")>=0):
plugins=plugins+";Reelsmart"
            break
for i in n:
if (i.Class().find(".myPlugin.")>=0):
plugins=plugins+";MyPlugin"
            break
if (len(plugins)>0):
for job in jobList:
job.RequiredLicenses=plugins
def rrSubmit_NukeXRequired():
n = nuke.allNodes()
for i in n:
if (i.Class().find(".furnace.")>=0):
return True
return False
def rrSubmit_Nuke_Shotgun():
#writeInfo ("rrSubmit v 7.0.24")
nuke.scriptSave()
CompName = nuke.root().name()
if ((CompName==None) or (len(CompName)==0)):
writeError("Nuke comp not saved!")
return
jobList= []
noLocalSceneCopy= [False]
rrSubmit_CreateSingleJobs_shotgun(jobList,noLocalSceneCopy)
submitOptions=""
if (noLocalSceneCopy[0]):
submitOptions=submitOptions+"AllowLocalSceneCopy=0~0 "
if (rrSubmit_NukeXRequired()):
submitOptions=submitOptions+" CONukeX=1~1 "
rrSubmit_addPluginLicenses(jobList)
submitJobsToRR(jobList,submitOptions)
def rrSubmit_Nuke():
#writeInfo ("rrSubmit v 7.0.24")
nuke.scriptSave()
CompName = nuke.root().name()
if ((CompName==None) or (len(CompName)==0)):
writeError("Nuke comp not saved!")
return
jobList= []
noLocalSceneCopy= [False]
rrSubmit_CreateAllJob(jobList,noLocalSceneCopy)
rrSubmit_CreateSingleJobs(jobList,noLocalSceneCopy)
submitOptions=""
if (noLocalSceneCopy[0]):
submitOptions=submitOptions+"AllowLocalSceneCopy=0~0 "
if (rrSubmit_NukeXRequired()):
submitOptions=submitOptions+" CONukeX=1~1 "
rrSubmit_addPluginLicenses(jobList)
submitJobsToRR(jobList,submitOptions)
def rrSubmit_Nuke_Node(node):
nuke.scriptSave()
CompName = nuke.root().name()
if ((CompName==None) or (len(CompName)==0)):
writeError("Nuke comp not saved!")
return
jobList= []
noLocalSceneCopy= [False]
rrSubmit_CreateSingleJobs_Node(jobList,noLocalSceneCopy, node)
submitOptions=""
if (noLocalSceneCopy[0]):
submitOptions=submitOptions+"AllowLocalSceneCopy=0~0 "
if (rrSubmit_NukeXRequired()):
submitOptions=submitOptions+" CONukeX=1~1 "
rrSubmit_addPluginLicenses(jobList)
submitJobsToRR(jobList,submitOptions)
|
michimussato/pypelyne2
|
pypelyne2/payload/rr/7.0.29__installer/files/render_apps/_submitplugins/rrSubmit_Nuke_5.py
|
Python
|
gpl-2.0
| 24,184
|
# blender CAM polygon_utils_cam.py (c) 2012 Vilem Novak
#
# ***** BEGIN GPL LICENSE BLOCK *****
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ***** END GPL LICENCE BLOCK *****
import math
import mathutils
#from mathutils import *
import curve_simplify
import shapely
from shapely.geometry import polygon as spolygon
from shapely import geometry as sgeometry
SHAPELY = True
# (the old optional-import fallback "except: SHAPELY = False" has been
# dropped; shapely is a hard requirement here)
def Circle(r, np):
c = []
pi = math.pi
v = mathutils.Vector((r, 0, 0))
e = mathutils.Euler((0, 0, 2.0 * math.pi / np))
for a in range(0, np):
c.append((v.x, v.y))
v.rotate(e)
p = spolygon.Polygon(c)
return p
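# e.g. Circle(1.0, 64) approximates the unit circle with a regular 64-gon,
# returned as a shapely Polygon.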
def shapelyRemoveDoubles(p, optimize_threshold):
optimize_threshold *= 0.000001
# vecs=[]
soptions = ['distance', 'distance', 0.0, 5, optimize_threshold, 5, optimize_threshold]
for ci, c in enumerate(p.boundary): # in range(0,len(p)):
veclist = []
for v in c:
veclist.append(mathutils.Vector((v[0], v[1])))
# progress(len(veclist))
s = curve_simplify.simplify_RDP(veclist, soptions)
# progress(len(s))
nc = []
for i in range(0, len(s)):
nc.append(c[s[i]])
        if len(nc) > 2:
            # NOTE: 'pnew' is never initialized in this function, and
            # addContour/isHole appear to belong to the legacy Polygon
            # library rather than shapely; as written this branch cannot run.
            pnew.addContour(nc, p.isHole(ci))
        else:
            pnew.addContour(p[ci], p.isHole(ci))
# progress(time.time()-t)
return pnew
def shapelyToMultipolygon(anydata):
if anydata.type == 'MultiPolygon':
return anydata
elif anydata.type == 'Polygon':
if not anydata.is_empty:
return shapely.geometry.MultiPolygon([anydata])
else:
return sgeometry.MultiPolygon()
else:
print(anydata.type, 'shapely conversion aborted')
return sgeometry.MultiPolygon()
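# Sketch of the normalization performed above (assumes shapely is importable):
#   shapelyToMultipolygon(sgeometry.Polygon([(0, 0), (1, 0), (1, 1)]))
#     -> MultiPolygon wrapping the single polygon
#   shapelyToMultipolygon(sgeometry.Point(0, 0))
#     -> empty MultiPolygon, after printing a warning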
def shapelyToCoords(anydata):
p = anydata
seq = []
# print(p.type)
# print(p.geom_type)
if p.is_empty:
return seq
elif p.type == 'Polygon':
# print('polygon')
clen = len(p.exterior.coords)
# seq = sgeometry.asMultiLineString(p)
seq = [p.exterior.coords]
# print(len(p.interiors))
for interior in p.interiors:
seq.append(interior.coords)
elif p.type == 'MultiPolygon':
clen = 0
seq = []
for sp in p:
clen += len(sp.exterior.coords)
seq.append(sp.exterior.coords)
for interior in sp.interiors:
seq.append(interior.coords)
elif p.type == 'MultiLineString':
seq = []
for linestring in p:
seq.append(linestring.coords)
elif p.type == 'LineString':
seq = []
seq.append(p.coords)
elif p.type == 'MultiPoint':
        return seq  # no line coordinates for points; seq is still empty here
elif p.type == 'GeometryCollection':
# print(dir(p))
# print(p.geometryType, p.geom_type)
clen = 0
seq = []
# print(p.boundary.coordsd)
for sp in p: # TODO
# seq.append(shapelyToCoords(sp))
clen += len(sp.exterior.coords)
seq.append(sp.exterior.coords)
for interior in sp.interiors:
seq.extend(interior.coords)
# for g in p.geom:
# print(g.type)
return seq
def shapelyToCurve(name, p, z):
import bpy, bmesh
from bpy_extras import object_utils
verts = []
edges = []
vi = 0
ci = 0
# for c in p.exterior.coords:
# print(p.type)
seq = shapelyToCoords(p)
w = 1 # weight
curvedata = bpy.data.curves.new(name=name, type='CURVE')
curvedata.dimensions = '3D'
objectdata = bpy.data.objects.new(name, curvedata)
objectdata.location = (0, 0, 0) # object origin
bpy.context.collection.objects.link(objectdata)
for c in seq:
polyline = curvedata.splines.new('POLY')
polyline.points.add(len(c) - 1)
for num in range(len(c)):
x, y = c[num][0], c[num][1]
polyline.points[num].co = (x, y, z, w)
bpy.context.view_layer.objects.active = objectdata
objectdata.select_set(state=True)
for c in objectdata.data.splines:
c.use_cyclic_u = True
# objectdata.data.show_handles = False
# objectdata.data.show_normal_face = False
return objectdata # bpy.context.active_object
|
vilemnovak/blendercam
|
scripts/addons/cam/polygon_utils_cam.py
|
Python
|
gpl-2.0
| 4,940
|
# Copyright (C) 2013 Adam Stokes <adam.stokes@ubuntu.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, UbuntuPlugin
class Azure(Plugin, UbuntuPlugin):
""" Microsoft Azure Client Plugin
"""
plugin_name = 'azure'
packages = ('walinuxagent',)
def setup(self):
self.add_copy_specs(["/var/log/waagent*",
"/var/lib/cloud",
"/etc/default/kv-kvp-daemon-init",
"/sys/module/hv_netvsc/parameters/ring_size",
"/sys/module/hv_storvsc/parameters/storvsc_ringbuffer_size"])
# vim: et ts=4 sw=4
|
portante/sosreport
|
sos/plugins/azure.py
|
Python
|
gpl-2.0
| 1,294
|
from checkio_referee import RefereeBase, covercodes, representations, ENV_NAME
import settings_env
from tests import TESTS
cover = """def cover(func, data):
return func(*[str(x) for x in data])
"""
class Referee(RefereeBase):
TESTS = TESTS
ENVIRONMENTS = settings_env.ENVIRONMENTS
DEFAULT_FUNCTION_NAME = "common_words"
FUNCTION_NAMES = {
ENV_NAME.JS_NODE: "commonWords"
}
ENV_COVERCODE = {
ENV_NAME.PYTHON: covercodes.py_unwrap_args,
ENV_NAME.JS_NODE: covercodes.js_unwrap_args
}
CALLED_REPRESENTATIONS = {
ENV_NAME.PYTHON: representations.unwrap_arg_representation,
ENV_NAME.JS_NODE: representations.unwrap_arg_representation,
}
|
Empire-of-Code-Puzzles/checkio-empire-common-words
|
verification/src/referee.py
|
Python
|
gpl-2.0
| 717
|
# UFO-launcher - A multi-platform virtual machine launcher for the UFO OS
#
# Copyright (c) 2008-2009 Agorabox, Inc.
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from distutils.core import setup
import py2exe
from py2exe.build_exe import py2exe as BuildExe
import os, sys
import glob
manifest = """<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="requireAdministrator"/>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC90.CRT" version="9.0.21022.8" processorArchitecture="x86" publicKeyToken="1fc8b3b9a1e18e3b"></assemblyIdentity>
</dependentAssembly>
</dependency>
</assembly>
"""
sys.path.append("..")
sys.path.append("../src")
sys.path.append("../clamav/src")
sys.path.append("../clamav/src/DNS")
sys.path.append("../clamav/src/custom_clamav")
setup(#zipfile = "bin\\library.zip",
options = {'py2exe': { 'dist_dir': "dist/bin",
'bundle_files': 3,
'includes': ['sip', 'win32com.server.util', 'pythoncom'],
'excludes' : [ "Tkconstants", "Tkinter", "tcl" ],
# "PyQt4.QtCore", "PyQt4.QtGui", "PyQt4.QtNetwork", "PyQt4" ],
# 'dll_excludes': [ "PyQt4\\QtCore.pyd", "PyQt4\\QtGui.pyd",
# "PyQt4\\QtNetwork.pyd",
# "QtCore4.dll", "QtGui4.dll", "QtNetwork4.dll" ],
"typelibs": [('{46137EEC-703B-4FE5-AFD4-7C9BBBBA0259}', 0, 1, 3)],
}},
windows = [{'script': "../src/launcher.py",
"icon_resources" : [(1, "../graphics/UFO.ico")],
"other_resources": [(24, 1, manifest)],
}],
)
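# Illustrative build invocation (standard py2exe workflow): running
#   python setup.py py2exe
# produces the windowed launcher executable under dist/bin.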
|
vienin/vlaunch
|
setup/setup.py
|
Python
|
gpl-2.0
| 2,715
|
# -*- coding: utf-8 -*-
"""
blohg.rst_parser.directives
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Module with the custom blohg reStructuredText directives.
:copyright: (c) 2010-2013 by Rafael Goncalves Martins
:license: GPL-2, see LICENSE for more details.
"""
from docutils import nodes, statemachine
from docutils.utils.error_reporting import ErrorString
from docutils.io import FileInput
from docutils.parsers.rst import directives, Directive
from docutils.parsers.rst.directives.images import Image, Figure
from docutils.parsers.rst.directives.misc import Include
from flask import current_app, url_for
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name, TextLexer
from urllib import pathname2url
from blohg.file_like import BlohgFile
from blohg.rst_parser.nodes import iframe_flash_video
import posixpath
__all__ = ['Vimeo', 'Youtube', 'Math', 'MathJax', 'Code', 'SourceCode', 'AttachmentImage',
'AttachmentFigure', 'SubPages', 'IncludeHg']
GOOGLETEX_URL = 'https://chart.googleapis.com/chart?cht=tx&chl='
def align(argument):
return directives.choice(argument, ('left', 'center', 'right'))
def boolean(argument):
return directives.choice(argument, ('true', 'false'))
class Vimeo(Directive):
"""reStructuredText directive that creates an embed object to display
a video from Vimeo
Usage example::
.. vimeo:: QFwQIRwuAM0
:align: center
:height: 344
:width: 425
"""
required_arguments = 1
optional_arguments = 0
option_spec = {
'height': directives.length_or_unitless,
'width': directives.length_or_percentage_or_unitless,
'border': directives.length_or_unitless,
'align': align,
'allowfullscreen': boolean,
}
has_content = False
def run(self):
self.options['uri'] = 'http://player.vimeo.com/video/' \
+ self.arguments[0]
self.options.setdefault('width', '425px')
self.options.setdefault('height', '344px')
self.options.setdefault('align', 'center')
self.options.setdefault('border', '0')
self.options.setdefault('allowfullscreen', 'true')
return [iframe_flash_video('', **self.options)]
class Youtube(Directive):
"""reStructuredText directive that creates an embed object to display
a video from Youtube
Usage example::
.. youtube:: QFwQIRwuAM0
:align: center
:height: 344
:width: 425
"""
required_arguments = 1
optional_arguments = 0
option_spec = {
'height': directives.length_or_unitless,
'width': directives.length_or_percentage_or_unitless,
'border': directives.length_or_unitless,
'align': align,
'allowfullscreen': boolean,
}
has_content = False
def run(self):
self.options['uri'] = 'http://www.youtube.com/embed/%s' % \
self.arguments[0]
self.options['thumbnail_uri'] = \
'http://img.youtube.com/vi/%s/hqdefault.jpg' % self.arguments[0]
self.options.setdefault('width', '425px')
self.options.setdefault('height', '344px')
self.options.setdefault('align', 'center')
self.options.setdefault('border', '0')
self.options.setdefault('allowfullscreen', 'true')
return [iframe_flash_video('', **self.options)]
class Code(Directive):
"""reStructuredText directive that creates a pre tag suitable for
decoration with http://alexgorbatchev.com/SyntaxHighlighter/
Usage example::
.. source:: python
print "Hello, World!"
.. raw:: html
<script type="text/javascript" src="http://alexgorbatchev.com/pub/sh/current/scripts/shCore.js"></script>
<script type="text/javascript" src="http://alexgorbatchev.com/pub/sh/current/scripts/shBrushPython.js"></script>
<link type="text/css" rel="stylesheet" href="http://alexgorbatchev.com/pub/sh/current/styles/shCoreDefault.css"/>
<script type="text/javascript">SyntaxHighlighter.defaults.toolbar=false; SyntaxHighlighter.all();</script>
"""
required_arguments = 1
optional_arguments = 0
has_content = True
def run(self):
self.assert_has_content()
self.options['brush'] = self.arguments[0]
html = '''\
<pre class="brush: %s">
%s
</pre>
'''
        return [nodes.raw('', html % (self.options['brush'],
                          "\n".join(self.content).replace('<', '&lt;')),
                format='html')]
class SourceCode(Directive):
"""reStructuredText directive that does syntax highlight using Pygments.
Usage example::
.. sourcecode:: python
:linenos:
print "Hello, World!"
The ``linenos`` option enables the line numbering.
To be able to use this directive you should generate a CSS file with the
style definitions, using the ``pygmentize`` script, shipped with Pygments.
::
$ pygmentize -S friendly -f html > static/pygments.css
Where ``friendly`` will be your Pygments style of choice.
This file should be included in the main template, usually ``base.html``::
<link type="text/css" media="screen" rel="stylesheet" href="{{
url_for('static', filename='pygments.css') }}" />
This directive is based on ``rst-directive.py``, created by Pygments
authors.
"""
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {
'linenos': directives.flag,
}
has_content = True
def run(self):
self.assert_has_content()
try:
lexer = get_lexer_by_name(self.arguments[0])
except ValueError:
# no lexer found - use the text one instead of an exception
lexer = TextLexer()
formatter = HtmlFormatter(noclasses=False)
if 'linenos' in self.options:
formatter.linenos = 2 # inline
parsed = highlight(u'\n'.join(self.content), lexer, formatter)
return [nodes.raw('', parsed, format='html')]
class Math(Image):
"""reStructuredText directive that creates an image HTML object to
display a LaTeX equation, using Google Chart API.
Usage example::
.. math::
\\frac{x^2}{1+x}
"""
required_arguments = 0
has_content = True
def run(self):
self.assert_has_content()
if not 'align' in self.options:
self.options['align'] = 'center'
tmp = pathname2url(' '.join([(i == '' and '\\\\' or i.strip()) \
for i in self.content]))
self.arguments.append('%s%s' % (GOOGLETEX_URL, tmp))
return Image.run(self)
class MathJax(Directive):
"""reStructuredText directive that simply returns a math html fragment suitable for rendering by MathJax.
The latex math equations are simply wrapped by an HTML div tag with mathjax class for further CSS decoration.
Use conventional LaTeX to write math equations.
Note that $ signs or \begin{equation} etc. should be no longer omitted.
Auto-numbering is possible by configuring MathJax before loading MathJax, via::
<script type="text/x-mathjax-config">
MathJax.Hub.Config({
TeX: { equationNumbers: { autoNumber: "AMS" } }
});
</script>
Usage example::
.. mathjax::
$$\frac{x^2}{1+x}\label{frac_eq}$$
    for a displayed, numbered equation with a reference. Use "\eqref{frac_eq}" in the normal way to cite the equation number.
LaTeX math \begin{equation}, \begin{align}, etc. are all supported.
See MathJax official websites for more information.
"""
required_arguments = 0
has_content = True
def run(self):
self.assert_has_content()
html = '''\
<div class="mathjax">
%s
</div>
'''
        return [nodes.raw('', html % (
            "\n".join(self.content).replace('<', '&lt;'), ),
            format='html')]
class AttachmentImage(Image):
def run(self):
my_file = directives.uri(self.arguments[0])
full_path = posixpath.join(current_app.config['ATTACHMENT_DIR'],
my_file)
if full_path not in current_app.blohg.changectx.files:
raise self.error(
'Error in "%s" directive: File not found: %s.' % (
self.name, full_path
)
)
self.arguments[0] = url_for('attachments', filename=my_file,
_external=True)
return Image.run(self)
class AttachmentFigure(Figure):
def run(self):
my_file = directives.uri(self.arguments[0])
full_path = posixpath.join(current_app.config['ATTACHMENT_DIR'],
my_file)
if full_path not in current_app.blohg.changectx.files:
raise self.error(
'Error in "%s" directive: File not found: %s.' % (
self.name, full_path
)
)
self.arguments[0] = url_for('attachments', filename=my_file,
_external=True)
return Figure.run(self)
class SubPages(Directive):
"""reStructuredText directive that creates a bullet-list with the subpages
of the current page, or of a given page.
Usage example::
.. subpages::
Or::
.. subpages:: projects
Supposing that you have a directory called ``content/projects`` and some
reStructuredText files on it. Subdirectories are also allowed.
It is also possible to change the way the bullet-list is sorted, using the
options ``sort-by`` and ``sort-order``::
.. subpages::
:sort-by: slug
:sort-order: desc
Available options for ``sort-by`` are ``slug`` (default option), ``title``
and ``date``, and for ``sort-order`` are ``asc`` (default option) and
``desc``.
This directive will just show the files from the root of the directory.
It's not recursive.
"""
required_arguments = 0
optional_arguments = 1
option_spec = {
'sort-by': lambda x: directives.choice(x, ('slug', 'title', 'date')),
'sort-order': lambda x: directives.choice(x, ('asc', 'desc'))
}
has_content = False
def run(self):
self.options.setdefault('sort-by', 'slug')
self.options.setdefault('sort-order', 'asc')
if len(self.arguments) == 0:
prefix = ':repo:%s' % current_app.config['CONTENT_DIR']
source = self.state.document.current_source or ''
if not source.startswith(prefix):
raise self.severe('Problem with "%s" directive path:\npath ' \
'isn\'t allowed: %s' % (self.name, source))
source = source[len(prefix)+1:]
            # str.rstrip() strips a *set* of characters rather than a suffix,
            # so remove the trailing 'index' file name and extension explicitly.
            index_suffix = '/index%s' % current_app.config['POST_EXT']
            if source.endswith(index_suffix):
                source = source[:-len(index_suffix)]
            elif source.endswith(current_app.config['POST_EXT']):
                source = source[:-len(current_app.config['POST_EXT'])]
self.arguments.append(source)
tmp_metadata = []
final_metadata = []
splited_dir = len(self.arguments[0]) > 0 \
and self.arguments[0].split('/') or []
for metadata in current_app.blohg.content.get_all():
splited_slug = metadata.slug.split('/')
if metadata.slug.startswith(self.arguments[0]) and \
(len(splited_dir) + 1 == len(splited_slug)):
tmp_metadata.append(metadata)
def key_func(metadata):
if self.options['sort-by'] == 'title':
return metadata.get('link_title', metadata.title)
return getattr(metadata, self.options['sort-by'])
for metadata in sorted(tmp_metadata, key=key_func,
reverse=(self.options['sort-order'] == 'desc')):
link = url_for('views.content', slug=metadata.slug)
link_title = metadata.get('link_title', metadata.title)
reference = nodes.reference(link, link_title, refuri=link)
final_metadata.append(nodes.list_item('',
nodes.paragraph('', '',
reference)))
return [nodes.bullet_list('', *final_metadata)]
class IncludeHg(Include):
def run(self):
if not self.state.document.settings.file_insertion_enabled:
raise self.warning('"%s" directive disabled.' % self.name)
path = directives.path(self.arguments[0])
# ALL the included files are relative to the repository root.
# we need to remove absolute paths
if path.startswith('/'):
raise self.severe('Problem with "%s" directive path:\npath ' \
'should be relative' % self.name)
encoding = self.options.get(
'encoding', self.state.document.settings.input_encoding)
tab_width = self.options.get(
'tab-width', self.state.document.settings.tab_width)
try:
self.state.document.settings.record_dependencies.add(path)
include_file = FileInput(
source=BlohgFile(path), encoding=encoding,
error_handler=(self.state.document.settings.\
input_encoding_error_handler),
handle_io_errors=None)
except IOError, error:
raise self.severe(u'Problems with "%s" directive path:\n%s.' %
(self.name, ErrorString(error)))
startline = self.options.get('start-line', None)
endline = self.options.get('end-line', None)
try:
if startline or (endline is not None):
lines = include_file.readlines()
rawtext = ''.join(lines[startline:endline])
else:
rawtext = include_file.read()
except UnicodeError, error:
raise self.severe(u'Problem with "%s" directive:\n%s' %
(self.name, ErrorString(error)))
# start-after/end-before: no restrictions on newlines in match-text,
# and no restrictions on matching inside lines vs. line boundaries
after_text = self.options.get('start-after', None)
if after_text:
# skip content in rawtext before *and incl.* a matching text
after_index = rawtext.find(after_text)
if after_index < 0:
raise self.severe('Problem with "start-after" option of "%s" '
'directive:\nText not found.' % self.name)
rawtext = rawtext[after_index + len(after_text):]
before_text = self.options.get('end-before', None)
if before_text:
# skip content in rawtext after *and incl.* a matching text
before_index = rawtext.find(before_text)
if before_index < 0:
raise self.severe('Problem with "end-before" option of "%s" '
'directive:\nText not found.' % self.name)
rawtext = rawtext[:before_index]
if 'literal' in self.options:
# Convert tabs to spaces, if `tab_width` is positive.
if tab_width >= 0:
text = rawtext.expandtabs(tab_width)
else:
text = rawtext
literal_block = nodes.literal_block(rawtext, text, source=path)
literal_block.line = 1
return [literal_block]
else:
include_lines = statemachine.string2lines(
rawtext, tab_width, convert_whitespace=1)
self.state_machine.insert_input(include_lines, path)
return []
index = {
'vimeo': Vimeo,
'youtube': Youtube,
'math': Math,
'mathjax': MathJax,
'code': Code,
'sourcecode': SourceCode,
'attachment-image': AttachmentImage,
'attachment-figure': AttachmentFigure,
'subpages': SubPages,
'include-hg': IncludeHg,
'include': IncludeHg,
}
|
mknecht/blohg
|
blohg/rst_parser/directives.py
|
Python
|
gpl-2.0
| 16,077
|
# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''commands to sign and verify changesets'''
import os, tempfile, binascii
from mercurial import util, commands, match, cmdutil, error
from mercurial import node as hgnode
from mercurial.i18n import _
cmdtable = {}
command = cmdutil.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'internal' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'internal'
class gpg(object):
def __init__(self, path, key=None):
self.path = path
self.key = (key and " --local-user \"%s\"" % key) or ""
def sign(self, data):
gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
return util.filter(data, gpgcmd)
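    # e.g. with path="gpg" and key="Alice", sign() above pipes the data
    # through:  gpg --sign --detach-sign --local-user "Alice"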
def verify(self, data, sig):
""" returns of the good and bad signatures"""
sigfile = datafile = None
try:
# create temporary files
fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
fp = os.fdopen(fd, 'wb')
fp.write(sig)
fp.close()
fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
fp = os.fdopen(fd, 'wb')
fp.write(data)
fp.close()
gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
"\"%s\" \"%s\"" % (self.path, sigfile, datafile))
ret = util.filter("", gpgcmd)
finally:
for f in (sigfile, datafile):
try:
if f:
os.unlink(f)
except OSError:
pass
keys = []
key, fingerprint = None, None
for l in ret.splitlines():
# see DETAILS in the gnupg documentation
# filter the logger output
if not l.startswith("[GNUPG:]"):
continue
l = l[9:]
if l.startswith("VALIDSIG"):
# fingerprint of the primary key
fingerprint = l.split()[10]
elif l.startswith("ERRSIG"):
key = l.split(" ", 3)[:2]
key.append("")
fingerprint = None
elif (l.startswith("GOODSIG") or
l.startswith("EXPSIG") or
l.startswith("EXPKEYSIG") or
l.startswith("BADSIG")):
if key is not None:
keys.append(key + [fingerprint])
key = l.split(" ", 2)
fingerprint = None
if key is not None:
keys.append(key + [fingerprint])
return keys
def newgpg(ui, **opts):
"""create a new gpg instance"""
gpgpath = ui.config("gpg", "cmd", "gpg")
gpgkey = opts.get('key')
if not gpgkey:
gpgkey = ui.config("gpg", "key", None)
return gpg(gpgpath, gpgkey)
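# Typical hgrc configuration consumed by newgpg() (illustrative values; the
# gpg.cmd and gpg.key settings are documented in the sign() help below):
#
#   [gpg]
#   cmd = gpg2
#   key = Alice <alice@example.com>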
def sigwalk(repo):
"""
    walk over every signature, yielding pairs of
((node, version, sig), (filename, linenumber))
"""
def parsefile(fileiter, context):
ln = 1
for l in fileiter:
if not l:
continue
yield (l.split(" ", 2), (context, ln))
ln += 1
# read the heads
fl = repo.file(".hgsigs")
for r in reversed(fl.heads()):
fn = ".hgsigs|%s" % hgnode.short(r)
for item in parsefile(fl.read(r).splitlines(), fn):
yield item
try:
# read local signatures
fn = "localsigs"
for item in parsefile(repo.vfs(fn), fn):
yield item
except IOError:
pass
def getkeys(ui, repo, mygpg, sigdata, context):
"""get the keys who signed a data"""
fn, ln = context
node, version, sig = sigdata
prefix = "%s:%d" % (fn, ln)
node = hgnode.bin(node)
data = node2txt(repo, node, version)
sig = binascii.a2b_base64(sig)
keys = mygpg.verify(data, sig)
validkeys = []
# warn for expired key and/or sigs
for key in keys:
if key[0] == "ERRSIG":
ui.write(_("%s Unknown key ID \"%s\"\n")
% (prefix, shortkey(ui, key[1][:15])))
continue
if key[0] == "BADSIG":
ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
continue
if key[0] == "EXPSIG":
ui.write(_("%s Note: Signature has expired"
" (signed by: \"%s\")\n") % (prefix, key[2]))
elif key[0] == "EXPKEYSIG":
ui.write(_("%s Note: This key has expired"
" (signed by: \"%s\")\n") % (prefix, key[2]))
validkeys.append((key[1], key[2], key[3]))
return validkeys
@command("sigs", [], _('hg sigs'))
def sigs(ui, repo):
"""list signed changesets"""
mygpg = newgpg(ui)
revs = {}
for data, context in sigwalk(repo):
node, version, sig = data
fn, ln = context
try:
n = repo.lookup(node)
except KeyError:
ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
continue
r = repo.changelog.rev(n)
keys = getkeys(ui, repo, mygpg, data, context)
if not keys:
continue
revs.setdefault(r, [])
revs[r].extend(keys)
for rev in sorted(revs, reverse=True):
for k in revs[rev]:
r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
ui.write("%-30s %s\n" % (keystr(ui, k), r))
@command("sigcheck", [], _('hg sigcheck REV'))
def sigcheck(ui, repo, rev):
"""verify all the signatures there may be for a particular revision"""
mygpg = newgpg(ui)
rev = repo.lookup(rev)
hexrev = hgnode.hex(rev)
keys = []
for data, context in sigwalk(repo):
node, version, sig = data
if node == hexrev:
k = getkeys(ui, repo, mygpg, data, context)
if k:
keys.extend(k)
if not keys:
ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
return
# print summary
ui.write("%s is signed by:\n" % hgnode.short(rev))
for key in keys:
ui.write(" %s\n" % keystr(ui, key))
def keystr(ui, key):
"""associate a string to a key (username, comment)"""
keyid, user, fingerprint = key
comment = ui.config("gpg", fingerprint, None)
if comment:
return "%s (%s)" % (user, comment)
else:
return user
@command("sign",
[('l', 'local', None, _('make the signature local')),
('f', 'force', None, _('sign even if the sigfile is modified')),
('', 'no-commit', None, _('do not commit the sigfile after signing')),
('k', 'key', '',
_('the key id to sign with'), _('ID')),
('m', 'message', '',
_('use text as commit message'), _('TEXT')),
('e', 'edit', False, _('invoke editor on commit messages')),
] + commands.commitopts2,
_('hg sign [OPTION]... [REV]...'))
def sign(ui, repo, *revs, **opts):
"""add a signature for the current or given revision
If no revision is given, the parent of the working directory is used,
or tip if no revision is checked out.
The ``gpg.cmd`` config setting can be used to specify the command
to run. A default key can be specified with ``gpg.key``.
See :hg:`help dates` for a list of formats valid for -d/--date.
"""
with repo.wlock():
return _dosign(ui, repo, *revs, **opts)
def _dosign(ui, repo, *revs, **opts):
mygpg = newgpg(ui, **opts)
sigver = "0"
sigmessage = ""
date = opts.get('date')
if date:
opts['date'] = util.parsedate(date)
if revs:
nodes = [repo.lookup(n) for n in revs]
else:
nodes = [node for node in repo.dirstate.parents()
if node != hgnode.nullid]
if len(nodes) > 1:
raise error.Abort(_('uncommitted merge - please provide a '
'specific revision'))
if not nodes:
nodes = [repo.changelog.tip()]
for n in nodes:
hexnode = hgnode.hex(n)
ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
hgnode.short(n)))
# build data
data = node2txt(repo, n, sigver)
sig = mygpg.sign(data)
if not sig:
raise error.Abort(_("error while signing"))
sig = binascii.b2a_base64(sig)
sig = sig.replace("\n", "")
sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
# write it
if opts['local']:
repo.vfs.append("localsigs", sigmessage)
return
if not opts["force"]:
msigs = match.exact(repo.root, '', ['.hgsigs'])
if any(repo.status(match=msigs, unknown=True, ignored=True)):
raise error.Abort(_("working copy of .hgsigs is changed "),
hint=_("please commit .hgsigs manually"))
sigsfile = repo.wfile(".hgsigs", "ab")
sigsfile.write(sigmessage)
sigsfile.close()
if '.hgsigs' not in repo.dirstate:
repo[None].add([".hgsigs"])
if opts["no_commit"]:
return
message = opts['message']
if not message:
# we don't translate commit messages
message = "\n".join(["Added signature for changeset %s"
% hgnode.short(n)
for n in nodes])
try:
editor = cmdutil.getcommiteditor(editform='gpg.sign', **opts)
repo.commit(message, opts['user'], opts['date'], match=msigs,
editor=editor)
except ValueError as inst:
raise error.Abort(str(inst))
def shortkey(ui, key):
if len(key) != 16:
ui.debug("key ID \"%s\" format error\n" % key)
return key
return key[-8:]
def node2txt(repo, node, ver):
"""map a manifest into some text"""
if ver == "0":
return "%s\n" % hgnode.hex(node)
else:
raise error.Abort(_("unknown signature version"))
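# --- Hedged usage sketch (not part of the original extension) ---
# How the pieces above fit together, assuming a configured `ui` and `repo`
# from Mercurial's internals (the values below are illustrative only):
#
# mygpg = newgpg(ui, key='ABCD1234')   # wraps the gpg binary
# data = node2txt(repo, node, "0")     # "<hexnode>\n", the text that is signed
# sig = mygpg.sign(data)               # detached signature
# keys = mygpg.verify(data, sig)       # [[status, keyid, user, fingerprint], ...]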
|
seewindcn/tortoisehg
|
src/hgext/gpg.py
|
Python
|
gpl-2.0
| 10,197
|
import uuid
from src.common.database import Database
import datetime
__author__ = 'jslvtr'
class Post(object):
    def __init__(self, blog_id, title, content, author, created_date=None, _id=None):
        self.blog_id = blog_id
        self.title = title
        self.content = content
        self.author = author
        # evaluate utcnow() per instance; a default argument would be frozen
        # at import time and every post would share the same timestamp
        self.created_date = created_date if created_date is not None else datetime.datetime.utcnow()
        self._id = uuid.uuid4().hex if _id is None else _id
def save_to_mongo(self):
Database.insert(collection='posts',
data=self.json())
def json(self):
return {
'_id': self._id,
'blog_id': self.blog_id,
'author': self.author,
'content': self.content,
'title': self.title,
'created_date': self.created_date
}
@classmethod
def from_mongo(cls, id):
post_data = Database.find_one(collection='posts', query={'_id': id})
return cls(**post_data)
@staticmethod
def from_blog(id):
return [post for post in Database.find(collection='posts', query={'blog_id': id})]
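# --- Hedged usage sketch (not part of the original module) ---
# Constructing a Post and inspecting its Mongo document does not touch the
# database, so this much is safe without a running MongoDB:
#
# post = Post(blog_id='abc123', title='Hello', content='First post',
#             author='jslvtr')
# print(post.json())   # dict with a fresh uuid4 hex as '_id'
# post.save_to_mongo() # would insert the same dict into 'posts'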
|
brunotougeiro/python
|
udemy-python-web-apps/web_blog-master/src/models/post.py
|
Python
|
gpl-2.0
| 1,111
|
#coding:utf-8
import GJDB
import sys
db = GJDB.GJDB()
db.crawl()
db.selectDB('crawler_ds')
db.selectData('set names utf8;')
#sys.exit()
# one "name<TAB>source<TAB>url" line per community
xqxxs = open('C:/users/suchao/desktop/bsgscxq.txt', 'r').readlines()
tmpFile = open('tmpFilexq.txt', 'w')
for xqxx in xqxxs:
    name, source, url = xqxx.split('\t')
    # url[:-1] strips the trailing newline kept by readlines()
    sql = "SELECT city,district,developer,community,'','',prop_company,prop_fee,plot_ratio,gree_coverage,'',building_year,address FROM crawler_house_xiaoqu WHERE claw_date='20131230' AND city IN ('北京','广州','深圳','上海') AND community='"+name+"' AND url='"+url[:-1]+"';"
    print sql
    result = db.selectData(sql)
    if len(result) >= 1:
        # reuse the rows already fetched instead of re-running the query
        result = '\t'.join(list(result[0]))
        tmpFile.write(result + '\n')
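# --- Hedged note (not part of the original script) ---
# Building SQL by string concatenation breaks on names containing quotes and
# is open to SQL injection. If GJDB's underlying cursor follows the Python
# DB-API (an assumption; GJDB's interface isn't shown here), the query could
# be parameterized with MySQLdb-style %s placeholders instead:
#
# sql = ("SELECT city,district,developer,community,'','',prop_company,"
#        "prop_fee,plot_ratio,gree_coverage,'',building_year,address "
#        "FROM crawler_house_xiaoqu WHERE claw_date='20131230' "
#        "AND city IN ('北京','广州','深圳','上海') "
#        "AND community=%s AND url=%s;")
# cursor.execute(sql, (name, url[:-1]))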
|
hfutsuchao/Python2.6
|
xiaoquUGC/小区入库SQL生成.py
|
Python
|
gpl-2.0
| 761
|
from io import BytesIO
from pytest import mark, raises
from translate.convert import po2php, test_convert
from translate.storage import po
class TestPO2Php:
def po2php(self, posource):
"""helper that converts po source to .php source without requiring files"""
inputfile = BytesIO(posource.encode())
inputpo = po.pofile(inputfile)
convertor = po2php.po2php()
return convertor.convertstore(inputpo)
def merge2php(self, phpsource, posource):
"""helper that merges po translations to .php source without requiring files"""
inputfile = BytesIO(posource.encode())
inputpo = po.pofile(inputfile)
templatefile = BytesIO(phpsource.encode())
# templatephp = php.phpfile(templatefile)
convertor = po2php.rephp(templatefile, inputpo)
outputphp = convertor.convertstore().decode()
print(outputphp)
return outputphp
def test_convertphp(self):
"""test convertphp helper"""
posource = """#: $lang['name']
msgid "value"
msgstr "waarde"
"""
phptemplate = """$lang['name'] = 'value';
"""
phpexpected = b"""<?php
$lang['name'] = 'waarde';
"""
inputfile = BytesIO(posource.encode())
templatefile = BytesIO(phptemplate.encode())
outputfile = BytesIO()
assert po2php.convertphp(inputfile, outputfile, templatefile) == 1
assert outputfile.getvalue() == phpexpected
def test_convertphp_notemplate(self):
"""test convertphp helper without template"""
posource = """#: $lang['name']
msgid "value"
msgstr "waarde"
"""
inputfile = BytesIO(posource.encode())
outputfile = BytesIO()
with raises(ValueError):
po2php.convertphp(inputfile, outputfile, None)
def test_convertphp_empty_template(self):
"""test convertphp helper with empty translation"""
posource = """#: $lang['name']
msgid "value"
msgstr ""
"""
inputfile = BytesIO(posource.encode())
templatefile = BytesIO(b"")
outputfile = BytesIO()
assert (
po2php.convertphp(inputfile, outputfile, templatefile, False, 100) is False
)
assert outputfile.getvalue() == b""
def test_merging_simple(self):
"""check the simplest case of merging a translation"""
posource = """#: $lang['name']
msgid "value"
msgstr "waarde"
"""
phptemplate = """<?php
$lang['name'] = 'value';
"""
phpexpected = """<?php
$lang['name'] = 'waarde';
"""
phpfile = self.merge2php(phptemplate, posource)
print(phpfile)
assert phpfile == phpexpected
def test_space_preservation(self):
"""check that we preserve any spacing in php files when merging"""
posource = """#: $lang['name']
msgid "value"
msgstr "waarde"
"""
phptemplate = """<?php
$lang['name'] = 'value';
"""
phpexpected = """<?php
$lang['name'] = 'waarde';
"""
phpfile = self.merge2php(phptemplate, posource)
print(phpfile)
assert phpfile == phpexpected
def test_preserve_unused_statement(self):
"""check that we preserve any unused statements in php files when merging"""
posource = """#: $lang['name']
msgid "value"
msgstr "waarde"
"""
phptemplate = """<?php
error_reporting(E_ALL);
$lang['name'] = 'value';
"""
phpexpected = """<?php
$lang['name'] = 'waarde';
"""
phpfile = self.merge2php(phptemplate, posource)
assert phpfile == phpexpected
def test_not_translated_multiline(self):
"""check that we preserve not translated multiline strings in php files when merging"""
posource = """#: $lang['name']
msgid "value"
msgstr "waarde"
"""
phptemplate = """<?php
$lang['name'] = 'value';
$lang['second'] = "
value";
"""
phpexpected = """<?php
$lang['name'] = 'waarde';
$lang['second'] = "
value";
"""
phpfile = self.merge2php(phptemplate, posource)
assert phpfile == phpexpected
def test_merging_blank_entries(self):
"""check that we can correctly merge entries that are blank in the template"""
posource = r'''#: accesskey-accept
msgid ""
"_: accesskey-accept\n"
""
msgstr ""'''
phptemplate = """<?php
$lang['accesskey-accept'] = '';
"""
phpexpected = """<?php
$lang['accesskey-accept'] = '';
"""
phpfile = self.merge2php(phptemplate, posource)
print(phpfile)
assert phpfile == phpexpected
def test_merging_fuzzy(self):
"""check merging a fuzzy translation"""
posource = """#: %24lang%5B+%27name%27+%5D
#, fuzzy
msgid "value"
msgstr "waarde"
"""
phptemplate = """<?php
$lang['name'] = 'value';
"""
phpexpected = """<?php
$lang['name'] = 'value';
"""
phpfile = self.merge2php(phptemplate, posource)
print(phpfile)
assert phpfile == phpexpected
def test_locations_with_spaces(self):
"""check that a location with spaces in php but spaces removed in PO is used correctly"""
posource = """#: %24lang%5B+%27name%27+%5D
msgid "value"
msgstr "waarde"\n"""
phptemplate = """<?php
$lang[ 'name' ] = 'value';
"""
phpexpected = """<?php
$lang[ 'name' ] = 'waarde';
"""
phpfile = self.merge2php(phptemplate, posource)
print(phpfile)
assert phpfile == phpexpected
def test_inline_comments(self):
"""check that we include inline comments from the template. Bug 590"""
posource = """#: %24lang%5B+%27name%27+%5D
msgid "value"
msgstr "waarde"
"""
phptemplate = """<?php
$lang[ 'name' ] = 'value'; //inline comment
"""
phpexpected = """<?php
//inline comment
$lang[ 'name' ] = 'waarde';
"""
phpfile = self.merge2php(phptemplate, posource)
print(phpfile)
assert phpfile == phpexpected
def test_block_comments(self):
"""check that we include block comments from the template"""
posource = """#: %24lang%5B+%27name%27+%5D
msgid "value"
msgstr "waarde"
"""
phptemplate = """<?php
/* some comment */
$lang[ 'name' ] = 'value';
"""
phpexpected = """<?php
/* some comment */
$lang[ 'name' ] = 'waarde';
"""
phpfile = self.merge2php(phptemplate, posource)
assert phpfile == phpexpected
def test_named_variables(self):
"""check that we convert correctly if using named variables."""
posource = """#: $dictYear
msgid "Year"
msgstr "Jaar"
"""
phptemplate = """<?php
$dictYear = 'Year';
"""
phpexpected = """<?php
$dictYear = 'Jaar';
"""
phpfile = self.merge2php(phptemplate, posource)
print(phpfile)
assert phpfile == phpexpected
def test_multiline(self):
"""
Check that we convert multiline strings correctly.
Bug 1296.
"""
posource = r"""#: $string['upgradesure']
msgid ""
"Your Moodle files have been changed, and you are\n"
"about to automatically upgrade your server to this version:\n"
"<p><b>$a</b></p>\n"
"<p>Once you do this you can not go back again.</p>\n"
"<p>Are you sure you want to upgrade this server to this version?</p>"
msgstr ""
"""
phptemplate = """<?php
$string['upgradesure'] = 'Your Moodle files have been changed, and you are
about to automatically upgrade your server to this version:
<p><b>$a</b></p>
<p>Once you do this you can not go back again.</p>
<p>Are you sure you want to upgrade this server to this version?</p>';\n"""
phpfile = self.merge2php(phptemplate, posource)
print(phpfile)
assert phpfile == phptemplate
def test_hash_comment(self):
"""check that we convert # comments correctly."""
posource = """#: $variable
msgid "stringy"
msgstr "stringetjie"
"""
phptemplate = """<?php
# inside alt= stuffies
$variable = 'stringy';
"""
phpexpected = """<?php
# inside alt= stuffies
$variable = 'stringetjie';
"""
phpfile = self.merge2php(phptemplate, posource)
print(phpfile)
assert phpfile == phpexpected
def test_arrays(self):
"""check that we can handle arrays"""
posource = """#: $lang->'name'
msgid "value"
msgstr "waarde"
"""
phptemplate = """<?php
$lang = array(
'name' => 'value',
);
"""
phpexpected = """<?php
$lang = array(
'name' => 'waarde',
);
"""
phpfile = self.merge2php(phptemplate, posource)
print(phpfile)
assert phpfile == phpexpected
def test_named_nested_array(self):
"""check that we can handle nested arrays"""
posource = """#: $lang->'codes'->'name'
msgid "value"
msgstr "waarde"
"""
phptemplate = """<?php
$lang = array(
'codes' => array(
'name' => 'value',
),
);
"""
phpexpected = """<?php
$lang = array(
'codes' => array(
'name' => 'waarde',
),
);
"""
phpfile = self.merge2php(phptemplate, posource)
print(phpfile)
assert phpfile == phpexpected
def test_unnamed_nested_arrays(self):
posource = """
#: return->'name1'
msgid "source1"
msgstr "target1"
#: return->'list1'->'l1'
msgid "source_l1_1"
msgstr "target_l1_1"
#: return->'list1'->'list2'->'l1'
msgid "source_l1_l2_l1"
msgstr "target_l1_l2_l1"
#: return->'list1'->'l3'
msgid "source_l1_3"
msgstr "target_l1_3"
#: return->'name2'
msgid "source2"
msgstr "target2"
"""
phptemplate = """<?php
return array(
'name1' => 'source1',
'list1' => array(
'l1' => 'source_l1_1',
'list2' => array(
'l1' => 'source_l1_l2_l1',
),
'l3' => 'source_l1_3',
),
'name2' => 'source2',
);"""
phpexpected = """<?php
return array(
'name1' => 'target1',
'list1' => array(
'l1' => 'target_l1_1',
'list2' => array(
'l1' => 'target_l1_l2_l1',
),
'l3' => 'target_l1_3',
),
'name2' => 'target2',
);\n"""
phpfile = self.merge2php(phptemplate, posource)
print(phpfile)
assert phpfile == phpexpected
    @mark.xfail(reason="Need to review if we want this behaviour")
    def test_merging_propertyless_template(self):
        """check that when merging with a template with no property values we copy the template"""
        posource = ""
        phptemplate = "# A comment\n"
        phpexpected = phptemplate
        phpfile = self.merge2php(phptemplate, posource)
        print(phpfile)
        assert phpfile == phpexpected
class TestPO2PhpCommand(test_convert.TestConvertCommand, TestPO2Php):
"""Tests running actual po2php commands on files"""
convertmodule = po2php
defaultoptions = {"progress": "none"}
def test_help(self, capsys):
"""tests getting help"""
options = super().test_help(capsys)
options = self.help_check(options, "-t TEMPLATE, --template=TEMPLATE")
options = self.help_check(options, "--threshold=PERCENT")
options = self.help_check(options, "--fuzzy")
options = self.help_check(options, "--nofuzzy", last=True)
|
miurahr/translate
|
translate/convert/test_po2php.py
|
Python
|
gpl-2.0
| 11,039
|
# Screen scheduler test classes.
#
# This file is part of Simpleline Text UI library.
#
# Copyright (C) 2020 Red Hat, Inc.
#
# Simpleline is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Simpleline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Simpleline. If not, see <https://www.gnu.org/licenses/>.
#
# Author(s): Jiri Konecny <jkonecny@redhat.com>
#
import unittest
from unittest import mock
from simpleline.event_loop.main_loop import MainLoop
from simpleline.render.screen import UIScreen
from simpleline.render.screen_scheduler import ScreenScheduler
from simpleline.render.screen_stack import ScreenStack, ScreenStackEmptyException
class Scheduler_TestCase(unittest.TestCase):
def setUp(self):
self.stack = None
self.scheduler = None
def create_scheduler_with_stack(self):
self.stack = ScreenStack()
self.scheduler = ScreenScheduler(event_loop=mock.MagicMock(), scheduler_stack=self.stack)
def pop_last_item(self, remove=True):
return self.stack.pop(remove)
def test_create_scheduler(self):
scheduler = ScreenScheduler(MainLoop())
self.assertTrue(isinstance(scheduler._screen_stack, ScreenStack)) # pylint: disable=protected-access
def test_scheduler_quit_screen(self):
def test_callback():
pass
scheduler = ScreenScheduler(MainLoop())
self.assertEqual(scheduler.quit_screen, None)
scheduler.quit_screen = test_callback
self.assertEqual(scheduler.quit_screen, test_callback)
def test_nothing_to_render(self):
self.create_scheduler_with_stack()
self.assertTrue(self.scheduler.nothing_to_render)
self.assertTrue(self.stack.empty())
self.scheduler.schedule_screen(UIScreen())
self.assertFalse(self.scheduler.nothing_to_render)
self.assertFalse(self.stack.empty())
def test_schedule_screen(self):
self.create_scheduler_with_stack()
screen = UIScreen()
self.scheduler.schedule_screen(screen)
test_screen = self.pop_last_item(False)
self.assertEqual(test_screen.ui_screen, screen)
self.assertEqual(test_screen.args, None) # empty field - no arguments
self.assertFalse(test_screen.execute_new_loop)
# Schedule another screen, new one will be added to the bottom of the stack
new_screen = UIScreen()
self.scheduler.schedule_screen(new_screen)
# Here should still be the old screen
self.assertEqual(self.pop_last_item().ui_screen, screen)
# After removing the first we would find the second screen
self.assertEqual(self.pop_last_item().ui_screen, new_screen)
def test_replace_screen_with_empty_stack(self):
self.create_scheduler_with_stack()
with self.assertRaises(ScreenStackEmptyException):
self.scheduler.replace_screen(UIScreen())
def test_replace_screen(self):
self.create_scheduler_with_stack()
old_screen = UIScreen()
screen = UIScreen()
self.scheduler.schedule_screen(old_screen)
self.scheduler.replace_screen(screen)
self.assertEqual(self.pop_last_item(False).ui_screen, screen)
new_screen = UIScreen()
self.scheduler.replace_screen(new_screen)
self.assertEqual(self.pop_last_item().ui_screen, new_screen)
# The old_screen was replaced so the stack is empty now
self.assertTrue(self.stack.empty())
def test_replace_screen_with_args(self):
self.create_scheduler_with_stack()
old_screen = UIScreen()
screen = UIScreen()
self.scheduler.schedule_screen(old_screen)
self.scheduler.replace_screen(screen, "test")
test_screen = self.pop_last_item()
self.assertEqual(test_screen.ui_screen, screen)
self.assertEqual(test_screen.args, "test")
# The old_screen was replaced so the stack is empty now
self.assertTrue(self.stack.empty())
def test_switch_screen_with_empty_stack(self):
self.create_scheduler_with_stack()
screen = UIScreen()
self.scheduler.push_screen(screen)
self.assertEqual(self.pop_last_item().ui_screen, screen)
def test_switch_screen(self):
self.create_scheduler_with_stack()
screen = UIScreen()
new_screen = UIScreen()
self.scheduler.schedule_screen(screen)
self.scheduler.push_screen(new_screen)
test_screen = self.pop_last_item()
self.assertEqual(test_screen.ui_screen, new_screen)
self.assertEqual(test_screen.args, None)
self.assertEqual(test_screen.execute_new_loop, False)
# We popped the new_screen so the old screen should stay here
self.assertEqual(self.pop_last_item().ui_screen, screen)
self.assertTrue(self.stack.empty())
def test_switch_screen_with_args(self):
self.create_scheduler_with_stack()
screen = UIScreen()
self.scheduler.push_screen(screen, args="test")
self.assertEqual(self.pop_last_item(False).ui_screen, screen)
self.assertEqual(self.pop_last_item().args, "test")
@mock.patch('simpleline.render.screen_scheduler.ScreenScheduler._draw_screen')
def test_switch_screen_modal_empty_stack(self, _):
self.create_scheduler_with_stack()
screen = UIScreen()
self.scheduler.push_screen_modal(screen)
self.assertEqual(self.pop_last_item().ui_screen, screen)
@mock.patch('simpleline.render.screen_scheduler.ScreenScheduler._draw_screen')
def test_switch_screen_modal(self, _):
self.create_scheduler_with_stack()
screen = UIScreen()
new_screen = UIScreen()
self.scheduler.schedule_screen(screen)
self.scheduler.push_screen_modal(new_screen)
test_screen = self.pop_last_item()
self.assertEqual(test_screen.ui_screen, new_screen)
self.assertEqual(test_screen.args, None)
self.assertEqual(test_screen.execute_new_loop, True)
@mock.patch('simpleline.render.screen_scheduler.ScreenScheduler._draw_screen')
def test_switch_screen_modal_with_args(self, _):
self.create_scheduler_with_stack()
screen = UIScreen()
self.scheduler.push_screen_modal(screen, args="test")
self.assertEqual(self.pop_last_item(False).ui_screen, screen)
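# --- Hedged usage sketch (not part of the original test module) ---
# The scheduling pattern the tests above exercise, in one place; a real
# application would pass an actual event loop instead of a MagicMock:
#
# stack = ScreenStack()
# scheduler = ScreenScheduler(event_loop=mock.MagicMock(),
#                             scheduler_stack=stack)
# scheduler.schedule_screen(UIScreen())   # queued at the bottom of the stack
# scheduler.push_screen(UIScreen())       # becomes the next screen drawn
# scheduler.replace_screen(UIScreen())    # swaps out the current top screen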
|
rhinstaller/python-simpleline
|
tests/units/main/scheduler_test.py
|
Python
|
gpl-2.0
| 6,810
|
NAME = 'rados'
CFLAGS = []
LDFLAGS = []
LIBS = ['-lrados']
GCC_LIST = ['rados']
import __main__
has_rados_ioctx_pool_requires_alignment2 = __main__.test_snippet("""
#include <rados/librados.h>
int main()
{
rados_ioctx_t ctx = NULL;
rados_ioctx_pool_requires_alignment2(ctx, NULL);
rados_ioctx_pool_required_alignment2(ctx, NULL);
return 0;
}
""", LIBS=['-lrados'])
if has_rados_ioctx_pool_requires_alignment2:
CFLAGS.append('-DHAS_RADOS_POOL_REQUIRES_ALIGNMENT2')
|
chundi/uwsgi
|
plugins/rados/uwsgiplugin.py
|
Python
|
gpl-2.0
| 487
|
def fizz_buzz(n):
fin_list = []
for number in range(1, n + 1):
if number % 3 == 0 and number % 5 == 0:
fin_list.append('fizzbuzz')
elif number % 3 == 0:
fin_list.append('fizz')
elif number % 5 == 0:
fin_list.append('buzz')
else:
fin_list.append(number)
return fin_list
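# --- Hedged demo (not part of the original file) ---
if __name__ == '__main__':
    # multiples of 3 -> 'fizz', of 5 -> 'buzz', of both -> 'fizzbuzz'
    print(fizz_buzz(15))
    # [1, 2, 'fizz', 4, 'buzz', 'fizz', 7, 8, 'fizz', 'buzz', 11, 'fizz',
    #  13, 14, 'fizzbuzz']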
|
jcode89/Iron_Coder_Solutions
|
fizzbuzz.py
|
Python
|
gpl-2.0
| 361
|
# #
# Copyright 2009-2019 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Dispatch function for container packages
:author: Shahzeb Siddiqui (Pfizer)
:author: Kenneth Hoste (HPC-UGent)
:author: Mohamed Abidi (Bright Computing)
"""
from easybuild.base import fancylogger
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.config import build_option
from easybuild.tools.containers.docker import DockerContainer # noqa
from easybuild.tools.containers.singularity import SingularityContainer # noqa
_log = fancylogger.getLogger('tools.containers.common') # pylint: disable=C0103
def containerize(easyconfigs):
"""
Generate container recipe + (optionally) image
"""
_log.experimental("support for generating container recipes and images (--containerize/-C)")
container_type = build_option('container_type')
_log.info("Creating %s container", container_type)
try:
klass = globals()["%sContainer" % container_type.capitalize()]
except KeyError:
raise EasyBuildError("Unknown container type specified: %s", container_type)
klass(easyconfigs).generate()
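# --- Hedged sketch (not part of the original module) ---
# The globals()-based dispatch used in containerize(), reduced to a
# self-contained illustration (the class body below is illustrative; only
# the lookup pattern mirrors the code above):
#
# class DockerContainer(object):
#     def __init__(self, easyconfigs):
#         self.easyconfigs = easyconfigs
#     def generate(self):
#         print('generating docker recipe')
#
# container_type = 'docker'
# klass = globals()['%sContainer' % container_type.capitalize()]
# klass([]).generate()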
|
gppezzi/easybuild-framework
|
easybuild/tools/containers/common.py
|
Python
|
gpl-2.0
| 2,113
|
from utility.timestamp import TimestampedValue
from utility.enums import BOOL
from entity import Entity
# Turrets, inhib, nexus, drake, nash, jungle monsters
class Objective(Entity):
def __init__(self):
Entity.__init__(self)
self.isUp = TimestampedValue('i', BOOL.UNKNOWN)
|
fl4v/botlane
|
world_model/objective.py
|
Python
|
gpl-2.0
| 295
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Cut out and concatenate sections of a file
# access as pycut.py from mythDVBcut.sh
import sys, os
#print sys.argv
######################
## For tests
##
## echo "0123456789A123456789B123456789C123456789D123456789E123456789F123456789" > ~/test.txt
##
## fn1 = './test.txt'
## fn2 = './temp.txt'
## chunks = [ 3, 12, 35, 47, 53, 68 ]
## buflen = 5
## Doesn't recognise '~/test.txt' but this, or full path, seems ok
## python pycut.py './test.txt' './temp.txt' 3 12 35 47 53 68
# Zambesi HD
#./printcutlist /home/john/Mythrecs/1054_20120328222600.mpg
# Generates byte-mode cutlist for use with Project-X - and here
#CollectionPanel.CutMode=0
#fn1 = '/mnt/f10store/myth/reca/1054_20120323002600old.mpg'
#fn2 = '/mnt/sam1/recb/1054_20120323002600.mpg'
#chunks = [ 390284804, 4556742872 ]
#buflen = 1024*1024
#
########################
fn1 = sys.argv[1] # input file
fn2 = sys.argv[2] # output file
chunks = map( int, sys.argv [ 3 : ] ) # start and end bytes of chunks in infile
buflen = 1024*1024
#bignum = 10000000000 # for use as EOF if needed
# less likely to be surprised if we use the actual filesize here
print "infile ", fn1
print "outfile ", fn2
print "switchpoints ", chunks
#######################
# sanity checks
chunklen = len(chunks)
if chunklen % 2 != 0 :
# chunks.append(bignum)
chunks.append( 1 + os.path.getsize(fn1))
chunklen = len(chunks)
# adjust chunk-endpoints in the hope of keeping chain linkage in the data intact
n = 1
while n < chunklen :
chunks[n] += -1
n += 2
n=0
while n < chunklen - 2 :
if chunks[n] > chunks[n+1] :
print "Quitting: switchpoints out of order"
sys.exit(98)
n += 1
print "Adjusted switchpoints ", chunks
n = 0
m = 0
offset = [ 0 ]
while n < chunklen - 1 :
m += 1 + chunks[ n+1 ] - chunks[ n ]
offset.append( m )
n += 2
print
print "Byte offsets of cutpoints in output file: ", offset
print "DB table is recordedseek, mark (framecount) is type 9."
##################################
# Don't touch stuff below here
## byte numbering starts at 0 and output includes both chunk-endpoints
i=0
j=0
imax = 40 # buffers per star
jmax = 25 # stars per line
print # for progress display
chnklim = len(chunks) - 1
nchnk = 0
chstart=chunks[nchnk]
chend=chunks[nchnk + 1]
bufstart = 0
f1 = open(fn1, 'rb')
f2 = open(fn2, 'wb')
while True :
data = f1.read(buflen)
lendat = len(data)
if lendat == 0 :
break
bufend = bufstart + lendat
while chstart < bufend :
if chend < bufend :
f2.write(data[chstart - bufstart : chend - bufstart + 1 ])
nchnk += 2
if nchnk > chnklim : # job done
chstart = bufend + buflen*2 # kill further looping
break
chstart = chunks[nchnk]
chend = chunks[nchnk + 1]
else :
f2.write(data[chstart - bufstart : ])
chstart = bufend
bufstart += lendat
i += 1 # progress display
if i > imax :
sys.stdout.write("*")
sys.stdout.flush()
i = 0
j += 1
if j > jmax :
print
j = 0
f1.close()
f2.close()
print
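# --- Hedged worked example (not part of the original script) ---
# With the test data above, chunks = [3, 12, 35, 47, 53, 68]. The endpoint
# adjustment decrements each end value, giving [3, 11, 35, 46, 53, 67], and
# the offset loop then accumulates the kept chunk lengths:
#   offset[0] = 0
#   offset[1] = 0  + (1 + 11 - 3)  = 9    # bytes 3..11 of the input
#   offset[2] = 9  + (1 + 46 - 35) = 21   # bytes 35..46
#   offset[3] = 21 + (1 + 67 - 53) = 36   # bytes 53..67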
|
frederickjh/mythdvbcut
|
pycut.py
|
Python
|
gpl-2.0
| 3,274
|
#!/usr/bin/env python
import sys
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.Alphabet import generic_dna
from optparse import OptionParser
import random
import patterns
def letter2num(c):
    # map a nucleotide letter to a digit character; anything else maps to '4'
    if c == 'A':
        elem = '0'
    elif c == 'T':
        elem = '1'
    elif c == 'G':
        elem = '2'
    elif c == 'C':
        elem = '3'
    else:
        elem = '4'
    return elem
parser = OptionParser()
parser.add_option('-k', '--klength', dest = 'klength', help = "the length of kmer")
parser.add_option('-n', '--num', dest = 'num', help = "the amount of queries")
parser.add_option('-o', '--output', dest = 'output', help = "the filename of box query file")
parser.add_option('-r', '--read', dest = 'read', help = " the filename of read")
parser.add_option('-b', '--boxsize', dest = 'boxsize', help = "size of box query")
parser.add_option('-p', '--pattern', dest = 'pattern', help = "pattern id")
(options, args) = parser.parse_args(sys.argv[1:])
klength = int(options.klength)
num = int(options.num)
read_filename = options.read
if read_filename[-1] == 'q':
formatStr = "fastq"
else:
formatStr = "fasta"
query_filename = options.output
boxsize = int(options.boxsize)
patternId = options.pattern
alphabet = ['A', 'T', 'G', 'C']
query_file = open(query_filename, 'w')
patternDistributionFile = open("pattern_dist" + str(klength), 'w')
record_list = list(SeqIO.parse(read_filename,formatStr))
maxIndex = len(record_list) - 1
query_list = []
pHash = {}
i = 0
while i < num:
    # pick up a read randomly; randrange excludes its stop value, so use
    # maxIndex + 1 to make the last read selectable as well
    readIndex = random.randrange(0, maxIndex + 1)
    record = record_list[readIndex]
    seqStr = str(record.seq[:])
    end = len(seqStr) - klength
    query = ''
    # valid k-mer start positions run from 0 to end inclusive
    r = random.randrange(0, end + 1)
    kmer = seqStr[r : r+klength]
#print kmer
if patternId != None:
pid = patterns.getPattern(kmer)
if pid in pHash:
pHash[pid] += 1
else:
pHash[pid] = 1
#print "############################ pid = " + str(pid)
if pid != int(patternId) and int(patternId) != 0:
#i += 1 # result query number may not equal to num !!!
continue
box = random.randrange(r, r+klength)
for p in range(r, r+klength):
if boxsize >= 2 and p == box:
query += "("
for t in range(0, boxsize):
query += alphabet[t]
query += ")"
else:
query += seqStr[p]
    # id is very important for multiple K, but name and desc are optional
    # and only there for the user to check query info
query_seq = SeqIO.SeqRecord(Seq(query,generic_dna), id = str(i), description="dim="+str(klength))
query_list.append(query_seq)
i += 1
SeqIO.write(query_list, query_file, "fasta");
query_file.close()
for key in pHash:
patternDistributionFile.write(str(key) + " " + str(pHash[key]) + "\n")
patternDistributionFile.close()
#print aln_ref+'\n'+aln_sample
|
updownlife/multipleK
|
bin/random_boxquery/random_fast_boxquery.py
|
Python
|
gpl-2.0
| 2,845
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
stdm
A QGIS plugin
Securing land and property rights for all
-------------------
begin : 2014-03-04
copyright : (C) 2014 by GLTN
email : njoroge.solomon@yahoo.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from stdm.data import FilePaths
from PyQt4.QtCore import *
from PyQt4.QtGui import *
class LicenseDocument(object):
def __init__(self):
self.file = None
self.filehandler = FilePaths()
def open_license_file(self):
'''get the path to the license file'''
self.file = self.filehandler.STDMLicenseDoc()
#self.file=docFile
    def read_license_info(self):
        '''read license information for the user'''
        try:
            self.open_license_file()
            with open(self.file, 'r') as inf:
                lic_data = inf.read()
                return lic_data
        except IOError as ex:
            raise ex
    def text_font(self):
        '''set the document font'''
        doc_font = QFont('Helvetica [Cronyx]', 10, QFont.Bold)
        #docFont.setBold(True)
        return doc_font
|
olivierdalang/stdm
|
data/license_doc.py
|
Python
|
gpl-2.0
| 1,999
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 Albatos s.r.l. (<http://www.albatos.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Tax Stamp",
"version": "1.0",
"depends": ["account"],
"author": "Agile Business Group",
"description": """Tax Stamp as invoice lines. This module allows to specify which invoice lines should be used for computing the taxable amount""",
'website': 'http://www.agilebg.com',
'init_xml': [
'invoice_view.xml'],
'update_xml': [
],
'demo_xml': [],
'installable': True,
'active': False,
}
|
syci/domsense-agilebg-addons
|
tax_stamp/__terp__.py
|
Python
|
gpl-2.0
| 1,444
|
import sys
import inspect
import os.path
class TestCase(object):
'''Base class for test cases'''
def __init__(self):
self.errors = False
    def run(self, tr):
        '''tr is a TestRunner instance.'''
        raise NotImplementedError()
def assert_equals(self, a, b):
if a != b:
caller = inspect.stack()[-2]
print "%s:%s in %s" % caller[1:4]
print " %s" % caller[-2][0].strip()
print " value is " + repr(a) + ", expected " + repr(b)
self.errors = True
def assert_range(self, a, minval, maxval):
if a < minval or a > maxval:
caller = inspect.stack()[-2]
print "%s:%s in %s" % caller[1:4]
print " %s" % caller[-2][0].strip()
print " value is " + repr(a) + ", expected to be in range " + repr(minval) + " to " + repr(maxval)
self.errors = True
class TestBasicVideo(TestCase):
def run(self, tr):
params = {
'COMPRESSION': 'jpegenc',
'CONTAINER': 'avimux',
'AUDIOCOMPRESSION': 'avenc_ac3',
'NUM_BUFFERS': '256',
'LIPSYNC': '-1',
'PRE_WHITE_DURATION':'5000',
'PRE_MARKS_DURATION':'0',
'POST_WHITE_DURATION':'0',
'OUTPUT': 'output.avi'
}
r = tr.run_test(params)
self.assert_equals(r['demuxer'], 'avidemux')
self.assert_equals(r['video_decoder'], 'jpegdec')
self.assert_equals(r['audio_decoder'], 'a52dec')
self.assert_equals(r['resolution'], [1920, 1080])
self.assert_equals(r['framerate'], 24.0)
self.assert_equals(r['markers_found'], 27)
self.assert_equals(r['markers'][3]['interval'], 1)
self.assert_equals(r['markers'][4]['interval'], 2)
self.assert_equals(r['markers'][5]['interval'], 4)
self.assert_equals(r['markers'][6]['interval'], 8)
self.assert_equals(r['markers'][7]['interval'], 16)
self.assert_equals(r['markers'][8]['interval'], 32)
self.assert_equals(r['markers'][9]['interval'], 64)
self.assert_equals(r['markers'][10]['interval'], 128)
self.assert_range(r['video_structure']['header_frames'], 80, 300)
self.assert_equals(r['video_structure']['content_frames'], 256)
self.assert_equals(r['video_structure']['trailer_frames'], 0)
self.assert_equals(r['lipsync']['audio_markers'], 0)
self.assert_equals(r['lipsync']['video_markers'], 0)
self.assert_equals(r['warnings'], [])
class TestLipsync(TestCase):
def run(self, tr):
params = {
'COMPRESSION': 'x264enc speed-preset=2',
'CONTAINER': 'qtmux',
'AUDIOCOMPRESSION': 'identity',
'NUM_BUFFERS': '240',
'LIPSYNC': '2000',
'PRE_WHITE_DURATION':'5000',
'PRE_MARKS_DURATION':'0',
'POST_WHITE_DURATION':'5000',
'OUTPUT': 'output.mov',
'PREPROCESS': '! videoscale ! video/x-raw,width=640,height=480',
'LAYOUT': os.path.join(tr.tvg_path, "layout_fpsonly.bmp")
}
r = tr.run_test(params)
self.assert_equals(r['demuxer'], 'qtdemux')
self.assert_equals(r['video_decoder'], 'avdec_h264')
self.assert_equals(r['audio_decoder'], '(null)')
self.assert_equals(r['resolution'], [640, 480])
self.assert_equals(r['framerate'], 24.0)
self.assert_equals(r['markers_found'], 1)
self.assert_range(r['video_structure']['header_frames'], 80, 300)
self.assert_equals(r['video_structure']['content_frames'], 240)
self.assert_range(r['video_structure']['trailer_frames'], 80, 300)
self.assert_equals(r['lipsync']['audio_markers'], 5)
self.assert_equals(r['lipsync']['video_markers'], 5)
self.assert_range(r['lipsync']['audio_delay_min_ms'], -1.0, 1.0)
self.assert_range(r['lipsync']['audio_delay_max_ms'], -1.0, 1.0)
self.assert_equals(r['warnings'], [])
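# --- Hedged sketch (not part of the original module) ---
# TestRunner is defined elsewhere; the cases above only rely on
# tr.run_test(params) returning a result dict and on tr.tvg_path. A purely
# hypothetical stub of that shape is enough to drive a TestCase:
#
# class StubRunner(object):
#     tvg_path = '.'
#     def run_test(self, params):
#         return canned_result   # dict shaped like the ones asserted above
#
# tc = TestBasicVideo()
# tc.run(StubRunner())
# print 'errors found' if tc.errors else 'all assertions passed'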
|
OptoFidelity/TVG
|
tests/testcases.py
|
Python
|
gpl-2.0
| 3,815
|
"""capisuite.core
This module exposes the built-in core of capisuite.
"""
__author__ = "Hartmut Goebel <h.goebel@crazy-compilers.com>"
__copyright__ = "Copyright (c) 2004 by Hartmut Goebel"
__version__ = "$Revision: 0.0 $"
__credits__ = "This file is part of www.capisuite.de; thanks to Gernot Hillier"
__license__ = """
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
# _capisuite may only be imported when running within capisuite
try:
# import all capisuite symbols in the namespace "_capisuite.symbol"
import _capisuite
# now add symbols directly used by the scripts to our namespace
from _capisuite import log,error,SERVICE_VOICE,SERVICE_FAXG3,CallGoneError
except ImportError:
pass
#########
###
### ATTENTION: This interface is not yet stable. You may expect
### changes until capisuite 0.5 is released!
###
#########
class Capi:
def __init__(self, handle):
"""
handle: a capi handle as received from _capisuite (given to the idle
function as parameter)
"""
self._handle = handle
def __repr__(self):
return 'Capi(%(_handle)s)' % self.__dict__
def call_voice(self, controller, call_from, call_to,
timeout, clir=0):
"""
Initiate an outgoing call with service voice and wait for
successful connection.
This will initiate an outgoing call and choose voice as
service, so you can use the audio commands (like audio_receive()
and audio_send()) with this connection. After this command has
finished, the call is connected successfully or the given
timeout has exceeded. The timeout is measured beginning at the
moment when the call is signalled (it's "ringing") to the
called party.
Parameters:
controller: ISDN controller ID to use
call_from: own number to use (string)
call_to: the number to call (string)
timeout: timeout in seconds to wait for connection establishment
clir: disable sending of own number (default=0, send number)
        Returns a (call, error) pair: (Call instance, None) on success,
        (None, error_code) on failure.
"""
call, result = _capisuite.call_voice(self._handle, controller,
call_from, call_to,
timeout, clir)
if result:
return None, result
return Call(call, SERVICE_VOICE, call_from, call_to), None
def call_faxG3(self, controller, call_from, call_to,
timeout, stationID, headline, clir=0):
"""
Initiate an outgoing call with service faxG3 and wait for
successful connection.
This will initiate an outgoing call and choose fax group 3 as
service, so you can use the fax commands (like fax_send() and
fax_receive()) with this connection. After this command has
finished, the call is connected successfully or the given
timeout has exceeded. The timeout is measured beginning at the
moment when the call is signalled (it's "ringing") to the
called party.
Parameters:
controller: ISDN controller ID to use
call_from: own number to use (string)
call_to: the number to call (string)
timeout: timeout in seconds to wait for connection establishment
        stationID: fax station ID (string)
        headline: fax headline to print on every page (string)
clir: disable sending of own number (default=0, send number)
        Returns a (call, error) pair: (Call instance, None) on success,
        (None, error_code) on failure.
"""
call, result = _capisuite.call_faxG3(self._handle, controller,
call_from, call_to,
timeout, stationID, headline)
if result:
return None, result
return Call(call, SERVICE_FAXG3, call_from, call_to), None
class Call:
def __init__(self, handle, service, call_from, call_to):
"""
handle: a call handle as received from _capisuite
NB: A Call instance is never True to ease testing results from
Capi.call_...()
"""
self._handle = handle
self.service = service
self.from_nr = call_from
self.to_nr = call_to
###--- python stuff --###
def __nonzero__(self):
# 'if Call()' must never be true to allow easier results from
# Capi.call_...()
return 0
def __str__(self):
return str(self._handle)
    def __repr__(self):
        return ('Call(%(_handle)s, service=%(service)s, '
                'from_nr=%(from_nr)s, to_nr=%(to_nr)s)') % self.__dict__
###--- general --###
def disconnect(self):
"""
Disconnect connection.
This will cause an immediate disconnection. It should be
always the last command in every flow of a script.
Returns a tuple of two result values. The first is the
disconnect cause of the physical connection, the second the
disconnect cause of the logical connection. See CAPI spec for
the logical causes and ETS 300 102-01 for the physical causes.
"""
result = _capisuite.disconnect(self._handle)
return result
def reject(self, rejectCause):
"""
Reject an incoming call.
If you don't want to accept an incoming call for any reason
(e.g. if it has a service or comes from a number you don't
want to accept), use this command. There are several reasons
you can give when rejecting a call. Some important ones are:
rejectCause: cause to signal when rejecting call. This may be one of
1 = ignore call
2 = normal call clearing
3 = user busy
7 = incompatible destination
8 = destination out of order
0x34A9 = temporary failure
"""
_capisuite.reject(self._handle, rejectCause)
def log(self, message, level):
"""
Log a connection dependent message.
This function writes a message to the CapiSuite log. As all messages
written with it are prefixed with the current call reference, you
should use it for connection-dependant messages (e.g. information about
handling *this* call).
If you want to log messages of general nature not associated with a
certain call (e.g. problem in reading configuration files), please use
core.log instead.
message: the log message to be written
level: parameter for CapiSuite log_level used (0=vital .. 3=debug info)
"""
_capisuite.log(message, level, self._handle)
###--- DTMF support --###
def enable_DTMF(self):
"""
Enable recognition of DTMF tones.
"""
_capisuite.enable_DTMF(self._handle)
def disable_DTMF(self):
"""
Disable recognition of DTMF tones.
"""
_capisuite.disable_DTMF(self._handle)
def read_DTMF(self, timeout, min_digits=0, max_digits=0):
"""
Read the received DTMF tones or wait for a certain amount of
them.
This function allows to just read in the DTMF tones which were
already received. But it also supports to wait for a certain
amount of DTMF tones if you want the user to always input some
digits at a certain step in your script.
You can specify how much DTMF tones you want in several ways -
see the parameter description. To just see if something was
entered before, use capisuite.read_DTMF(0). If you want to get
at least 1 and mostly 4 digits and want to wait 5 seconds for
additional digits, you'll use capisuite.read_DTMF(5,1,4).
Valid DTMF characters are '0'...'9','A'...'D' and two special
fax tones: 'X' (CNG), 'Y' (CED)
timeout: timeout in seconds after which reading is terminated;
only applied after min_digits have been read! (-1 =
infinite)
        min_digits: minimum number of digits which must be read in ANY
                    case, i.e. the timeout doesn't count here (default: 0)
        max_digits: maximum number of digits to read; aborts
                    immediately once enough digits are read (default:
                    0=infinite, i.e. wait until timeout is reached)
Returns a string containing the characters read.
"""
        # todo: describe what A...D means and where '#' and '*' go
return _capisuite.read_DTMF(self._handle, timeout,
min_digits, max_digits)
###--- voice calls ---###
def connect_voice (self, delay=0):
"""
Accept an incoming call and connect with voice service.
This will accept an incoming call and choose voice as service,
so you can use the audio commands (like audio_receive() and
audio_send()) with this connection. After this command has
finished, the call is connected successfully.
It's also possible to accept a call with some delay. This is
useful for an answering machine if you want to fetch a call
with your phone before your computer answers it.
delay: delay in seconds _before_ connection will be established
(default: 0=immediate connect)
"""
_capisuite.connect_voice(self._handle, delay)
def audio_receive(self, filename, timeout, silence_timeout=0,
exit_DTMF=0):
"""
Receive an audio file in a speech mode connection.
This functions receives an audio file. It can recognize
silence in the signal and timeout after a given period of
silence, after a general timeout or after the reception of a
DTMF signal.
If the recording was finished because of silence_timeout, the
silence will be truncated away.
If DTMF abort is enabled, the command will also abort
immediately if DTMF was received before it is called. This
allows to abort subsequent audio receive and send commands
with one DTMF signal w/o the need to check for received DTMF
after each command.
        The connection must be in audio mode (see connect_voice()),
        otherwise an exception will be raised.
The created file will be saved in bit-reversed A-Law format, 8
kHz mono. Use sox to convert it to a normal wav file.
filename: where to save the received message.
timeout: receive length in seconds (-1 = infinite).
silence_timeout: abort after x seconds of silence (default: no timeout)
exit_DTMF: abort sending when a DTMF signal is received (default: 0)
Returns duration of receiving in seconds.
"""
return _capisuite.audio_receive(self._handle, filename, timeout,
silence_timeout, exit_DTMF)
def audio_send(self, filename, exit_DTMF=0):
"""
Send an audio file in a speech mode connection.
        This function sends an audio file, which must be in
        bit-reversed A-Law format. Such files can be created with e.g.
        sox using the suffix ".la". Sending is aborted if a DTMF
        signal is received.
If DTMF abort is enabled, the command will also abort
immediately if DTMF was received before it is called. That
allows you to abort subsequent audio receive and send commands
with one DTMF signal w/o the need to check for received DTMF
after each command.
        The connection must be in audio mode (use connect_voice()),
        otherwise an exception will be raised.
filename: file to send
exit_DTMF: abort sending when a DTMF signal is received (default: 0)
Returns duration of send in seconds.
"""
return _capisuite.audio_send(self._handle, filename, exit_DTMF)
def switch_to_faxG3(self, faxStationID, faxHeadline):
"""
Switch a connection from voice mode to fax mode.
This will switch from voice mode to fax group 3 after you have
connected, so you can use the fax commands afterwards.
Attention: Not all ISDN cards or CAPI driver support this
command.
faxStationID: the station ID to use (string)
faxHeadline: the fax headline to use (string)
Returns a FaxInfo instance.
"""
faxInfo = _capisuite.switch_to_faxG3(self._handle,
faxStationID, faxHeadline)
self.service = SERVICE_FAXG3
log('faxinfo: %s' % repr(faxInfo), 3)
if not faxInfo:
return FaxInfo()
return FaxInfo(*faxInfo)
###--- fax calls --###
def connect_faxG3(self, faxStationID, faxHeadline, delay=0):
"""
Accept an incoming call and connect with fax (analog, group 3) service.
This will accept an incoming call and choose fax group 3 as
service, so you can use the fax commands (like fax_receive())
with this connection. After this command has finished, the
call is connected successfully.
        It's also possible to accept a call with some delay. This is
        useful if, e.g., you want to fetch a call with your phone
        before your computer answers it.
faxStationID: the station ID to use (string)
faxHeadline: the fax headline to use (string)
delay: delay in seconds _before_ connection will be established
(default: 0=immediate connect)
Returns a FaxInfo instance.
"""
faxInfo = _capisuite.connect_faxG3(self._handle, faxStationID,
faxHeadline, delay)
log('faxinfo: %s' % repr(faxInfo), 3)
if not faxInfo:
return FaxInfo()
return FaxInfo(*faxInfo)
def fax_send(self, faxfilename):
"""
Send a fax in a fax mode connection.
This command sends an analog fax (fax group 3). It starts the
send and waits for the end of the connection. So it should be
the last command before disconnect().
        The connection must be in fax mode (use capi.call_faxG3() or
        call.switch_to_faxG3()), otherwise an exception will be raised.
The file to be sent must be in the Structured Fax File (SFF)
format.
faxfilename: file to send
"""
faxInfo = _capisuite.fax_send(self._handle, faxfilename)
log('faxinfo: %s' % repr(faxInfo), 3)
if not faxInfo:
return FaxInfo()
return FaxInfo(*faxInfo)
def fax_receive(self, filename):
"""
Receive a fax in a fax mode connection.
This command receives an analog fax (fax group 3). It starts
the reception and waits for the end of the connection. So it
should be the last command before disconnect().
        The connection must be in fax mode (use capi.call_faxG3() or
        call.switch_to_faxG3()), otherwise an exception will be raised.
The created file will be saved in the Structured Fax File
(SFF) format.
filename: where to save the received fax.
"""
faxInfo = _capisuite.fax_receive(self._handle, filename)
log('faxinfo: %s' % repr(faxInfo), 3)
if not faxInfo:
return FaxInfo()
return FaxInfo(*faxInfo)
class FaxInfo:
def __init__(self, stationID='', rate=0, hiRes=0, format=0, numPages=0):
self.stationID = stationID
self.bitRate = rate
self.hiRes = hiRes
self.resolution = hiRes and "hiRes" or "loRes"
# cff: color fax; sff: normal black-and-white fax
self.format = format and 'cff' or 'sff'
self.color = format and 'color' or 'b&w'
self.numPages = numPages
def as_dict(self):
d = {}
for a in ('stationID', 'bitRate', 'resolution',
'hiRes', 'format', 'color', 'numPages'):
d[a] = getattr(self, a)
return d
# implemented in _capisuite:
#
#def error(...):
# pass
#def log(...):
# pass
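# --- Hedged usage sketch (not part of the original module) ---
# A minimal outgoing-call flow built from the docstrings above; it only makes
# sense inside capisuite, where _capisuite passes a capi handle to the idle
# function (the numbers and file names below are illustrative):
#
# capi = Capi(handle)
# call, error = capi.call_voice(controller=1, call_from='300',
#                               call_to='301', timeout=20)
# if call:
#     call.audio_send('announcement.la')   # bit-reversed A-Law file
#     call.enable_DTMF()
#     digits = call.read_DTMF(timeout=5, min_digits=1, max_digits=4)
#     call.audio_receive('message.la', timeout=60, silence_timeout=3)
#     call.disconnect()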
|
larsimmisch/capisuite
|
src/capisuite-py/core.py
|
Python
|
gpl-2.0
| 16,528
|
# -*- coding: utf-8 -*-
"""
brickv (Brick Viewer)
Copyright (C) 2011-2015 Olaf Lüke <olaf@tinkerforge.com>
Copyright (C) 2012 Bastian Nordmeyer <bastian@tinkerforge.com>
Copyright (C) 2012-2015 Matthias Bolte <matthias@tinkerforge.com>
flashing.py: GUI for flashing features
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
"""
from brickv.ui_flashing import Ui_Flashing
from brickv.bindings.brick_master import BrickMaster
from brickv.bindings.ip_connection import IPConnection, Error, base58encode, \
base58decode, BASE58, uid64_to_uid32
from brickv.imu_calibration import parse_imu_calibration, IMU_CALIBRATION_URL
from PyQt4.QtCore import Qt, QTimer
from PyQt4.QtGui import QApplication, QColor, QDialog, QMessageBox, \
QProgressDialog, QStandardItemModel, QStandardItem, QBrush
from brickv.samba import SAMBA, SAMBAException, SAMBARebootError, get_serial_ports
from brickv.infos import get_version_string
from brickv.utils import get_main_window, get_home_path, get_open_file_name, \
get_modeless_dialog_flags
from brickv.esp_flash import ESPROM
from brickv import infos
import zipfile
import os
import urllib2
import time
import struct
import math
import traceback
from serial import SerialException
LATEST_VERSIONS_URL = 'http://download.tinkerforge.com/latest_versions.txt'
FIRMWARE_URL = 'http://download.tinkerforge.com/firmwares/'
SELECT = 'Select...'
CUSTOM = 'Custom...'
NO_BRICK = 'No Brick found'
NO_EXTENSION = 'No Extension found'
NO_BOOTLOADER = 'No Brick in Bootloader found'
def error_to_name(e):
if e.value == Error.TIMEOUT:
return 'Timeout'
elif e.value == Error.NOT_CONNECTED:
return 'No TCP/IP connection'
elif e.value == Error.INVALID_PARAMETER:
return 'Invalid parameter'
elif e.value == Error.NOT_SUPPORTED:
return 'Not supported'
else:
return e.message
class ProgressWrapper(object):
def __init__(self, progress):
self.progress = progress
def reset(self, title, length):
self.progress.setLabelText(title)
self.progress.setMaximum(length)
self.progress.setValue(0)
self.progress.show()
def update(self, value):
self.progress.setValue(value)
QApplication.processEvents()
def cancel(self):
self.progress.cancel()
def setMaximum(self, value):
self.progress.setMaximum(value)
class FlashingWindow(QDialog, Ui_Flashing):
def __init__(self, parent):
QDialog.__init__(self, parent, get_modeless_dialog_flags())
self.setupUi(self)
self.tool_infos = {}
self.firmware_infos = {}
self.plugin_infos = {}
self.brick_infos = []
self.extension_infos = []
self.refresh_updates_pending = False
self.parent = parent
self.tab_widget.currentChanged.connect(self.tab_changed)
self.button_serial_port_refresh.clicked.connect(self.refresh_serial_ports)
self.combo_firmware.currentIndexChanged.connect(self.firmware_changed)
self.button_firmware_save.clicked.connect(self.firmware_save_clicked)
self.button_firmware_browse.clicked.connect(self.firmware_browse_clicked)
self.button_uid_load.clicked.connect(self.uid_load_clicked)
self.button_uid_save.clicked.connect(self.uid_save_clicked)
self.combo_brick.currentIndexChanged.connect(self.brick_changed)
self.combo_port.currentIndexChanged.connect(self.port_changed)
self.combo_plugin.currentIndexChanged.connect(self.plugin_changed)
self.button_plugin_save.clicked.connect(self.plugin_save_clicked)
self.button_plugin_browse.clicked.connect(self.plugin_browse_clicked)
self.combo_extension.currentIndexChanged.connect(self.extension_changed)
self.combo_extension_firmware.currentIndexChanged.connect(self.extension_firmware_changed)
self.button_extension_firmware_save.clicked.connect(self.extension_firmware_save_clicked)
self.button_extension_firmware_browse.clicked.connect(self.extension_firmware_browse_clicked)
infos.get_infos_changed_signal().connect(self.update_bricks)
self.label_update_tool.hide()
self.label_no_update_connection.hide()
self.label_no_firmware_connection.hide()
self.label_no_plugin_connection.hide()
self.label_no_extension_firmware_connection.hide()
self.refresh_serial_ports()
self.combo_firmware.addItem(CUSTOM)
self.combo_firmware.setDisabled(True)
self.firmware_changed(0)
self.combo_plugin.addItem(CUSTOM)
self.combo_plugin.setDisabled(True)
self.plugin_changed(0)
self.combo_extension_firmware.addItem(CUSTOM)
self.combo_extension_firmware.setDisabled(True)
self.extension_firmware_changed(0)
self.brick_changed(0)
self.extension_changed(0)
self.update_tree_view_model_labels = ['Name', 'UID', 'Installed', 'Latest']
self.update_tree_view_model = QStandardItemModel(self)
self.update_tree_view.setModel(self.update_tree_view_model)
self.update_tree_view.setSortingEnabled(True)
self.update_tree_view.header().setSortIndicator(0, Qt.AscendingOrder)
self.update_button_refresh.clicked.connect(self.refresh_updates_clicked)
self.update_button_bricklets.clicked.connect(self.auto_update_bricklets_clicked)
self.update_ui_state()
self.update_bricks()
self.update_extensions()
def refresh_latest_version_info(self, progress):
self.tool_infos = {}
self.firmware_infos = {}
self.plugin_infos = {}
self.combo_firmware.clear()
self.combo_plugin.clear()
self.combo_firmware.setDisabled(False)
self.combo_plugin.setDisabled(False)
progress.setLabelText('Discovering latest versions on tinkerforge.com')
progress.setMaximum(0)
progress.setValue(0)
progress.show()
okay = True
try:
response = urllib2.urlopen(LATEST_VERSIONS_URL, timeout=10)
latest_versions_data = response.read()
response.close()
except urllib2.URLError:
okay = False
progress.cancel()
self.combo_firmware.setDisabled(True)
self.combo_plugin.setDisabled(True)
self.popup_fail('Updates / Flashing', 'Latest version information on tinkerforge.com is not available (error code 1). Please report this to info@tinkerforge.com.\n\nFirmwares and plugins can be flashed from local files only.')
if okay:
def report_malformed(error_code):
progress.cancel()
self.combo_firmware.setDisabled(True)
self.combo_plugin.setDisabled(True)
self.popup_fail('Updates / Flashing', 'Latest version information on tinkerforge.com is malformed (error code {0}). Please report this to info@tinkerforge.com.\n\nFirmwares and plugins can be flashed from local files only.'.format(error_code))
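            # Each line of the latest-versions data is expected to have the
            # form '<kind>:<url_part>:<major>.<minor>.<patch>', where <kind>
            # is 'tools', 'bricks' or 'bricklets'. Illustrative example
            # (hypothetical values): 'bricks:master:2.3.4'.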
for line in latest_versions_data.split('\n'):
line = line.strip()
if len(line) < 1:
continue
parts = line.split(':')
if len(parts) != 3:
okay = False
report_malformed(2)
break
latest_version_parts = parts[2].split('.')
if len(latest_version_parts) != 3:
okay = False
report_malformed(3)
break
try:
latest_version = int(latest_version_parts[0]), int(latest_version_parts[1]), int(latest_version_parts[2])
except:
okay = False
report_malformed(4)
break
if parts[0] == 'tools':
tool_info = infos.ToolInfo()
tool_info.firmware_version_latest = latest_version
self.tool_infos[parts[1]] = tool_info
elif parts[0] == 'bricks':
self.refresh_firmware_info(parts[1], latest_version)
elif parts[0] == 'bricklets':
self.refresh_plugin_info(parts[1], latest_version)
if okay:
# update combo_firmware
if len(self.firmware_infos) > 0:
self.combo_firmware.addItem(SELECT)
self.combo_firmware.insertSeparator(self.combo_firmware.count())
for firmware_info in sorted(self.firmware_infos.values(), key=lambda x: x.name):
name = '{0} ({1}.{2}.{3})'.format(firmware_info.name, *firmware_info.firmware_version_latest)
self.combo_firmware.addItem(name, firmware_info.url_part)
if self.combo_firmware.count() > 0:
self.combo_firmware.insertSeparator(self.combo_firmware.count())
# update combo_plugin
if len(self.plugin_infos) > 0:
self.combo_plugin.addItem(SELECT)
self.combo_plugin.insertSeparator(self.combo_plugin.count())
for plugin_info in sorted(self.plugin_infos.values(), key=lambda x: x.name):
name = '{0} ({1}.{2}.{3})'.format(plugin_info.name, *plugin_info.firmware_version_latest)
self.combo_plugin.addItem(name, plugin_info.url_part)
if self.combo_plugin.count() > 0:
self.combo_plugin.insertSeparator(self.combo_plugin.count())
self.combo_firmware.addItem(CUSTOM)
self.firmware_changed(0)
self.combo_plugin.addItem(CUSTOM)
self.plugin_changed(0)
self.update_ui_state()
def refresh_firmware_info(self, url_part, latest_version):
name = url_part
if name.endswith('_v2'):
name = name.replace('_v2', '_2.0')
if name in ['dc', 'imu', 'imu_2.0']:
name = name.upper()
words = name.split('_')
parts = []
for word in words:
parts.append(word[0].upper() + word[1:])
name = ' '.join(parts)
firmware_info = infos.FirmwareInfo()
firmware_info.name = name
firmware_info.url_part = url_part
firmware_info.firmware_version_latest = latest_version
self.firmware_infos[url_part] = firmware_info
def refresh_plugin_info(self, url_part, latest_version):
name = url_part
if name.endswith('_v2'):
name = name.replace('_v2', '_2.0')
if name in ['gps', 'ptc', 'rs232']:
name = name.upper()
elif name.startswith('lcd_'):
name = name.replace('lcd_', 'LCD_')
if url_part.startswith('lcd_20x4_'):
name = name.replace('_v11', '_1.1').replace('_v12', '_1.2')
elif name.startswith('io'):
name = name.replace('io', 'IO-')
elif name.endswith('_ir'):
name = name.replace('_ir', '_IR')
elif name.endswith('_us'):
name = name.replace('_us', '_US')
elif name.startswith('led_'):
name = name.replace('led_', 'LED_')
words = name.split('_')
parts = []
for word in words:
parts.append(word[0].upper() + word[1:])
name = ' '.join(parts)
name = name.replace('Voltage Current', 'Voltage/Current')
name = name.replace('Nfc Rfid', 'NFC/RFID')
name = name.replace('0 20ma', '0-20mA')
plugin_info = infos.PluginInfo()
plugin_info.name = name
plugin_info.url_part = url_part
plugin_info.firmware_version_latest = latest_version
self.plugin_infos[url_part] = plugin_info
def update_bricks(self):
self.brick_infos = []
self.combo_brick.clear()
items = {}
for info in infos.get_brick_infos():
items[info.get_combo_item()] = info
for item in sorted(items.keys()):
self.brick_infos.append(items[item])
self.combo_brick.addItem(item)
if self.combo_brick.count() == 0:
self.combo_brick.addItem(NO_BRICK)
self.update_ui_state()
def create_progress_bar(self, title):
progress = QProgressDialog(self)
progress.setAutoClose(False)
progress.setWindowTitle(title)
progress.setCancelButton(None)
progress.setWindowModality(Qt.WindowModal)
return progress
def popup_ok(self, title, message):
QMessageBox.information(self, title, message, QMessageBox.Ok)
def popup_fail(self, title, message):
QMessageBox.critical(self, title, message, QMessageBox.Ok)
def refresh_serial_ports(self):
progress = self.create_progress_bar('Discovering')
current_text = self.combo_serial_port.currentText()
self.combo_serial_port.clear()
try:
progress.setLabelText('Discovering serial ports')
progress.setMaximum(0)
progress.setValue(0)
progress.show()
ports = get_serial_ports()
except:
progress.cancel()
self.combo_serial_port.addItem(NO_BOOTLOADER)
self.update_ui_state()
self.popup_fail('Brick', 'Could not discover serial ports')
else:
preferred_index = None
for port in ports:
if preferred_index is None:
if 'ttyACM' in port[0] or \
'ttyUSB' in port[0] or \
'usbmodemfd' in port[0] or \
'AT91 USB to Serial Converter' in port[1] or \
'GPS Camera Detect' in port[1] or \
'Bossa Program Port' in port[1]:
preferred_index = self.combo_serial_port.count()
if len(port[1]) > 0 and port[0] != port[1]:
self.combo_serial_port.addItem(u'{0} - {1}'.format(port[0], port[1]), port[0])
else:
self.combo_serial_port.addItem(port[0], port[0])
if self.combo_serial_port.count() == 0:
self.combo_serial_port.addItem(NO_BOOTLOADER)
elif preferred_index is not None:
self.combo_serial_port.setCurrentIndex(preferred_index)
else:
index = self.combo_serial_port.findText(current_text)
if index >= 0:
self.combo_serial_port.setCurrentIndex(index)
self.update_ui_state()
progress.cancel()
def update_ui_state(self):
is_firmware_select = self.combo_firmware.currentText() == SELECT
is_firmware_custom = self.combo_firmware.currentText() == CUSTOM
is_no_bootloader = self.combo_serial_port.currentText() == NO_BOOTLOADER
has_bricklet_ports = self.combo_port.count() > 0
self.combo_serial_port.setEnabled(not is_no_bootloader)
self.combo_port.setEnabled(has_bricklet_ports)
self.combo_plugin.setEnabled(has_bricklet_ports and self.combo_plugin.count() > 1)
self.button_firmware_save.setEnabled(not is_firmware_select and not is_no_bootloader)
self.edit_custom_firmware.setEnabled(is_firmware_custom)
self.button_firmware_browse.setEnabled(is_firmware_custom)
self.edit_uid.setEnabled(has_bricklet_ports)
self.button_uid_load.setEnabled(has_bricklet_ports)
self.button_uid_save.setEnabled(has_bricklet_ports)
is_plugin_select = self.combo_plugin.currentText() == SELECT
is_plugin_custom = self.combo_plugin.currentText() == CUSTOM
is_no_brick = self.combo_brick.currentText() == NO_BRICK
self.combo_brick.setEnabled(not is_no_brick)
self.button_plugin_save.setEnabled(not is_plugin_select and not is_no_brick)
self.edit_custom_plugin.setEnabled(is_plugin_custom)
self.button_plugin_browse.setEnabled(is_plugin_custom)
is_extension_firmware_select = self.combo_extension_firmware.currentText() == SELECT
is_extension_firmware_custom = self.combo_extension_firmware.currentText() == CUSTOM
is_no_extension = self.combo_extension.currentText() == NO_EXTENSION
self.combo_extension.setEnabled(not is_no_extension)
self.button_extension_firmware_save.setEnabled(not is_extension_firmware_select and not is_no_extension)
self.edit_custom_extension_firmware.setEnabled(is_extension_firmware_custom)
self.button_extension_firmware_browse.setEnabled(is_extension_firmware_custom)
self.tab_widget.setTabEnabled(2, len(self.brick_infos) > 0)
self.tab_widget.setTabEnabled(3, len(self.extension_infos) > 0)
def firmware_changed(self, index):
self.update_ui_state()
def firmware_browse_clicked(self):
if len(self.edit_custom_firmware.text()) > 0:
last_dir = os.path.dirname(os.path.realpath(self.edit_custom_firmware.text()))
else:
last_dir = get_home_path()
filename = get_open_file_name(get_main_window(), 'Open Firmware', last_dir, '*.bin')
if len(filename) > 0:
self.edit_custom_firmware.setText(filename)
def firmware_save_clicked(self):
port_name = self.combo_serial_port.itemData(self.combo_serial_port.currentIndex())
try:
samba = SAMBA(port_name)
except SAMBAException as e:
self.refresh_serial_ports()
self.popup_fail('Brick', 'Could not connect to Brick: {0}'.format(str(e)))
return
except SerialException as e:
self.refresh_serial_ports()
self.popup_fail('Brick', str(e)[0].upper() + str(e)[1:])
return
except:
self.refresh_serial_ports()
self.popup_fail('Brick', 'Could not connect to Brick')
return
progress = ProgressWrapper(self.create_progress_bar('Flashing'))
samba.progress = progress
current_text = self.combo_firmware.currentText()
# Get firmware
name = None
version = None
if current_text == SELECT:
return
elif current_text == CUSTOM:
firmware_file_name = self.edit_custom_firmware.text()
try:
with open(firmware_file_name, 'rb') as f:
firmware = f.read()
except IOError:
progress.cancel()
self.popup_fail('Brick', 'Could not read firmware file')
return
else:
url_part = self.combo_firmware.itemData(self.combo_firmware.currentIndex())
name = self.firmware_infos[url_part].name
version = self.firmware_infos[url_part].firmware_version_latest
progress.reset('Downloading {0} Brick firmware {1}.{2}.{3}'.format(name, *version), 0)
response = None
try:
response = urllib2.urlopen(FIRMWARE_URL + 'bricks/{0}/brick_{0}_firmware_{1}_{2}_{3}.bin'.format(url_part, *version), timeout=10)
except urllib2.URLError:
pass
beta = 5
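            # If no release firmware is available, fall back to the newest
            # available beta firmware (trying beta5 down to beta1).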
while response is None and beta > 0:
try:
response = urllib2.urlopen(FIRMWARE_URL + 'bricks/{0}/brick_{0}_firmware_{2}_{3}_{4}_beta{1}.bin'.format(url_part, beta, *version), timeout=10)
except urllib2.URLError:
beta -= 1
if response is None:
progress.cancel()
self.popup_fail('Brick', 'Could not download {0} Brick firmware {1}.{2}.{3}'.format(name, *version))
return
try:
length = int(response.headers['Content-Length'])
progress.setMaximum(length)
progress.update(0)
QApplication.processEvents()
firmware = ''
chunk = response.read(1024)
while len(chunk) > 0:
firmware += chunk
progress.update(len(firmware))
chunk = response.read(1024)
response.close()
except urllib2.URLError:
progress.cancel()
self.popup_fail('Brick', 'Could not download {0} Brick firmware {1}.{2}.{3}'.format(name, *version))
return
# Get IMU UID
imu_uid = None
imu_calibration = None
lock_imu_calibration_pages = False
if name == 'IMU':
            # IMU 1.0.9 and earlier have a bug in their flash locking that
            # makes them unlock the wrong pages. Therefore, the calibration
            # pages must not be locked for these versions.
if version[1] > 0 or (version[1] == 0 and version[2] > 9):
lock_imu_calibration_pages = True
try:
imu_uid = base58encode(uid64_to_uid32(samba.read_uid64()))
except SerialException as e:
progress.cancel()
                self.popup_fail('Brick', 'Could not read UID of IMU Brick: {0}'.format(str(e)))
return
except:
progress.cancel()
                self.popup_fail('Brick', 'Could not read UID of IMU Brick')
return
result = QMessageBox.question(self, 'IMU Brick',
'Restore factory calibration for IMU Brick [{0}] from tinkerforge.com?'.format(imu_uid),
QMessageBox.Yes | QMessageBox.No, QMessageBox.Yes)
# Download IMU calibration
if result == QMessageBox.Yes:
progress.reset('Downloading factory calibration for IMU Brick', 0)
try:
imu_calibration_text = ''
response = urllib2.urlopen(IMU_CALIBRATION_URL + '{0}.txt'.format(imu_uid), timeout=10)
chunk = response.read(1024)
while len(chunk) > 0:
imu_calibration_text += chunk
chunk = response.read(1024)
response.close()
except urllib2.HTTPError as e:
if e.code == 404:
imu_calibration_text = None
self.popup_ok('IMU Brick', 'No factory calibration for IMU Brick [{0}] available'.format(imu_uid))
else:
progress.cancel()
self.popup_fail('IMU Brick', 'Could not download factory calibration for IMU Brick [{0}]'.format(imu_uid))
return
except urllib2.URLError:
progress.cancel()
self.popup_fail('IMU Brick', 'Could not download factory calibration for IMU Brick [{0}]'.format(imu_uid))
return
if imu_calibration_text is not None:
if len(imu_calibration_text) == 0:
progress.cancel()
self.popup_fail('IMU Brick', 'Could not download factory calibration for IMU Brick [{0}]'.format(imu_uid))
return
try:
imu_calibration_matrix = parse_imu_calibration(imu_calibration_text)
# Ensure proper temperature relation
if imu_calibration_matrix[5][1][7] <= imu_calibration_matrix[5][1][3]:
imu_calibration_matrix[5][1][7] = imu_calibration_matrix[5][1][3] + 1
imu_calibration_array = imu_calibration_matrix[0][1][:6] + \
imu_calibration_matrix[1][1][:3] + \
imu_calibration_matrix[2][1][:6] + \
imu_calibration_matrix[3][1][:3] + \
imu_calibration_matrix[4][1][:6] + \
imu_calibration_matrix[5][1][:8]
imu_calibration = struct.pack('<32h', *imu_calibration_array)
except:
progress.cancel()
self.popup_fail('IMU Brick', 'Could not parse factory calibration for IMU Brick [{0}]'.format(imu_uid))
return
# Flash firmware
def report_result(reboot_okay):
if current_text == CUSTOM:
if reboot_okay:
message = 'Successfully restarted Brick!'
else:
message = 'Manual restart of Brick required!'
else:
if reboot_okay:
message = 'Successfully restarted {0} Brick!'.format(name)
else:
message = 'Manual restart of {0} Brick required!'.format(name)
if current_text == CUSTOM:
self.popup_ok('Brick', 'Successfully flashed firmware.\n' + message)
elif imu_calibration is not None:
self.popup_ok('Brick', 'Successfully flashed {0} Brick firmware {1}.{2}.{3}.\n'.format(name, *version) +
'Successfully restored factory calibration.\n' + message)
else:
self.popup_ok('Brick', 'Successfully flashed {0} Brick firmware {1}.{2}.{3}.\n'.format(name, *version) +
message)
try:
samba.flash(firmware, imu_calibration, lock_imu_calibration_pages)
# close serial device before showing dialog, otherwise exchanging
# the brick while the dialog is open will force it to show up as ttyACM1
samba = None
progress.cancel()
report_result(True)
except SAMBARebootError as e:
samba = None
progress.cancel()
self.refresh_serial_ports()
report_result(False)
except SAMBAException as e:
samba = None
progress.cancel()
self.refresh_serial_ports()
self.popup_fail('Brick', 'Could not flash Brick: {0}'.format(str(e)))
except SerialException as e:
samba = None
progress.cancel()
self.refresh_serial_ports()
self.popup_fail('Brick', 'Could not flash Brick: {0}'.format(str(e)))
except:
samba = None
progress.cancel()
self.refresh_serial_ports()
self.popup_fail('Brick', 'Could not flash Brick')
def uid_save_clicked(self):
device, port = self.current_device_and_port()
uid = self.edit_uid.text()
if len(uid) == 0:
self.popup_fail('Bricklet', 'UID cannot be empty')
return
for c in uid:
if c not in BASE58:
self.popup_fail('Bricklet', "UID cannot contain '{0}'".format(c))
return
try:
if base58decode(uid) > 0xFFFFFFFF:
self.popup_fail('Bricklet', 'UID is too long')
return
except:
self.popup_fail('Bricklet', 'UID is invalid')
return
try:
self.parent.ipcon.write_bricklet_uid(device, port, uid)
except Error as e:
self.popup_fail('Bricklet', 'Could not write UID: ' + error_to_name(e))
return
try:
uid_read = self.parent.ipcon.read_bricklet_uid(device, port)
except Error as e:
self.popup_fail('Bricklet', 'Could not read written UID: ' + error_to_name(e))
return
if uid == uid_read:
self.popup_ok('Bricklet', 'Successfully wrote UID.\nNew UID will be used after reset of the connected Brick.')
else:
self.popup_fail('Bricklet', 'Could not write UID: Verification failed')
def uid_load_clicked(self):
device, port = self.current_device_and_port()
try:
uid = self.parent.ipcon.read_bricklet_uid(device, port)
except Error as e:
self.edit_uid.setText('')
self.popup_fail('Bricklet', 'Could not read UID: ' + error_to_name(e))
return
self.edit_uid.setText(uid)
def brick_changed(self, index):
self.combo_port.clear()
if index < 0 or len(self.brick_infos) == 0:
self.combo_port.addItems(['A', 'B', 'C', 'D'])
return
brick_info = self.brick_infos[index]
first_index = None
for key in sorted(brick_info.bricklets.keys()):
bricklet_info = brick_info.bricklets[key]
if bricklet_info is None:
self.combo_port.addItem(key.upper())
else:
                if first_index is None:
first_index = self.combo_port.count()
name = '{0}: {1}'.format(key.upper(), bricklet_info.get_combo_item())
self.combo_port.addItem(name, bricklet_info.url_part)
        if first_index is not None:
self.combo_port.setCurrentIndex(first_index)
self.update_ui_state()
def port_changed(self, index):
self.edit_uid.setText('')
if index < 0:
self.combo_plugin.setCurrentIndex(0)
return
url_part = self.combo_port.itemData(index)
        if url_part is None or len(url_part) == 0:
self.combo_plugin.setCurrentIndex(0)
return
i = self.combo_plugin.findData(url_part)
if i < 0:
self.combo_plugin.setCurrentIndex(0)
else:
self.combo_plugin.setCurrentIndex(i)
b = self.combo_brick.currentIndex()
p = self.combo_port.currentIndex()
if b < 0 or p < 0:
return
self.edit_uid.setText(self.brick_infos[b].bricklets[('a', 'b', 'c', 'd')[p]].uid)
def plugin_changed(self, index):
self.update_ui_state()
def download_bricklet_plugin(self, progress, url_part, name, version, popup=False):
progress.setLabelText('Downloading {0} Bricklet plugin {1}.{2}.{3}'.format(name, *version))
progress.setMaximum(0)
progress.show()
response = None
try:
response = urllib2.urlopen(FIRMWARE_URL + 'bricklets/{0}/bricklet_{0}_firmware_{1}_{2}_{3}.bin'.format(url_part, *version), timeout=10)
except urllib2.URLError:
pass
beta = 5
while response is None and beta > 0:
try:
response = urllib2.urlopen(FIRMWARE_URL + 'bricklets/{0}/bricklet_{0}_firmware_{2}_{3}_{4}_beta{1}.bin'.format(url_part, beta, *version), timeout=10)
except urllib2.URLError:
beta -= 1
if response is None:
progress.cancel()
if popup:
self.popup_fail('Bricklet', 'Could not download {0} Bricklet plugin {1}.{2}.{3}'.format(name, *version))
return None
try:
length = int(response.headers['Content-Length'])
progress.setMaximum(length)
progress.setValue(0)
QApplication.processEvents()
plugin = []
chunk = response.read(256)
while len(chunk) > 0:
plugin += map(ord, chunk) # Convert plugin to list of bytes
progress.setValue(len(plugin))
chunk = response.read(256)
response.close()
except urllib2.URLError:
progress.cancel()
if popup:
self.popup_fail('Bricklet', 'Could not download {0} Bricklet plugin {1}.{2}.{3}'.format(name, *version))
return None
return plugin
def write_bricklet_plugin(self, plugin, device, port, name, progress, popup=True):
# Write
progress.setLabelText('Writing plugin: ' + name)
progress.setMaximum(0)
progress.setValue(0)
progress.show()
plugin_chunks = []
offset = 0
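        # Split the plugin into fixed-size chunks; the last chunk is
        # zero-padded to the full PLUGIN_CHUNK_SIZE expected by the protocol.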
while offset < len(plugin):
chunk = plugin[offset:offset + IPConnection.PLUGIN_CHUNK_SIZE]
if len(chunk) < IPConnection.PLUGIN_CHUNK_SIZE:
chunk += [0] * (IPConnection.PLUGIN_CHUNK_SIZE - len(chunk))
plugin_chunks.append(chunk)
offset += IPConnection.PLUGIN_CHUNK_SIZE
progress.setMaximum(len(plugin_chunks))
position = 0
for chunk in plugin_chunks:
try:
self.parent.ipcon.write_bricklet_plugin(device, port, position, chunk)
except Error as e:
progress.cancel()
if popup:
self.popup_fail('Bricklet', 'Could not write Bricklet plugin: ' + error_to_name(e))
return False
position += 1
progress.setValue(position)
time.sleep(0.015)
QApplication.processEvents()
time.sleep(0.1)
# Verify
progress.setLabelText('Verifying written plugin: ' + name)
progress.setMaximum(len(plugin_chunks))
progress.setValue(0)
progress.show()
time.sleep(0.1)
position = 0
for chunk in plugin_chunks:
try:
read_chunk = list(self.parent.ipcon.read_bricklet_plugin(device, port, position))
except Error as e:
progress.cancel()
if popup:
self.popup_fail('Bricklet', 'Could not read Bricklet plugin back for verification: ' + error_to_name(e))
return False
if read_chunk != chunk:
progress.cancel()
if popup:
self.popup_fail('Bricklet', 'Could not flash Bricklet plugin: Verification error')
return False
position += 1
progress.setValue(position)
time.sleep(0.015)
QApplication.processEvents()
return True
def plugin_save_clicked(self):
progress = self.create_progress_bar('Flashing')
current_text = self.combo_plugin.currentText()
# Get plugin
if current_text == SELECT:
return
elif current_text == CUSTOM:
plugin_file_name = self.edit_custom_plugin.text()
try:
with open(plugin_file_name, 'rb') as f:
plugin = map(ord, f.read()) # Convert plugin to list of bytes
except IOError:
progress.cancel()
self.popup_fail('Bricklet', 'Could not read plugin file')
return
else:
url_part = self.combo_plugin.itemData(self.combo_plugin.currentIndex())
name = self.plugin_infos[url_part].name
version = self.plugin_infos[url_part].firmware_version_latest
plugin = self.download_bricklet_plugin(progress, url_part, name, version)
if not plugin:
return
# Flash plugin
device, port = self.current_device_and_port()
if current_text == CUSTOM:
if not self.write_bricklet_plugin(plugin, device, port, os.path.split(plugin_file_name)[-1], progress):
return
else:
if not self.write_bricklet_plugin(plugin, device, port, name, progress):
return
progress.cancel()
if current_text == CUSTOM:
self.popup_ok('Bricklet', 'Successfully flashed plugin.\nNew plugin will be used after reset of the connected Brick.')
else:
self.popup_ok('Bricklet', 'Successfully flashed {0} Bricklet plugin {1}.{2}.{3}.\nNew plugin will be used after reset of the connected Brick.'.format(name, *version))
def current_device_and_port(self):
port_names = ['a', 'b', 'c', 'd']
return (self.current_device(),
port_names[self.combo_port.currentIndex()])
def current_device(self):
try:
return self.brick_infos[self.combo_brick.currentIndex()].plugin.device
except:
return None
def plugin_browse_clicked(self):
last_dir = get_home_path()
if len(self.edit_custom_plugin.text()) > 0:
last_dir = os.path.dirname(os.path.realpath(self.edit_custom_plugin.text()))
filename = get_open_file_name(get_main_window(), 'Open Plugin', last_dir, '*.bin')
if len(filename) > 0:
self.edit_custom_plugin.setText(filename)
def auto_update_bricklets_clicked(self):
def brick_for_bricklet(bricklet):
for device_info in infos.get_brick_infos():
if bricklet.position in device_info.bricklets and \
device_info.bricklets[bricklet.position] == bricklet:
return device_info
progress = self.create_progress_bar('Auto-Updating Bricklets')
bricks_to_reset = set()
for device_info in infos.get_device_infos():
if device_info.type == 'bricklet':
if device_info.protocol_version == 2 and device_info.firmware_version_installed < device_info.firmware_version_latest:
plugin = self.download_bricklet_plugin(progress, device_info.url_part, device_info.name, device_info.firmware_version_latest)
if not plugin:
progress.cancel()
self.refresh_updates_clicked()
return
brick = brick_for_bricklet(device_info)
if self.write_bricklet_plugin(plugin, brick.plugin.device, device_info.position, device_info.name, progress):
bricks_to_reset.add(brick)
else:
progress.cancel()
self.refresh_updates_clicked()
return
elif device_info.type == 'brick':
for port in device_info.bricklets:
if not device_info.bricklets[port]:
continue
if device_info.bricklets[port].protocol_version == 1 and \
device_info.bricklets[port].firmware_version_installed < device_info.bricklets[port].firmware_version_latest:
plugin = self.download_bricklet_plugin(progress, device_info.bricklets[port].url_part,
device_info.bricklets[port].name,
device_info.bricklets[port].firmware_version_latest)
if not plugin:
progress.cancel()
self.refresh_updates_clicked()
return
brick = brick_for_bricklet(device_info.bricklets[port])
if self.write_bricklet_plugin(plugin, brick.plugin.device, port, device_info.bricklets[port].name, progress):
bricks_to_reset.add(brick)
else:
progress.cancel()
self.refresh_updates_clicked()
return
for brick in bricks_to_reset:
try:
brick.plugin.device.reset()
except:
pass
progress.setLabelText('Waiting for Bricks to reset')
progress.setMaximum(400)
progress.setValue(0)
for i in range(400):
time.sleep(0.03)
progress.setValue(i)
progress.cancel()
def tab_changed(self, i):
if i == 0 and self.refresh_updates_pending:
self.refresh_updates_clicked()
elif i == 2:
self.brick_changed(self.combo_brick.currentIndex())
self.port_changed(self.combo_port.currentIndex())
elif i == 3:
self.extension_changed(self.combo_extension.currentIndex())
def refresh_updates_clicked(self):
if self.tab_widget.currentIndex() != 0:
self.refresh_updates_pending = True
return
self.update_button_refresh.setDisabled(True)
self.refresh_updates_pending = False
url_part_proto1_map = {
# 'name': 'url_part'
'Ambient Light Bricklet': 'ambient_light',
'Analog In Bricklet': 'analog_in',
'Analog Out Bricklet': 'analog_out',
'Barometer Bricklet': 'barometer',
'Current12 Bricklet': 'current12',
'Current25 Bricklet': 'current25',
'Distance IR Bricklet': 'distance_ir',
'Dual Relay Bricklet': 'dual_relay',
'GPS Bricklet': 'gps',
'Humidity Bricklet': 'humidity',
'Industrial Digital In 4 Bricklet': 'industrial_digital_in_4',
'Industrial Digital Out 4 Bricklet': 'industrial_digital_out_4',
'Industrial Quad Relay Bricklet': 'industrial_quad_relay',
'IO-16 Bricklet': 'io16',
'IO-4 Bricklet': 'io4',
'Joystick Bricklet': 'joystick',
'LCD 16x2 Bricklet': 'lcd_16x2',
'LCD 20x4 Bricklet': 'lcd_20x4_v11',
'Linear Poti Bricklet': 'linear_poti',
'Piezo Buzzer Bricklet': 'piezo_buzzer',
'Rotary Poti Bricklet': 'rotary_poti',
'Temperature Bricklet': 'temperature',
'Temperature-IR Bricklet': 'temperature_ir',
'Voltage Bricklet': 'voltage',
'Voltage/Current Bricklet': 'voltage_current',
}
progress = self.create_progress_bar('Discovering')
try:
urllib2.urlopen("http://tinkerforge.com", timeout=10).read()
self.label_no_update_connection.hide()
self.label_no_firmware_connection.hide()
self.label_no_plugin_connection.hide()
except urllib2.URLError:
progress.cancel()
self.label_no_update_connection.show()
self.label_no_firmware_connection.show()
self.label_no_plugin_connection.show()
return
self.refresh_latest_version_info(progress)
def get_color_for_device(device):
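            # Returns (background brush, update_available): no highlight when
            # the device is up to date, red when the installed major version
            # is <= 1, orange for any other pending update.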
if device.firmware_version_installed >= device.firmware_version_latest:
return None, False
if device.firmware_version_installed[0] <= 1:
return QBrush(Qt.red), True
return QBrush(QColor(255, 160, 55)), True
try:
infos.get_info(infos.UID_BRICKV).firmware_version_latest = self.tool_infos['brickv'].firmware_version_latest
except:
infos.get_info(infos.UID_BRICKV).firmware_version_latest = (0, 0, 0)
for device_info in infos.get_device_infos():
if device_info.type == 'brick':
try:
device_info.firmware_version_latest = self.firmware_infos[device_info.url_part].firmware_version_latest
except:
device_info.firmware_version_latest = (0, 0, 0)
elif device_info.type == 'bricklet':
try:
device_info.firmware_version_latest = self.plugin_infos[device_info.url_part].firmware_version_latest
except:
device_info.firmware_version_latest = (0, 0, 0)
progress.cancel()
self.update_tree_view_model.clear()
self.update_tree_view_model.setHorizontalHeaderLabels(self.update_tree_view_model_labels)
is_update = False
protocol1_errors = set()
items = []
for device_info in infos.get_infos():
if device_info.type == 'brick':
parent = [QStandardItem(device_info.name),
QStandardItem(device_info.uid),
QStandardItem(get_version_string(device_info.firmware_version_installed)),
QStandardItem(get_version_string(device_info.firmware_version_latest))]
color, update = get_color_for_device(device_info)
if update:
is_update = True
for item in parent:
item.setFlags(item.flags() & ~Qt.ItemIsEditable)
item.setData(color, Qt.BackgroundRole)
parent[0].setData(device_info.uid, Qt.UserRole)
items.append(parent)
for port in device_info.bricklets:
if not device_info.bricklets[port] or device_info.bricklets[port].protocol_version == 1:
try:
protv, fw, name = device_info.plugin.device.get_protocol1_bricklet_name(port)
except:
protocol1_errors.add(device_info.uid)
child = [QStandardItem(port.upper() + ': Protocol 1.0 Bricklet with Error'),
QStandardItem(''),
QStandardItem(''),
QStandardItem('')]
for item in child:
item.setFlags(item.flags() & ~Qt.ItemIsEditable)
item.setData(QBrush(Qt.magenta), Qt.BackgroundRole)
parent[0].appendRow(child)
continue
if protv == 1:
# Hack for LCD 20x4 Bricklet (name is not set early enough in firmware)
if fw == (1, 1, 1) and name == '':
name = 'LCD 20x4 Bricklet'
bricklet_info = infos.BrickletInfo()
bricklet_info.protocol_version = 1
bricklet_info.name = name
bricklet_info.position = port
bricklet_info.firmware_version_installed = tuple(fw)
device_info.bricklets[port] = bricklet_info
for key in url_part_proto1_map:
if key in device_info.bricklets[port].name:
bricklet_info.url_part = url_part_proto1_map[key]
break
try:
bricklet_info.firmware_version_latest = self.plugin_infos[bricklet_info.url_part].firmware_version_latest
except KeyError:
pass
if device_info.bricklets[port]:
child = [QStandardItem(port.upper() + ': ' + device_info.bricklets[port].name),
QStandardItem(device_info.bricklets[port].uid),
QStandardItem(get_version_string(device_info.bricklets[port].firmware_version_installed)),
QStandardItem(get_version_string(device_info.bricklets[port].firmware_version_latest))]
color, update = get_color_for_device(device_info.bricklets[port])
if update:
is_update = True
for item in child:
item.setFlags(item.flags() & ~Qt.ItemIsEditable)
item.setData(color, Qt.BackgroundRole)
parent[0].appendRow(child)
elif device_info.type == 'tool' and 'Brick Viewer' in device_info.name:
parent = [QStandardItem(device_info.name),
QStandardItem(''),
QStandardItem(get_version_string(device_info.firmware_version_installed)),
QStandardItem(get_version_string(device_info.firmware_version_latest))]
color, update = get_color_for_device(device_info)
if update:
self.label_update_tool.show()
else:
self.label_update_tool.hide()
for item in parent:
item.setFlags(item.flags() & ~Qt.ItemIsEditable)
item.setData(color, Qt.BackgroundRole)
items.append(parent)
        t = 0
        if len(protocol1_errors) > 0:
            # If there were Protocol 1.0 errors, give the enumerate callback
            # a chance to update the infos, so that false-positive errors
            # caused by fast USB unplugging can be filtered out.
            t = 200
QTimer.singleShot(t, lambda: self.refresh_updates_clicked_second_step(is_update, items, protocol1_errors))
def refresh_updates_clicked_second_step(self, is_update, items, protocol1_errors):
protocol1_error_still_there = False
# filter out false-positive protocol1 errors
for device_uid in protocol1_errors:
            if infos.get_info(device_uid) is not None:
protocol1_error_still_there = True
continue
for i in range(len(items)):
if items[i][0].data(Qt.UserRole) == device_uid:
del items[i]
break
for item in items:
self.update_tree_view_model.appendRow(item)
self.update_tree_view.expandAll()
self.update_tree_view.setColumnWidth(0, 260)
self.update_tree_view.setColumnWidth(1, 75)
self.update_tree_view.setColumnWidth(2, 75)
self.update_tree_view.setColumnWidth(3, 75)
self.update_tree_view.setSortingEnabled(True)
self.update_tree_view.header().setSortIndicator(0, Qt.AscendingOrder)
if is_update:
self.update_button_bricklets.setEnabled(True)
else:
self.update_button_bricklets.setEnabled(False)
self.brick_changed(self.combo_brick.currentIndex())
self.update_button_refresh.setDisabled(False)
if protocol1_error_still_there:
message = """
There was an error during the auto-detection of Bricklets with Protocol 1.0 plugins. Those cannot be updated automatically, but you can update them manually:
- Disconnect the affected Bricklets from their Brick and restart the Brick without the Bricklets.
- Ensure that the Brick shows up correctly.
- Connect the Bricklet to the Brick again, while the Brick is already running.
- Select the "Bricklet" tab and update the plugin manually.
"""
QMessageBox.critical(self, "Bricklet with Error", message, QMessageBox.Ok)
def extension_changed(self, index):
# Since we currently only have one extension with a firmware
# there is nothing to do here.
pass
def extension_firmware_changed(self, index):
self.update_ui_state()
def extension_firmware_save_clicked(self):
current_text = self.combo_extension_firmware.currentText()
progress = ProgressWrapper(self.create_progress_bar('Extension Flashing'))
try:
if current_text == SELECT:
return
elif current_text == CUSTOM:
firmware_file_name = self.edit_custom_extension_firmware.text()
if not zipfile.is_zipfile(firmware_file_name):
self.popup_fail('Extension Firmware', 'Firmware file does not have correct format')
progress.cancel()
return
files = []
zf = zipfile.ZipFile(firmware_file_name, 'r')
for name in zf.namelist():
files.append((int(name.replace('.bin', ''), 0), name))
progress.reset('Connecting to bootloader of WIFI Extension 2.0', 0)
progress.update(0)
master_info = self.extension_infos[self.combo_extension.currentIndex()]
master = None
# Find master from infos again, our info object may be outdated at this point
for info in infos.get_brick_infos():
if info.uid == master_info.uid:
master = info.plugin.device
                if master is None:
                    progress.cancel()
                    self.popup_fail('Extension Firmware', 'Error during Extension flashing: Could not find chosen Master Brick')
                    return
esp = ESPROM(master)
esp.connect()
flash_mode = 0
flash_size_freq = 64
flash_info = struct.pack('BB', flash_mode, flash_size_freq)
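                # flash_info overwrites bytes 2 and 3 of the firmware image
                # header below (flash mode and flash size/frequency in the
                # common ESP8266 image layout).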
for i, f in enumerate(files):
address = f[0]
image = zf.read(f[1])
progress.reset('Erasing flash ({0}/{1})'.format(i+1, len(files)), 0)
progress.update(0)
                    blocks = int(math.ceil(len(image) / float(esp.ESP_FLASH_BLOCK)))
                    esp.flash_begin(blocks * esp.ESP_FLASH_BLOCK, address)
seq = 0
progress.reset('Writing flash ({0}/{1})'.format(i+1, len(files)), 100)
while len(image) > 0:
progress.update(100*(seq+1)/blocks)
block = image[0:esp.ESP_FLASH_BLOCK]
# Fix sflash config data
if address == 0 and seq == 0 and block[0] == '\xe9':
block = block[0:2] + flash_info + block[4:]
# Pad the last block
block = block + '\xff' * (esp.ESP_FLASH_BLOCK-len(block))
esp.flash_block(block, seq)
image = image[esp.ESP_FLASH_BLOCK:]
seq += 1
esp.flash_finish(False)
except:
progress.cancel()
self.popup_fail('Extension Firmware', 'Error during Extension flashing: ' + traceback.format_exc())
else:
progress.cancel()
master.reset()
self.popup_ok('Extension Firmware', 'Successfully flashed Extension firmware.\nMaster Brick will now automatically restart.')
def extension_firmware_browse_clicked(self):
if len(self.edit_custom_extension_firmware.text()) > 0:
last_dir = os.path.dirname(os.path.realpath(self.edit_custom_extension_firmware.text()))
else:
last_dir = get_home_path()
filename = get_open_file_name(get_main_window(), 'Open Extension Firmware', last_dir, '*.zbin')
if len(filename) > 0:
self.edit_custom_extension_firmware.setText(filename)
def update_extensions(self):
self.combo_extension.clear()
self.extension_infos = []
for info in infos.get_brick_infos():
if info.device_identifier == BrickMaster.DEVICE_IDENTIFIER:
                if (info.extensions['ext0'] is not None and info.extensions['ext0'].extension_type == BrickMaster.EXTENSION_TYPE_WIFI2) or \
                   (info.extensions['ext1'] is not None and info.extensions['ext1'].extension_type == BrickMaster.EXTENSION_TYPE_WIFI2):
self.combo_extension.addItem(info.get_combo_item_extension())
self.extension_infos.append(info)
        if self.combo_extension.count() == 0:
            self.combo_extension.addItem(NO_EXTENSION)
self.update_ui_state()
| D4wN/brickv | src/brickv/flashing.py | Python | gpl-2.0 | 55,747 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
E-I network connected with NEST topology
----------------------------------------
Simulation of a network consisting of an excitatory and an inhibitory
neuron population with distance-dependent connectivity.
The code is based on the script
brunel_alpha_nest.py
which is part of NEST ( http://nest-initiative.org/ )
implementing a random balanced network (with alpha-shaped synapses)
as in
Brunel N, Dynamics of Sparsely Connected Networks of Excitatory and
Inhibitory Spiking Neurons, Journal of Computational Neuroscience 8,
183–208 (2000).
In contrast to the original version, which does not take network geometry
into account, distance-dependent connections are here established using
the NEST topology module, and a spatially confined external stimulus is
added.
The script writes to the output folder 'out_raw':
- neuron positions
- population GIDs
- plot of spike raster
- raw spike data in .gdf format for VIOLA
- configuration file (for raw data) for VIOLA
Usage:
::
python topo_brunel_alpha_nest.py out_raw
'''
'''
Importing all necessary modules for simulation, analysis and plotting.
'''
import sys
# JURECA: remove global matplotlib from path such that local install can be found
try:
sys.path.remove('/usr/local/software/jureca/Stages/2016a/software/SciPy-Stack/2016a-intel-para-2016a-Python-2.7.11/lib/python2.7/site-packages/matplotlib-1.5.1-py2.7-linux-x86_64.egg')
except (ValueError, KeyError) as err:
pass
import os
import time
import glob
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.colors as mpc
import mpl_toolkits.mplot3d.art3d as art3d
import matplotlib.gridspec as gridspec
from matplotlib.patches import Patch
#from matplotlib.colors import LinearSegmentedColormap
from matplotlib.ticker import MaxNLocator
from mpl_toolkits.mplot3d import Axes3D
import scipy
from scipy.optimize import fsolve
import numpy as np
from numpy import exp, random, zeros_like, r_
from multiprocessing import cpu_count
import json
import nest
nest.set_verbosity('M_WARNING')
import nest.topology as tp
print('matplotlib version: ' + matplotlib.__version__)
print('numpy version: ' + np.__version__)
print('scipy version: ' + scipy.__version__)
plt.rcParams.update({
'axes.xmargin': 0.0,
'axes.ymargin': 0.0,
})
random.seed(123456)
# base color definitions
hex_col_ex = '#595289' # blue with pastel character
hex_col_in = '#af143c' # red with pastel character
hex_col_stim = '#696969' # DimGray
'''
Assigning the current time to a variable in order to determine the
build time of the network.
'''
startbuild = time.time()
'''
Assigning the simulation parameters to variables.
'''
dt = 0.1 # Simulation time resolution in ms
simtime = 1500. # Simulation time in ms
transient = 500. # Simulation transient, discarding spikes at times < transient
'''
Definition of the parameters crucial for the network state.
'''
g = 4.5 # ratio inhibitory weight/excitatory weight (before: 5.0)
eta = 2. # external rate relative to threshold rate
epsilon = 0.1 # connection probability
'''
Definition of the number of neurons in the network.
'''
order = 5000 # (before: 2500)
NE = 4*order # number of excitatory neurons
NI = 1*order # number of inhibitory neurons
N_neurons = NE+NI # number of neurons in total
'''
Definition of connectivity parameters.
'''
CE = int(epsilon*NE) # number of excitatory synapses per neuron
CI = int(epsilon*NI) # number of inhibitory synapses per neuron
C_tot = int(CI+CE) # total number of synapses per neuron
'''
Initialization of the parameters of the integrate-and-fire neurons and
the synapses. The parameters of the neuron are stored in a dictionary.
The synaptic currents are normalized such that the amplitude of the
PSP is J.
'''
tauSyn = 0.5 # synaptic time constant in ms
tauMem = 20. # time constant of membrane potential in ms
CMem = 100. # capacitance of membrane in pF
theta = 20. # membrane threshold potential in mV
tRef = 2. # refractory period in ms
neuron_params= {"C_m": CMem,
"tau_m": tauMem,
"tau_syn_ex": tauSyn,
"tau_syn_in": tauSyn,
"t_ref": tRef,
"E_L": 0.,
"V_reset": 0.,
"V_m": 0.,
"V_th": theta}
J_ex = 40. # postsynaptic amplitude in pA (before: 0.1 mV, converted to pA)
J_in = -g*J_ex # amplitude of inhibitory postsynaptic current
'''
Definition of the threshold rate, which is the external rate needed to fix
the membrane potential around its threshold (assuming just one external
connection). The rate of the Poisson generator is given in Hz. It is the
threshold rate multiplied by the relative rate eta.
'''
nu_th = (theta * CMem) / (J_ex*exp(1)*tauMem*tauSyn)
p_rate = eta * nu_th * 1000.
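# Quick sanity check of the formula above (illustrative arithmetic only):
# nu_th = (20 * 100) / (40 * e * 20 * 0.5) ~ 1.84 spikes/ms, so
# p_rate = 2 * nu_th * 1000 ~ 3679 Hz.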
'''
Parameters for a spatially confined stimulus.
'''
stim_radius = 0.5 # radius of a circle in mm for location of stimulus
mask_radius_stim = 0.1 # mask radius of stimulus in mm around each parrot neuron
num_stim_conn = 300 # number of connections inside mask_radius_stim
stim_start = 1000. # start time of stimulus in ms
stim_duration = 50. # duration of the stimulus onset in ms
stim_rate = 300. # rate of parrot neurons in Hz during stimulus activation
'''
Definition of topology-specific parameters. The connection routines use
fixed indegrees, i.e., convergent connections with a fixed number of
incoming connections per neuron.
'''
extent_length = 4. # in mm (layer size = extent_length x extent_length)
sigma_ex = 0.3 # width of Gaussian profile for excitatory connections in mm
sigma_in = 0.3 # width of Gaussian profile for inhibitory connections in mm
delay_ex_c = 0.5 # constant term of linear distance-dep. delay in ms (exc.)
delay_ex_a = 0.5 # slope of linear distance-dep. delay in ms/mm (exc.)
delay_in_c = 0.5 # constant term of linear distance-dep. delay in ms (inh.)
delay_in_a = 0.5 # slope of linear distance-dep. delay in ms/mm (inh.)
delay_stim = 0.5 # delay in ms for Poisson input to stimulus and stimulus to exc.
pos_ex = list(((random.rand(2*NE) - 0.5) * extent_length).reshape(-1, 2))
pos_in = list(((random.rand(2*NI) - 0.5) * extent_length).reshape(-1, 2))
layerdict_ex = {
'extent' : [extent_length, extent_length],
'positions' : pos_ex,
'elements' : 'iaf_psc_alpha',
'edge_wrap' : True, # PBC
}
layerdict_in = {
'extent' : [extent_length, extent_length],
'positions' : pos_in,
'elements' : 'iaf_psc_alpha',
'edge_wrap' : True,
}
'''
The number of parrot neurons for the stimulus is computed by preserving the
density of excitatory neurons. The parrot neurons are placed inside a circle
around the center of the sheet.
'''
N_stim_square = int(NE * (2.*stim_radius)**2/extent_length**2)
pos_stim_square = list(((random.rand(2*N_stim_square) - 0.5) * 2.*stim_radius).reshape(-1, 2))
# discard those positions which do not fall into circle
pos_stim = []
for pos in pos_stim_square:
if pos[0]**2 + pos[1]**2 <= stim_radius**2:
pos_stim.append(pos)
N_stim = len(pos_stim)
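# Illustrative numbers for the parameters above: N_stim_square =
# int(20000 * 1.0 / 16.0) = 1250 candidate positions; rejection sampling
# onto the circle keeps a fraction of about pi/4, so N_stim is roughly
# 980 parrot neurons.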
layerdict_stim = {
'extent' : [extent_length, extent_length],
'positions' : pos_stim,
'elements' : 'parrot_neuron',
'edge_wrap' : True,
}
'''
Connection dictionaries are defined.
'''
conn_dict_ex = {
'connection_type': 'convergent',
'allow_autapses': False,
'allow_multapses': True,
'weights' : J_ex,
'delays' : {
'linear' : { # p(d) = c + a * d, d is distance
'c' : delay_ex_c,
'a' : delay_ex_a,
}
},
'kernel' : {
'gaussian' : {
'p_center' : 1.,
'sigma' : sigma_ex,
'mean' : 0.,
'c' : 0.,
}
},
'number_of_connections' : CE,
}
conn_dict_in = {
'connection_type': 'convergent',
'allow_autapses': False,
'allow_multapses': True,
'weights' : J_in,
'delays' : {
'linear' : {
'c' : delay_in_c,
'a' : delay_in_a,
}
},
'kernel' : {
'gaussian' : {
'p_center' : 1.,
'sigma' : sigma_in,
'mean' : 0.,
'c' : 0.,
}
},
'number_of_connections' : CI,
}
conn_dict_stim = {
'connection_type': 'divergent',
'weights' : J_ex,
'delays' : delay_stim,
'mask' : {
'circular' : {
'radius' : mask_radius_stim
}
},
'kernel' : 1.,
'number_of_connections' : num_stim_conn,
}
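# Summary (as consumed by tp.ConnectLayers below): conn_dict_ex and
# conn_dict_in draw a fixed number of convergent connections (CE/CI) per
# target with a Gaussian distance-dependent kernel and linear
# distance-dependent delays; conn_dict_stim makes a fixed number of
# divergent connections inside a circular mask around each parrot neuron.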
def cmap_white_to_color(hexcolor, num, whitemin=True):
'''
Create linear colormap.
'''
rgb = mpc.hex2color(hexcolor)
rs = np.linspace(1, rgb[0], num)
gs = np.linspace(1, rgb[1], num)
bs = np.linspace(1, rgb[2], num)
    rgbs = list(zip(rs, gs, bs))
    if not whitemin:
        rgbs = rgbs[::-1] # switch order of colors
    cmap = mpc.ListedColormap(tuple(rgbs))
return cmap
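# Example usage (illustrative): a 64-step colormap fading from white to the
# excitatory population color:
#   cmap_ex = cmap_white_to_color(hex_col_ex, 64)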
if __name__ == '__main__':
'''
Destination for spike output and definition of file prefixes.
'''
if len(sys.argv) != 2:
spike_output_path = 'out_raw'
else:
spike_output_path = sys.argv[-1]
label = 'spikes' # spike detectors
label_positions = 'neuron_positions' # neuron positions
'''
Create the file output destination folder if it does not exist.
Delete old simulation files if the folder is already present.
'''
if not os.path.isdir(spike_output_path):
os.mkdir(spike_output_path)
else:
for fil in os.listdir(spike_output_path):
os.remove(os.path.join(spike_output_path, fil))
'''
Reset the simulation kernel.
Configuration of the simulation kernel by the previously defined time
resolution used in the simulation. Setting "print_time" to True prints
the already processed simulation time as well as its percentage of the
total simulation time.
'''
nest.ResetKernel()
nest.SetKernelStatus({"resolution": dt,
"print_time": False,
"overwrite_files": True,
'local_num_threads': cpu_count(),
'grng_seed': 234567})
print('total_num_virtual_procs: ' + str(nest.GetKernelStatus('total_num_virtual_procs')))
print("Building network")
'''
Configuration of the model `iaf_psc_alpha` and `poisson_generator`
using SetDefaults(). This function expects the model name to be given
as a string and the parameters to be specified in a dictionary. All
instances of these models created after this point
will have the properties specified in the dictionary by default.
'''
nest.SetDefaults("iaf_psc_alpha", neuron_params)
'''
Creation of the topology layers for excitatory and inhibitory neurons.
GIDs and neuron positions are written to file.
'''
layer_in = tp.CreateLayer(layerdict_in)
layer_ex = tp.CreateLayer(layerdict_ex)
layer_stim = tp.CreateLayer(layerdict_stim)
tp.DumpLayerNodes(layer_ex, os.path.join(spike_output_path,
label_positions + '-0.dat'))
tp.DumpLayerNodes(layer_in, os.path.join(spike_output_path,
label_positions + '-1.dat'))
tp.DumpLayerNodes(layer_stim, os.path.join(spike_output_path,
label_positions + '-2.dat'))
nodes_ex = nest.GetChildren(layer_ex)[0] # nodes of ex/in neurons
nodes_in = nest.GetChildren(layer_in)[0]
nodes_stim = nest.GetChildren(layer_stim)[0]
'''
Distribute initial membrane voltages.
'''
for neurons in [nodes_ex, nodes_in]:
for neuron in neurons:
nest.SetStatus([neuron], {'V_m': theta * random.rand()})
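    # Drawing V_m uniformly in [0, theta) desynchronizes the startup
    # transient that would occur if all neurons started at the same potential.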
'''
Create spike detectors for recording from the excitatory and the
inhibitory populations and a poisson generator as noise source.
The spike detectors are configured for writing to file.
'''
espikes = nest.Create("spike_detector")
ispikes = nest.Create("spike_detector")
stim_spikes = nest.Create("spike_detector")
nest.SetStatus(espikes,[{
"label": os.path.join(spike_output_path, label + "-0"),
"withtime": True,
"withgid": True,
"to_file": True,
"start" : transient,
}])
nest.SetStatus(ispikes,[{
"label": os.path.join(spike_output_path, label + "-1"),
"withtime": True,
"withgid": True,
"to_file": True,
"start" : transient,
}])
nest.SetStatus(stim_spikes,[{
"label": os.path.join(spike_output_path, label + "-2"),
"withtime": True,
"withgid": True,
"to_file": True,
"start" : transient,
}])
noise = nest.Create("poisson_generator", 1, {"rate": p_rate})
'''
External stimulus.
'''
pg_stim = nest.Create('poisson_generator', 1,
{'start': stim_start,
'stop': stim_start + stim_duration,
'rate': stim_rate})
print("Connecting devices")
'''
    Definition of a synapse using `CopyModel`, which expects the model
    name of a pre-defined synapse, the name of the new synapse and an
    optional parameter dictionary. The parameters defined in the
    dictionary become the defaults for the new synapse. Here we define
    one synapse for the excitatory and one for the inhibitory
    connections, giving them the previously defined weights.
'''
nest.CopyModel("static_synapse","excitatory",{"weight":J_ex})
nest.CopyModel("static_synapse","inhibitory",{"weight":J_in})
'''
Connecting the previously defined poisson generator to the excitatory
and inhibitory neurons using the excitatory synapse. Since the poisson
generator is connected to all neurons in the population the default
rule ('all_to_all') of Connect() is used. The synaptic properties are
inserted via syn_spec which expects a dictionary when defining
multiple variables or a string when simply using a pre-defined
synapse.
'''
nest.Connect(noise, nodes_ex, syn_spec="excitatory")
nest.Connect(noise, nodes_in, syn_spec="excitatory")
'''
Connecting the excitatory, inhibitory and stimulus populations to the associated
spike detectors using excitatory synapses. Here the same shortcut for the
specification of the synapse as defined above is used.
'''
nest.Connect(nodes_ex, espikes, syn_spec="excitatory")
nest.Connect(nodes_in, ispikes, syn_spec="excitatory")
nest.Connect(nodes_stim, stim_spikes, syn_spec="excitatory")
print("Connecting network")
'''
Connecting the excitatory and inhibitory populations using the
pre-defined excitatory/inhibitory synapse and the connection dictionaries.
First, update the connection dictionaries with the synapses.
'''
conn_dict_ex['synapse_model'] = 'excitatory'
conn_dict_in['synapse_model'] = 'inhibitory'
conn_dict_stim['synapse_model'] = 'excitatory'
print("Excitatory connections")
tp.ConnectLayers(layer_ex, layer_ex, conn_dict_ex)
tp.ConnectLayers(layer_ex, layer_in, conn_dict_ex)
print("Inhibitory connections")
tp.ConnectLayers(layer_in, layer_ex, conn_dict_in)
tp.ConnectLayers(layer_in, layer_in, conn_dict_in)
'''
Connect spike generator of external stimulus with the excitatory neurons.
'''
tp.ConnectLayers(layer_stim, layer_ex, conn_dict_stim)
nest.Connect(pg_stim, nodes_stim, syn_spec={'weight': J_ex})
'''
Storage of the time point after the buildup of the network in a
variable.
'''
endbuild=time.time()
# # ConnPlotter test plot
# if True:
# import ConnPlotter as cpl
# nest.CopyModel("static_synapse","STIM", {"weight":stim_weight_scale*J_ex})
# conn_dict_stim['synapse_model'] = 'STIM' # somehow
# lList = [
# ('STIM', layerdict_stim),
# ('EX', layerdict_ex),
# ('IN', layerdict_in),
# ]
# cList = [
# ('STIM', 'EX', conn_dict_stim),
# ('EX', 'EX', conn_dict_ex),
# ('EX', 'IN', conn_dict_ex),
# ('IN', 'EX', conn_dict_in),
# ('IN', 'IN', conn_dict_in),
# ]
# synTypes = ((
# cpl.SynType('excitatory', J_ex, 'r'),
# cpl.SynType('inhibitory', J_in, 'b'),
# cpl.SynType('STIM', stim_weight_scale*J_ex, 'k')
# ),)
# s_cp = cpl.ConnectionPattern(lList, cList, synTypes=synTypes)
# s_cp.plot(colorLimits=[0,100])
# s_cp.plot(aggrSyns=True, colorLimits=[0,100])
# plt.show()
'''
Simulation of the network.
'''
print("Simulating")
nest.Simulate(simtime)
'''
Storage of the time point after the simulation of the network in a
variable.
'''
endsimulate= time.time()
'''
Reading out the total number of spikes received from the spike
detector connected to the excitatory population and the inhibitory
population.
'''
events_ex = nest.GetStatus(espikes,"n_events")[0]
events_in = nest.GetStatus(ispikes,"n_events")[0]
events_stim = nest.GetStatus(stim_spikes,"n_events")[0]
'''
    Calculation of the average firing rates of the excitatory, inhibitory
    and stimulus populations by dividing the spike counts by the effective
    simulation time (simtime - transient) and the population size. The
    multiplication by 1000.0 converts the unit 1/ms to 1/s=Hz.
'''
rate_ex = events_ex/(simtime-transient)*1000./len(nodes_ex)
rate_in = events_in/(simtime-transient)*1000./len(nodes_in)
    rate_stim = events_stim/(simtime-transient)*1000./len(nodes_stim)
'''
Reading out the number of connections established using the excitatory
and inhibitory synapse model. The numbers are summed up resulting in
the total number of synapses.
'''
num_synapses = nest.GetDefaults("excitatory")["num_connections"]+\
nest.GetDefaults("inhibitory")["num_connections"]
'''
Establishing the time it took to build and simulate the network by
taking the difference of the pre-defined time variables.
'''
build_time = endbuild-startbuild
sim_time = endsimulate-endbuild
'''
Printing the network properties, firing rates and building times.
'''
print("Brunel network simulation (Python)")
print("Number of neurons : {0}".format(N_neurons))
# including devices and noise
print("Number of synapses: {0}".format(num_synapses))
# neurons + noise + spike detectors
print(" Exitatory : {0}".format(int(CE * N_neurons) + 2 * N_neurons))
print(" Inhibitory : {0}".format(int(CI * N_neurons)))
print("Excitatory rate : %.2f Hz" % rate_ex)
print("Inhibitory rate : %.2f Hz" % rate_in)
print("Stimulus rate : %.2f Hz" % rate_stim)
print("Building time : %.2f s" % build_time)
print("Simulation time : %.2f s" % sim_time)
nest.sli_run('memory_thisjob') # virtual memory size of NEST process
memory = nest.sli_pop()
print("Memory : %.2f kB" % memory)
'''
A dictionary for population parameters is created to allow for easier access.
'''
pops = {}
pops['EX'] = {}
pops['IN'] = {}
pops['STIM'] = {}
# neuron numbers
pops['EX']['N'] = NE
pops['IN']['N'] = NI
pops['STIM']['N'] = N_stim
# positions
pops['EX']['pos'] = pos_ex
pops['IN']['pos'] = pos_in
pops['STIM']['pos'] = pos_stim
# layer
pops['EX']['layer'] = layer_ex
pops['IN']['layer'] = layer_in
pops['STIM']['layer'] = layer_stim
# layerdict
pops['EX']['layerdict'] = layerdict_ex
pops['IN']['layerdict'] = layerdict_in
pops['STIM']['layerdict'] = layerdict_stim
# nodes
pops['EX']['nodes'] = nodes_ex
pops['IN']['nodes'] = nodes_in
pops['STIM']['nodes'] = nodes_stim
# rate
pops['EX']['rate'] = rate_ex
pops['IN']['rate'] = rate_in
pops['STIM']['rate'] = rate_stim
# events
pops['EX']['events'] = nest.GetStatus(espikes, 'events')[0]
pops['IN']['events'] = nest.GetStatus(ispikes, 'events')[0]
pops['STIM']['events'] = nest.GetStatus(stim_spikes, 'events')[0]
# population colors
pops['EX']['color'] = mpc.hex2color(hex_col_ex)
pops['IN']['color'] = mpc.hex2color(hex_col_in)
pops['STIM']['color'] = mpc.hex2color(hex_col_stim)
# dark connection colors (just darker than population colors)
pops['EX']['conn_color_dark'] = tuple(np.array(pops['EX']['color']) * 0.9) # darken
pops['IN']['conn_color_dark'] = tuple(np.array(pops['IN']['color']) * 0.9)
pops['STIM']['conn_color_dark'] = tuple(np.array(pops['EX']['color']) * 0.9)
# light connection colors (just lighter than population colors, note: <1)
pops['EX']['conn_color_light'] = tuple(np.array(pops['EX']['color']) * 1.4) # lighten
pops['IN']['conn_color_light'] = tuple(np.array(pops['IN']['color']) * 1.4)
pops['STIM']['conn_color_light'] = tuple(np.array(pops['EX']['color']) * 1.4)
# targets of the neuron type
pops['EX']['tgts'] = ['EX', 'IN']
pops['IN']['tgts'] = ['EX', 'IN']
pops['STIM']['tgts'] = ['EX']
'''
In the following, functions for rudimentary postprocessing are defined.
They are called at the bottom of the script.
First, spike files have to be merged and the population GIDs and configuration
files for VIOLA are written to file.
'''
def merge_spike_files():
'''
Merges spike files from different threads.
'''
print("Merging spike files")
for i, pop in enumerate(['EX', 'IN', 'STIM']):
old_filenames = glob.glob(os.path.join(spike_output_path, label + '-' + str(i) + '*.gdf'))
data = np.empty((0, 2))
for t in range(len(old_filenames)):
data = np.vstack([data, np.loadtxt(old_filenames[t])])
os.remove(old_filenames[t])
order = np.argsort(data[:, 1]) # sort spike times
data = data[order]
# write to new file having the same filename as for thread 0
new_filename = os.path.join(spike_output_path, label+'-'+ str(i) + '.gdf')
            with open(new_filename, 'w') as f:
                for line in data:
                    f.write('%d\t%.3f\n' % (line[0], line[1]))
return
def write_population_GIDs():
'''
        Writes the first and last neuron GIDs of all three populations to file.
'''
print("Writing population GIDs")
fname = os.path.join(spike_output_path, 'population_GIDs.dat')
        with open(fname, 'w') as f:
            f.write('%d\t%d\n' % (nodes_ex[0], nodes_ex[-1]))
            f.write('%d\t%d\n' % (nodes_in[0], nodes_in[-1]))
            f.write('%d\t%d\n' % (nodes_stim[0], nodes_stim[-1]))
return
'''
A configuration file for VIOLA.
'''
def create_viola_config_raw():
'''
Creates a configuration file for the visualization of raw simulation output
with VIOLA.
'''
# hex colors for VIOLA
popColors = []
for pop in ['EX', 'IN', 'STIM']:
popColors.append(mpc.rgb2hex(pops[pop]['color']))
# configuration dictionary for VIOLA
config_dict = {}
config_dict.update({
"popNum": 3,
"popNames": ','.join(['EX', 'IN', 'STIM']),
"spikesFiles": [label+'-%i.gdf' % X for X in [0,1,2]],
"timestamps": int(simtime / dt),
"resolution": dt,
"xSize": extent_length,
"ySize": extent_length,
"dataType": "neuron",
"posFiles": [label_positions+'-%i.dat' % X for X in [0,1,2]],
"timelineLength": 100,
"popColors": popColors,
})
with open(os.path.join(spike_output_path, 'config_raw.json'), 'w') as f:
json.dump(config_dict, f)
'''
Plotting functions for a network sketch and a ConnPlotter variant.
'''
def figure_network_sketch():
'''
    Plots a network sketch and illustrates connectivity using
    ConnPlotter's style.
'''
print('Plotting network sketch')
red_conn_dens = 1 # show connections in steps of
dilute_neurons = 1 # show neurons in steps of
print(' Diluting connection density: {}'.format(red_conn_dens))
print(' Diluting number of neurons shown: {}'.format(dilute_neurons))
# set up figure
fig = plt.figure(figsize=(13,5))
# grid spec for left and right panel
gs1 = gridspec.GridSpec(1, 10, wspace=0.0, left=0.05, right=1.05, bottom=0., top=1)
ax1 = plt.subplot(gs1[0, 3:], projection='3d')
# plot connectivity using ConnPlotter's style
gs = gridspec.GridSpec(1, 3, wspace=0.5, left=0.05, right=1.)
gs0 = gridspec.GridSpecFromSubplotSpec(3, 2, subplot_spec=gs[0,0],
wspace=0.01)
lList = [
('STIM', layerdict_stim),
('EX', layerdict_ex),
('IN', layerdict_in),
]
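    # which (pre, post) pairs to draw; rows follow lList, columns follow pList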
conns = [[1, 0], [1, 1], [1, 1]]
pList = ['EX', 'IN']
cDicts = [conn_dict_stim, conn_dict_ex, conn_dict_in]
for i, ((pre, lDict), cDict, conn) in enumerate(zip(lList, cDicts, conns)):
for j, post in enumerate(pList):
ax = fig.add_subplot(gs0[i, j], aspect='equal')
extent = lDict['extent']
x = np.linspace(-extent[0]/2, extent[0]/2, 51) # changed from 101
y = np.linspace(-extent[1]/2, extent[1]/2, 51)
X,Y = np.meshgrid(x, y)
            C = np.zeros(X.shape)
            # defaults, overwritten below when this (pre, post) pair connects
            cmap = cmap_white_to_color(hex_col_ex, 64)
            vmin, vmax = 0., 1.
if conn[j]:
                if 'kernel' not in cDict or cDict['kernel'] == 1.:
                    try:
                        # flat (constant) profile inside the circular mask
                        epsilon_stim = num_stim_conn * N_stim / NE
                        weights = epsilon_stim * cDict['weights']
C[np.sqrt(X**2 + Y**2) <= cDict['mask']['circular']['radius']] = weights
                        cmap = cmap_white_to_color(hex_col_ex, 64)
vmin = 0
vmax = weights
                    except KeyError:
                        raise
elif type(cDict['kernel']) is dict:
try:
sigma = cDict['kernel']['gaussian']['sigma']
if 'mask' in cDict.keys():
mask = np.sqrt(X**2 + Y**2) <= cDict['mask']['circular']['radius']
weights = cDict['weights']*epsilon
C[mask] = weights*np.exp(-(X[mask]**2 + Y[mask]**2) / (2*sigma**2)) # / (2*np.pi*sigma**2)
if weights > 0:
                                cmap = cmap_white_to_color(hex_col_ex, 64)
vmin = 0
vmax = weights
else:
                                cmap = cmap_white_to_color(hex_col_in, 64)
vmin = weights
vmax = 0
else:
weights = cDict['weights']
C = weights*np.exp(-(X**2 + Y**2) / (2*sigma**2)) # / (2*np.pi*sigma**2)
if weights > 0:
                                cmap = cmap_white_to_color(hex_col_ex, 64)
vmin = 0
vmax = weights
else:
                                cmap = cmap_white_to_color(hex_col_in, 64, whitemin=False)
vmin = weights
vmax = 0
                    except KeyError:
                        raise
else:
pass
cmap.set_bad('0.75')
im = ax.pcolormesh(X,Y,np.ma.array(C, mask=C==0), cmap=cmap, vmin=vmin, vmax=vmax)
# im = ax.pcolormesh(X,Y,C, cmap=cmap, vmin=vmin, vmax=vmax)
if j == (len(pList)-1):
bbox = np.array(ax.get_position())
cax = fig.add_axes([bbox[1][0]+0.01, bbox[0][1], 0.015, (bbox[1][1]-bbox[0][1])])
axcb = fig.colorbar(im, cax=cax, orientation='vertical')
cbarlabel = r'$\epsilon_{YX}Jg_{YX}$ (pA)'
# cbarlabel = r'$\epsilon_{Y,\mathrm{%s}}Jg_{Y,\mathrm{%s}}$ (pA)' % (pre,pre)
axcb.set_label(cbarlabel)
axcb.locator = MaxNLocator(nbins=5)
axcb.update_ticks()
ax.set_xticks([-2., -1, 0, 1., 2.])
ax.set_yticks([-2., -1, 0, 1., 2.])
if i != (len(lList)-1):
ax.set_xticklabels([])
else:
ax.set_xlabel(r'$x_i - x_j$ (mm)', labelpad=0)
if i == 0:
ax.set_title(r'$Y=${}'.format(post))
if j != 0:
ax.set_yticklabels([])
else:
ax.set_ylabel('$X=${}\n'.format(pre) + r'$y_i - y_j$ (mm)', labelpad=0)
if i == 0 and j == 0:
ax.text(0.05, 0.95, 'A',
horizontalalignment='center',
verticalalignment='center',
fontsize=16, fontweight='demibold',
transform=fig.transFigure)
# network sketch
ax1.text2D(0.4, 0.95, 'B',
horizontalalignment='center',
verticalalignment='center',
fontsize=16, fontweight='demibold',
transform=fig.transFigure)
# build figure from bottom to top
pops_list = ['IN', 'EX', 'STIM'] # bottom, center, top
dots_IN_IN, srcdot_IN_IN = plot_connections(ax1, 'IN', 'IN', pops_list, red_conn_dens)
plot_layer(ax1, 'IN', pops_list, dilute_neurons)
plot_dots(ax1, dots_IN_IN)
plot_dots(ax1, srcdot_IN_IN)
dots_IN_EX, srcdot_IN_EX = plot_connections(ax1, 'IN', 'EX', pops_list, red_conn_dens)
_, srcdot_EX_IN = plot_connections(ax1, 'EX', 'IN', pops_list, red_conn_dens)
dots_EX_EX, srcdot_EX_EX = plot_connections(ax1, 'EX', 'EX', pops_list, red_conn_dens)
plot_layer(ax1, 'EX', pops_list, dilute_neurons)
plot_dots(ax1, dots_IN_EX)
plot_dots(ax1, dots_EX_EX)
plot_dots(ax1, srcdot_IN_EX)
plot_dots(ax1, srcdot_EX_EX)
plot_dots(ax1, srcdot_EX_IN)
_, srcdot_STIM_EX = plot_connections(ax1, 'STIM', 'EX', pops_list, red_conn_dens)
plot_layer(ax1, 'STIM', pops_list, dilute_neurons)
plot_dots(ax1, srcdot_STIM_EX)
# make plot look nice
ax1.set_xlabel('$x$ (mm)', labelpad=-1)
ax1.set_ylabel('$y$ (mm)', labelpad=-1)
ax1.set_xticks([-2., -1, 0, 1., 2.])
ax1.set_yticks([-2., -1, 0, 1., 2.])
ax1.set_xlim(-1.95, 1.95)
ax1.set_ylim(-1.95, 1.95)
ax1.xaxis.set_tick_params(pad=-1)
ax1.yaxis.set_tick_params(pad=-1)
ax1.w_zaxis.line.set_lw(0.)
ax1.set_zticks([])
ax1.grid(False)
ax1.xaxis.pane.set_edgecolor('white')
ax1.yaxis.pane.set_edgecolor('white')
ax1.xaxis.pane.fill = False
ax1.yaxis.pane.fill = False
ax1.zaxis.pane.fill = False
# legend
handles = \
[Patch(color=pops['STIM']['color']),
Patch(color=pops['EX']['color']),
Patch(color=pops['IN']['color']),
plt.Line2D((0,1),(0,0), color='white', marker='o',
markeredgecolor='black', linestyle=''),
plt.Line2D((0,1),(0,0), color=pops['EX']['conn_color_light']),
plt.Line2D((0,1),(0,0), color=pops['IN']['conn_color_light'])]
labels = \
['STIM',
'EX',
'IN',
'source',
'exc. connection',
'inh. connection']
ax1.legend(handles, labels, numpoints=1, loc=2, bbox_to_anchor=(0.7, 0.9),
fontsize=10)
ax1.view_init(elev=20, azim=-60)
fig.savefig(os.path.join(spike_output_path, 'network_sketch.pdf'), dpi=320,
bbox_inches=0)
fig.savefig(os.path.join(spike_output_path, 'network_sketch.eps'), dpi=320,
bbox_inches=0)
'''
Definition of helper functions for the network sketch.
'''
def plot_layer(ax, pop, pops_list, dilute_neurons):
# plot neurons at their original location
pos = np.array(pops[pop]['pos']).transpose()
z0 = pops_list.index(pop)
xshow = pos[0][0:len(pos[0]):dilute_neurons]
yshow = pos[1][0:len(pos[1]):dilute_neurons]
ax.plot(xshow, yshow, zs=z0,
marker=',',
linestyle='None',
color=pops[pop]['color'],
alpha=1.)
ax.text(-2, -2.8, z0+0.3, pop)
return
def plot_connections(ax, src, tgt, pops_list, red_conn_dens):
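    # Draws all outgoing connections of one representative source neuron (the
    # element nearest to xyloc in layer `src`) to its targets in layer `tgt`,
    # and returns dot specifications [positions, z, facecolor, edgecolor,
    # markersize] for overplotting with plot_dots().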
# z-positions
z0 = pops_list.index(src)
    z1 = pops_list.index(tgt)
# x,y-positions
if src == 'STIM':
xyloc = [0., 0.]
elif src == tgt:
xyloc = [0.8, 0.8]
elif src == 'EX':
xyloc = [0.8, -0.8]
elif src == 'IN':
xyloc = [-0.8, -0.8]
srcid = tp.FindNearestElement(pops[src]['layer'], xyloc, False)
srcloc = tp.GetPosition(srcid)[0]
tgtsloc = np.array(tp.GetTargetPositions(srcid,
pops[tgt]['layer'])[0])
# targets do not get picked in the same order;
# they are sorted here for reproducibility
tgtsloc = tgtsloc[np.argsort(tgtsloc[:,0])]
tgtsloc_show = tgtsloc[0:len(tgtsloc):red_conn_dens]
for tgtloc in tgtsloc_show:
ax.plot([srcloc[0], tgtloc[0]], [srcloc[1], tgtloc[1]],
[z0, z1], c=pops[src]['conn_color_light'], linewidth=1.)
# highlight target
ax.plot(xs=[tgtloc[0]], ys=[tgtloc[1]], zs=[z1],
marker='o',
markeredgecolor='none',
markersize=2, color=pops[src]['conn_color_dark'],
alpha=1.)
# to be printed on top
dots = [tgtsloc_show, z1, pops[src]['conn_color_dark'], 'none', 2]
srcdot = [srcloc, z0, 'white', 'black', 3]
return dots, srcdot
def plot_dots(ax, dots):
    if type(dots[0][0]) == np.ndarray:
        # dots[0] is an (n, 2) array of target positions
        xs, ys = np.asarray(dots[0]).T
    else:
        xs = [dots[0][0]]
        ys = [dots[0][1]]
ax.plot(xs, ys, zs=dots[1], marker='o', markeredgecolor=dots[3],
markersize=dots[4], c=dots[2], linestyle='none', alpha=1.)
return
'''
Plot a figure of spiking activity showing unsorted and sorted raster plots and
spike counts.
'''
def figure_raster(times):
print('Plotting spiking activity for time interval (ms): ' + str(times))
# stepsize for diluting (1 = all)
    dilute = 5
print(' Diluting spike number: {}'.format(dilute))
fig = plt.figure(figsize=(13., 8.))
fig.subplots_adjust(top=0.94, bottom=0.1, left=0.08, right=0.97,
wspace=0.3, hspace=1.)
gs = gridspec.GridSpec(6,5)
# A: unsorted raster
gs_cell = gs[:2, :4]
pops_list = ['STIM', 'EX', 'IN'] # top to bottom
ax0 = _plot_raster_unsorted('A', gs_cell, pops_list, times, dilute)
# B: spike count histogram over unit
gs_cell = gs[:2, 4]
pops_list = ['STIM', 'EX', 'IN']
_plot_unit_histogram('B', gs_cell, pops_list, sharey=ax0)
# C: spike count histogram over time
gs_cell = gs[2:4, :4]
pops_list = ['STIM', 'EX', 'IN'] # top to bottom
_plot_time_histogram('C', gs_cell, pops_list, times)
# legend to the bottom right
ax = plt.subplot(gs[2:4,4:]) # just for the legend
plt.axis('off')
handles = [Patch(color=pops['STIM']['color']),
Patch(color=pops['EX']['color']),
Patch(color=pops['IN']['color'])]
labels = ['STIM',
'EX',
'IN']
ax.legend(handles, labels, loc='center')
# D: sorted raster
gs_cell = gs[4:6, :4]
pops_list = ['EX', 'IN', 'STIM']
_plot_raster_sorted('D', gs_cell, pops_list, times, dilute)
# E: spike count histogram over space
gs_cell = gs[4:6, 4]
pops_list = ['IN', 'EX', 'STIM'] # bottom to top
_plot_space_histogram('E', gs_cell, pops_list)
fig.savefig(os.path.join(spike_output_path, 'raster.pdf'), dpi=320)
fig.savefig(os.path.join(spike_output_path, 'raster.eps'), dpi=320)
'''
Definition of helper functions for the spiking activity.
'''
def _plot_spikes(ax, dilute, nodes,
events,
layerdict,
color='r',
marker=',', poplabel='EX',
position_sorted=True):
'''
Plots unsorted or sorted spike raster, flexible for both populations.
'''
X = []
T = []
for i, j in enumerate(nodes):
# extract spikes
t = events['times'][events['senders'] == j]
x, y = layerdict['positions'][i]
if t.size > 0:
            T = np.r_[T, t]  # concatenate spike times
if position_sorted:
pos = x # sorted by x positions
else:
pos = j
            X = np.r_[X, np.zeros_like(t) + pos]
ax.plot(T[::dilute], X[::dilute], marker, markersize=.1, color=color, label=poplabel,
rasterized=False)
return
def _plot_space_histogram(label, gs_cell, pops_list):
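    # Histograms of spike x-positions, one panel per population.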
gs_loc = gridspec.GridSpecFromSubplotSpec(1, 3, gs_cell, wspace=0.15)
binsize = 0.1 # should be the same as used for preprocessing
bins = np.arange(-2, 2+binsize, binsize)
xlists = []
for x, gid0, senders in zip([np.array(pops['IN']['layerdict']['positions'])[:, 0],
np.array(pops['EX']['layerdict']['positions'])[:, 0],
np.array(pops['STIM']['layerdict']['positions'])[:, 0]],
[pops['IN']['nodes'][0],
pops['EX']['nodes'][0],
pops['STIM']['nodes'][0]],
[pops['IN']['events']['senders'],
pops['EX']['events']['senders'],
pops['STIM']['events']['senders']]):
xlists += [[x[n-gid0] for n in senders]]
data = {}
data['IN'] = xlists[0]
data['EX'] = xlists[1]
data['STIM'] = xlists[2]
for i,pop in enumerate(pops_list):
ax = plt.subplot(gs_loc[0,i])
ax.hist(data[pop], bins=bins, histtype='stepfilled',
color=pops[pop]['color'], edgecolor='none',
orientation='horizontal')
ax.set_ylim(bins[0], bins[-1])
ax.set_yticklabels([])
ax.xaxis.set_major_locator(MaxNLocator(nbins=2, prune='upper'))
plt.xticks(rotation=-90)
if i==0:
ax.text(-0.6, 1.05, label, ha='left', va='bottom', fontsize=16,
fontweight='demibold', transform=ax.transAxes)
if i==int(len(pops_list)/2):
ax.set_title('spike count\n' + r'($\Delta={}$ mm)'.format(binsize))
ax.set_xlabel('count')
return
def _plot_time_histogram(label, gs_cell, pops_list, times):
gs_loc = gridspec.GridSpecFromSubplotSpec(3,1, gs_cell, hspace=0.15)
# binsize should be the same as used for preprocessing
binsize = 1 # in ms
bins = np.arange(transient, simtime+binsize, binsize)
for i,pop in enumerate(pops_list):
ax = plt.subplot(gs_loc[i,0])
ax.hist(pops[pop]['events']['times'], bins=bins, histtype='stepfilled',
color=pops[pop]['color'], edgecolor='none')
ax.set_ylim(bottom=0) # fixing only the bottom
ax.set_xlim(times[0], times[1])
ax.yaxis.set_major_locator(MaxNLocator(nbins=3, prune='upper'))
if i==0:
ax.set_title('spike count ' + r'($\Delta t={}$ ms)'.format(binsize))
ax.text(-0.05, 1.05, label, ha='left', va='bottom', fontsize=16,
fontweight='demibold', transform=ax.transAxes)
if i==int(len(pops_list)/2):
ax.set_ylabel('count')
else:
ax.set_xticklabels([])
return
def _plot_unit_histogram(label, gs_cell, pops_list, sharey):
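    # Histogram of spike counts per sender GID, sharing its y-axis with the
    # unsorted raster plot.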
binsize = order / extent_length / 2. # neurons
bins = np.arange(min([min(pops[pop]['nodes']) for pop in pops_list]),
max([max(pops[pop]['nodes']) for pop in pops_list])+binsize,
binsize)
ax = plt.subplot(gs_cell)
for i,pop in enumerate(pops_list):
ax.hist(pops[pop]['events']['senders'],
bins=bins, histtype='stepfilled',
color=pops[pop]['color'], edgecolor='none',
orientation='horizontal', stacked=False, alpha=1)
ax.xaxis.set_major_locator(MaxNLocator(nbins=3))
ax.axis(ax.axis('tight'))
ax.set_xlabel('count')
ax.set_ylim(sharey.get_ylim())
ax.set_yticklabels([])
ax.text(-0.18, 1.05, label, ha='left', fontsize=16, va='bottom',
fontweight='demibold', transform=ax.transAxes)
ax.set_title('spike count\n' + r'($\Delta={}$ units)'.format(int(binsize)))
return
def _plot_raster_unsorted(label, gs_cell, pops_list, times, dilute):
ax = plt.subplot(gs_cell)
for i,pop in enumerate(pops_list):
_plot_spikes(ax, dilute, nodes=pops[pop]['nodes'],
events=pops[pop]['events'],
layerdict=pops[pop]['layerdict'],
color=pops[pop]['color'],
marker=',', poplabel=pop,
position_sorted=False)
ax.axis(ax.axis('tight'))
ax.set_xlim(times[0], times[1])
ax.set_xticklabels([])
ax.set_xlabel('')
ax.set_title('unsorted spike raster')
ax.text(-0.05, 1.05, label, fontsize=16, ha='left',
va='bottom', fontweight='demibold', transform=ax.transAxes)
ax.set_ylabel('neuron ID')
return ax
def _plot_raster_sorted(label, gs_cell, pops_list, times, dilute):
ax = plt.subplot(gs_cell)
for pop in pops_list:
_plot_spikes(ax, dilute, nodes=pops[pop]['nodes'],
events=pops[pop]['events'],
layerdict=pops[pop]['layerdict'],
color=pops[pop]['color'],
marker=',', poplabel=pop,
position_sorted=True)
ax.set_title('sorted spike raster')
ax.set_ylabel('x position (mm)')
ax.set_xlabel('time (ms)')
ax.set_xlim(times[0], times[1])
ax.text(-0.05, 1.05, label, ha='left', fontsize=16, va='bottom',
fontweight='demibold', transform=ax.transAxes)
return
if __name__ == '__main__':
# these functions are needed for generating test data for VIOLA
merge_spike_files()
write_population_GIDs()
create_viola_config_raw()
    # these functions are optional; set the flag below to True to plot figures
    if False:
figure_network_sketch()
times = [transient, simtime] # displayed time interval
figure_raster(times)
|
HBPVIS/VIOLA
|
test_data/topo_brunel_alpha_nest.py
|
Python
|
gpl-2.0
| 44,592
|
#
#
# Copyright (C) 2007 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
class Error(Exception):
"""An error occurred during Q&A testing.
"""
pass
class OutOfNodesError(Error):
"""Out of nodes.
"""
pass
class OutOfInstancesError(Error):
"""Out of instances.
"""
pass
class UnusableNodeError(Error):
"""Unusable node.
"""
pass
|
sigmike/ganeti
|
qa/qa_error.py
|
Python
|
gpl-2.0
| 1,036
|
try:
import simplegui
except ImportError:
import SimpleGUICS2Pygame.simpleguics2pygame as simplegui
simplegui.Frame._hide_status = True
simplegui.Frame._keep_timers = False
from state import *
import Main
import math
game_gui = None
piece_color = {
GUARD: '#FF5733',
DRAGON: '#E12E2E',
KING: '#539E36'
}
player_turn_gui = {
KING_PLAYER: "A",
DRAGON_PLAYER: "A",
}
def point_to_rec_coordinates(v, rect_size=100):
def to_tuple(vector, x=0, y=0):
"""
        Return the vector, shifted by the (x, y) offset, as a tuple.
:return: (int or float, int or float)
"""
return vector[0] + x, vector[1] + y
return [to_tuple(v, 0, rect_size), to_tuple(v), to_tuple(v, rect_size),
to_tuple(v, rect_size, rect_size)]
def get_rect_from_coordinates(x, y):
x_rect = math.floor(x / 100)
y_rect = math.floor(y / 100)
return x_rect * 5 + (4 - y_rect)
def number_to_coordinate(n):
return (n // 5) * 100, ((4 - n) % 5) * 100
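# Worked example for the two mappings above (100 px tiles, 5x5 board):
# number_to_coordinate(7) == (100, 200), and
# get_rect_from_coordinates(100, 200) == 7, i.e. they are mutual inverses
# for on-board coordinates.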
class Vector:
"""
Vector (self.x, self.y).
"""
def __init__(self, x, y):
"""
Initialize the vector.
:param x: int or float
:param y: int or float
"""
assert isinstance(x, int) or isinstance(x, float)
assert isinstance(y, int) or isinstance(y, float)
self.x = x
self.y = y
def add(self, other):
"""
Adds other to self.
:param other: Vector
"""
assert isinstance(other, Vector)
self.x += other.x
self.y += other.y
def sub(self, other):
"""
        Subtracts other from self.
:param other: Vector
"""
assert isinstance(other, Vector)
self.x -= other.x
self.y -= other.y
def magnitude(self):
return math.sqrt(self.x * self.x + self.y * self.y)
def div(self, n):
self.x = self.x / n
self.y = self.y / n
def normalize(self):
m = self.magnitude()
if m != 0 and m != 1:
self.div(m)
def limit(self, maximum):
if self.mag_sq() > maximum * maximum:
self.normalize()
self.mul_scalar(maximum)
def mag_sq(self):
return self.x * self.x + self.y * self.y
def mul_scalar(self, scalar):
"""
Multiplies self by scalar.
:param scalar: int or float
"""
assert isinstance(scalar, int) or isinstance(scalar, float)
self.x *= scalar
self.y *= scalar
def distance(self, other):
"""
Return the distance between self and other.
:param other: Vector
:return: float >= 0
"""
assert isinstance(other, Vector)
return math.sqrt((self.x - other.x) ** 2 + (self.y - other.y) ** 2)
def same(self, other):
"""
If (self.x, self.y) == (other.x, other.y)
then return True,
else return False.
:param other: Vector
"""
assert isinstance(other, Vector)
return (self.x == other.x) and (self.y == other.y)
def to_tuple(self, x=0, y=0):
"""
        Return the vector, shifted by the (x, y) offset, as a tuple.
:return: (int or float, int or float)
"""
return self.x + x, self.y + y
def to_coordinates(self, rect_size=100):
return point_to_rec_coordinates(self.to_tuple(), rect_size)
class GameGUI:
def __init__(self, evaluate, search, max_depth, player1_human,
player2_human):
global game_gui, player_turn_gui
self.moves = []
self.pieces = {}
self.state = get_default_game_start()
self.expanded_state = create_expanded_state_representation(self.state)
self.evaluate = evaluate
self.search = search
self.max_depth = max_depth
self.recalc_pieces()
self.terminal = False
self.utility = 0
self.time = 0
self.paused = False
self.counter = -1
game_gui = self
player_turn_gui[KING_PLAYER] = "H" if player1_human else "A"
player_turn_gui[DRAGON_PLAYER] = "H" if player2_human else "A"
self.frame = simplegui.create_frame('Madking', 500, 500)
self.ut_label = self.frame.add_label("Utility: " + str(self.utility))
self.time_label = self.frame.add_label("Time: " + str(self.time))
def pause_handle():
self.paused = not self.paused
self.frame.add_button("pause", pause_handle)
self.frame.set_draw_handler(draw)
self.frame.set_mouseclick_handler(mouse_pressed)
self.frame.start()
def update_utility(self, u):
self.utility = u
self.ut_label.set_text("Utility: " + str(self.utility))
def update_time(self, t):
self.time = t
self.time_label.set_text("Time: " + str(self.time))
def recalc_pieces(self):
for i, num in get_live_pieces_enumeration_no_king(self.state):
if num >= 100:
num -= 100
x, y = number_to_coordinate(num)
self.pieces[num] = Piece(self.expanded_state[num], x, y, num)
k_num = get_king_tile_index(self.state)
x, y = number_to_coordinate(k_num)
self.pieces[k_num] = Piece(KING, x, y, k_num)
def add_move(self, move):
self.moves.append(move)
self.counter += 1
class Piece:
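    # Class-level shared state: `selected` is the currently selected tile
    # index and (mouseX, mouseY) is the pixel position a moved piece animates
    # toward; -1 means "none".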
selected = -1
mouseX = -1
mouseY = -1
def __init__(self, piece_type, x, y, num):
self.location = Vector(x, y)
self.velocity = Vector(0, 0)
self.top_speed = 2000
self.type = piece_type
self.num = num
def update(self, canvas):
if Piece.mouseX != -1:
x = self.mouseX
y = self.mouseY
mouse = Vector(x, y)
mouse.sub(self.location)
if mouse.magnitude() > 3:
mouse.normalize()
mouse.mul_scalar(10)
acceleration = mouse
self.location.add(acceleration)
self.velocity.limit(self.top_speed)
self.location.add(self.velocity)
else:
Piece.selected = -1
self.location.x = x
self.location.y = y
Piece.mouseX = -1
game_gui.recalc_pieces()
self.display(canvas)
def display(self, canvas):
        text_color = '#ffffff'  # grayscale 255 -> white label text
if self.num != Piece.selected:
canvas.draw_polygon(self.location.to_coordinates(), 1,
piece_color[self.type], piece_color[self.type])
else:
            outline = '#010101'  # grayscale 1 -> near-black highlight border
            canvas.draw_polygon(self.location.to_coordinates(), 10, outline,
                                piece_color[self.type])
        canvas.draw_text(self.type, self.location.to_tuple(40, 50), 20, text_color)
def draw_empty_board(canvas):
    fill = '#ffffff'    # grayscale 255 -> white tile fill
    border = '#000000'  # grayscale 0 -> black tile border
for i in range(BOARD_NUM_RANKS):
for j in range(BOARD_NUM_FILES):
tile_i = tile_index(chr(i + 65) + str(j + 1))
x, y = number_to_coordinate(tile_i)
            canvas.draw_polygon(point_to_rec_coordinates((x, y)), 1, border,
                                fill)
def mouse_pressed(mouse):
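    # A first click selects a piece; a second click on a valid destination
    # tile performs the move and starts the piece animation.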
if Piece.mouseX == -1 and not game_gui.terminal:
state = game_gui.state
expanded_state = game_gui.expanded_state
num = get_rect_from_coordinates(mouse[0], mouse[1])
if player_turn_gui[player_turn(state)] == 'H':
if Piece.selected == -1 and expanded_state[num] != '.':
Piece.selected = num
Piece.mouseX = -1
Piece.mouseY = -1
elif Piece.selected != -1:
selected = Piece.selected
if selected != num and (selected, num) in \
all_valid_moves(state, expanded_state):
game_gui.add_move((selected, num))
move_piece(game_gui.state, game_gui.expanded_state,
selected, num)
Piece.mouseX, Piece.mouseY = number_to_coordinate(num)
Piece.selected = num
game_gui.pieces[num] = game_gui.pieces[selected]
game_gui.pieces[num].num = num
del game_gui.pieces[selected]
terminal, utility = is_terminal(game_gui.state,
game_gui.expanded_state)
game_gui.terminal = terminal
game_gui.utility = utility
else:
Piece.selected = -1
def draw(canvas):
"""
    Event handler that draws the board and all pieces; when it is an AI
    player's turn and no piece is animating, it also computes and applies
    the next AI move.
:param canvas: simplegui.Canvas
"""
if not game_gui.paused:
if not game_gui.terminal:
if Piece.selected == -1 and player_turn_gui[
player_turn(game_gui.state)] == 'A':
ai_move, time, utility = \
Main.next_move(game_gui.state, game_gui.expanded_state,
False, game_gui.evaluate, game_gui.search,
game_gui.max_depth)
game_gui.update_utility(utility)
game_gui.update_time(time)
game_gui.add_move(ai_move)
move_piece(game_gui.state, game_gui.expanded_state, ai_move[0],
ai_move[1])
Piece.mouseX, Piece.mouseY = number_to_coordinate(ai_move[1])
Piece.selected = ai_move[1]
game_gui.pieces[ai_move[1]] = game_gui.pieces[ai_move[0]]
game_gui.pieces[ai_move[1]].num = ai_move[1]
del game_gui.pieces[ai_move[0]]
terminal, utility = is_terminal(game_gui.state,
game_gui.expanded_state)
game_gui.terminal = terminal
game_gui.utility = utility
draw_empty_board(canvas)
for piece in game_gui.pieces.values():
if Piece.mouseX != -1 and piece.num == Piece.selected:
piece.update(canvas)
piece.display(canvas)
if game_gui.terminal:
text = ""
if game_gui.utility == DRAW:
text = "DRAW"
elif game_gui.utility == KING_WIN:
text = "KING WON"
elif game_gui.utility == DRAGON_WIN:
text = "DRAGON WON"
canvas.draw_text(text, (50, 250), 30, "#000000")
|
francois-rd/madking
|
gui.py
|
Python
|
gpl-2.0
| 10,840
|
import time
try:
    from time import monotonic as timeout_time
except ImportError:
from time import time as timeout_time
def compute_resolution(func):
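    # Estimate the effective resolution of func() by sampling pairs of
    # timestamps for roughly one second and keeping the smallest positive
    # difference observed (requiring at least 3 data points).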
resolution = None
points = 0
timeout = timeout_time() + 1.0
previous = func()
while timeout_time() < timeout or points < 3:
for loop in range(10):
t1 = func()
t2 = func()
dt = t2 - t1
if 0 < dt:
break
else:
dt = t2 - previous
if dt <= 0.0:
continue
if resolution is not None:
resolution = min(resolution, dt)
else:
resolution = dt
points += 1
previous = func()
return resolution
def format_duration(dt):
if dt >= 1e-3:
return "%.0f ms" % (dt * 1e3)
if dt >= 1e-6:
return "%.0f us" % (dt * 1e6)
else:
return "%.0f ns" % (dt * 1e9)
def test_clock(name, func):
print("%s:" % name)
resolution = compute_resolution(func)
print("- resolution in Python: %s" % format_duration(resolution))
clocks = ['clock', 'perf_counter', 'process_time']
if hasattr(time, 'monotonic'):
clocks.append('monotonic')
clocks.append('time')
for name in clocks:
func = getattr(time, name)
test_clock("%s()" % name, func)
info = time.get_clock_info(name)
print("- implementation: %s" % info.implementation)
print("- resolution: %s" % format_duration(info.resolution))
clock_ids = [name for name in dir(time) if name.startswith("CLOCK_")]
clock_ids.sort()
for clock_id_text in clock_ids:
clock_id = getattr(time, clock_id_text)
name = 'clock_gettime(%s)' % clock_id_text
def gettime():
return time.clock_gettime(clock_id)
try:
gettime()
except OSError as err:
print("%s failed: %s" % (name, err))
continue
test_clock(name, gettime)
resolution = time.clock_getres(clock_id)
print("- announced resolution: %s" % format_duration(resolution))
|
whichwit/scm-stv
|
docs/support/pep/pep-0418/clock_resolution.py
|
Python
|
gpl-2.0
| 1,995
|
#!/usr/bin/env python
import gtk
import vte
import os
import time
import pango
class Admin_Notebook():
def create_notebook(self):
notebook = gtk.Notebook()
notebook.set_current_page(0)
notebook.set_tab_pos(gtk.POS_LEFT)
notebook.show()
return notebook
def create_frame(self, name, border=10):
frame = gtk.Frame("")
frame.set_border_width(border)
frame.show()
label = gtk.Label(name)
label.set_alignment(0, 0.5)
label.set_justify(True)
label.show()
return frame, label
def append_tab(self, notebook, frame, label):
page = notebook.append_page(frame,label)
notebook.set_current_page(page)
return page
class Admin_Terminal():
def create_terminal(self, cmd_exit, args_show=None):
terminal = vte.Terminal()
terminal.connect("child-exited", cmd_exit)
terminal.connect("show", self.show_callback, args_show)
terminal.fork_command("bash")
terminal.show()
return terminal
def show_callback(self, terminal, args=None):
time.sleep(1)
if args:
terminal.feed_child(str(args))
def load_pref_file(self, session=None):
if session:
from configobj import ConfigObj
if session == "root":
pref_file = "/root/.pyAeJokuaa/plugins/hesapea/pref.conf"
else:
pref_file = "/home/"+session+"/.pyAeJokuaa/plugins/hesapea/pref.conf"
if os.path.isfile(pref_file):
config = ConfigObj(pref_file)
preference = {}
for i in config:
preference[i] = config[i]
return preference
else:
os.system("mkdir -p "+pref_file.replace("pref.conf", ""))
config = ConfigObj()
config.filename = pref_file
config["font"] = "Courier 10 Pitch 9"
config["transparent"] = False
config.write()
                # return the freshly written defaults so that callers like
                # load_preference() can iterate over them
                return dict(config)
else:
            return {}
def load_preference(self, terminal, session):
dict_pref = self.load_pref_file(session)
for i in dict_pref:
if i == "font":
terminal.set_font_full(pango.FontDescription(dict_pref[i]),
vte.ANTI_ALIAS_FORCE_DISABLE)
if i == "transparent":
if dict_pref[i] == "False":
terminal.set_background_transparent(False)
else:
terminal.set_background_transparent(True)
|
sergiotocalini/pyaejokuaa
|
trunk/plugins/hesapea/controller.py
|
Python
|
gpl-2.0
| 2,649
|
self.description = "Sysupgrade with an ignored package preventing other upgrades"
lp1 = pmpkg("glibc", "1.0-1")
lp2 = pmpkg("gcc-libs", "1.0-1")
lp2.depends = ["glibc>=1.0-1"]
lp3 = pmpkg("pcre", "1.0-1")
lp3.depends = ["gcc-libs"]
for p in lp1, lp2, lp3:
self.addpkg2db("local", p)
sp1 = pmpkg("glibc", "1.0-2")
sp2 = pmpkg("gcc-libs", "1.0-2")
sp2.depends = ["glibc>=1.0-2"]
sp3 = pmpkg("pcre", "1.0-2")
sp3.depends = ["gcc-libs"]
for p in sp1, sp2, sp3:
self.addpkg2db("sync", p)
self.args = "-Su --ignore %s --ask=32" % sp1.name
self.addrule("PACMAN_RETCODE=0")
self.addrule("PKG_VERSION=glibc|1.0-1")
self.addrule("PKG_VERSION=gcc-libs|1.0-1")
self.addrule("PKG_VERSION=pcre|1.0-2")
|
vadmium/pacman-arch
|
test/pacman/tests/sync140.py
|
Python
|
gpl-2.0
| 689
|
import copy
import logging
import os.path
from error import JobBrokenError
from errors import CacheUpstreamError
from infrastructure import infrastructure
from infrastructure_cache import cache
from job import Job, RESTORE_CFG
from job_types import JobTypes
from tasks import NodeStopTask, RsyncBackendTask, MinionCmdTask, HistoryRemoveNodeTask
import storage
logger = logging.getLogger('mm.jobs')
class MoveJob(Job):
# used to write group id
GROUP_FILE_PATH = RESTORE_CFG.get('group_file')
# used to mark source node that content has been moved away from it
GROUP_FILE_DIR_MOVE_DST_RENAME = RESTORE_CFG.get('group_file_dir_move_dst_rename')
MERGE_GROUP_FILE_MARKER_PATH = RESTORE_CFG.get('merge_group_file_marker')
MERGE_GROUP_FILE_DIR_MOVE_SRC_RENAME = RESTORE_CFG.get('merge_group_file_dir_move_src_rename')
PARAMS = ('group', 'uncoupled_group', 'uncoupled_group_fsid', 'merged_groups',
'resources',
'src_host', 'src_port', 'src_backend_id', 'src_family', 'src_base_path',
'dst_host', 'dst_port', 'dst_backend_id', 'dst_family', 'dst_base_path')
def __init__(self, **kwargs):
super(MoveJob, self).__init__(**kwargs)
self.type = JobTypes.TYPE_MOVE_JOB
@classmethod
def new(cls, *args, **kwargs):
job = super(MoveJob, cls).new(*args, **kwargs)
try:
unc_group = storage.groups[kwargs['uncoupled_group']]
job.uncoupled_group_fsid = str(unc_group.node_backends[0].fs.fsid)
except Exception:
job.release_locks()
raise
return job
def _set_resources(self):
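        # declare the hosts and filesystems this job occupies; presumably
        # used by the scheduler to serialize jobs on shared resources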
resources = {
Job.RESOURCE_HOST_IN: [],
Job.RESOURCE_HOST_OUT: [],
Job.RESOURCE_FS: [],
}
resources[Job.RESOURCE_HOST_IN].append(self.dst_host)
resources[Job.RESOURCE_HOST_OUT].append(self.src_host)
for gid in [self.uncoupled_group, self.group] + self.merged_groups:
g = storage.groups[gid]
resources[Job.RESOURCE_FS].append(
(g.node_backends[0].node.host.addr, str(g.node_backends[0].fs.fsid)))
self.resources = resources
@property
def src_node_backend(self):
return self.node_backend(self.src_host, self.src_port, self.src_backend_id)
@property
def dst_node_backend(self):
return self.node_backend(self.dst_host, self.dst_port, self.dst_backend_id)
def on_start(self):
group = storage.groups[self.group]
self.check_node_backends(group)
if group.couple is None:
raise JobBrokenError('Group {} is uncoupled, cannot be moved'.format(
group.group_id))
if storage.FORBIDDEN_DC_SHARING_AMONG_GROUPS:
uncoupled_group = storage.groups[self.uncoupled_group]
self.check_node_backends(uncoupled_group)
try:
ug_dc = uncoupled_group.node_backends[0].node.host.dc
except CacheUpstreamError:
raise RuntimeError('Failed to get dc for host {}'.format(
uncoupled_group.node_backends[0].node.host))
for g in group.couple:
if g.group_id == group.group_id:
continue
dcs = set()
for nb in g.node_backends:
try:
dcs.add(nb.node.host.dc)
except CacheUpstreamError:
raise RuntimeError('Failed to get dc for host {}'.format(
nb.node.host))
if ug_dc in dcs:
raise JobBrokenError(
'Cannot move group {0} to uncoupled group '
'{1}, because group {2} is already in dc {3}'.format(
self.group, self.uncoupled_group, g.group_id, ug_dc))
src_backend = group.node_backends[0]
if src_backend.status != storage.Status.OK:
raise JobBrokenError('Group {0} node backend {1} status is {2}, should be {3}'.format(
group.group_id, src_backend, src_backend.status, storage.Status.OK))
def human_dump(self):
data = super(MoveJob, self).human_dump()
data['src_hostname'] = cache.get_hostname_by_addr(data['src_host'], strict=False)
data['dst_hostname'] = cache.get_hostname_by_addr(data['dst_host'], strict=False)
return data
def marker_format(self, marker):
hostnames = []
for host in (self.src_host, self.dst_host):
try:
hostnames.append(cache.get_hostname_by_addr(host))
except CacheUpstreamError:
raise RuntimeError('Failed to resolve host {0}'.format(host))
src_hostname, dst_hostname = hostnames
return marker.format(
group_id=str(self.group),
src_host=self.src_host,
src_hostname=src_hostname,
src_backend_id=self.src_backend_id,
src_port=str(self.src_port),
src_base_path=self.src_base_path,
dst_host=self.dst_host,
dst_hostname=dst_hostname,
dst_port=str(self.dst_port),
dst_base_path=self.dst_base_path,
dst_backend_id=self.dst_backend_id)
def create_tasks(self):
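        # Task pipeline: stop the backends of any merged donor groups, stop
        # the destination backend, make the source backend read-only, rsync
        # the group data (plus optional additional files), stop the source
        # backend, reconfigure both nodes, remove stale history records, and
        # finally enable the destination backend.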
for group_id in self.merged_groups or []:
merged_group = storage.groups[group_id]
merged_nb = merged_group.node_backends[0]
merged_group_file = (os.path.join(merged_nb.base_path,
self.GROUP_FILE_PATH)
if self.GROUP_FILE_PATH else
'')
merged_path = ''
if self.MERGE_GROUP_FILE_DIR_MOVE_SRC_RENAME and merged_group_file:
merged_path = os.path.join(
merged_nb.base_path, self.MERGE_GROUP_FILE_DIR_MOVE_SRC_RENAME)
node_backend_str = self.node_backend(merged_nb.node.host.addr,
merged_nb.node.port,
merged_nb.backend_id)
merged_group_file_marker = (os.path.join(merged_nb.base_path,
self.MERGE_GROUP_FILE_MARKER_PATH)
if self.MERGE_GROUP_FILE_MARKER_PATH else
'')
shutdown_cmd = infrastructure._disable_node_backend_cmd(
merged_nb.node.host.addr,
merged_nb.node.port,
merged_nb.node.family,
merged_nb.backend_id)
params = {'node_backend': node_backend_str.encode('utf-8'),
'group': str(group_id),
'merged_to': str(self.uncoupled_group),
'remove_group_file': merged_group_file}
if merged_group_file_marker:
params['group_file_marker'] = merged_group_file_marker.format(
dst_group_id=self.uncoupled_group,
dst_backend_id=merged_nb.backend_id)
if merged_path:
params['move_src'] = os.path.dirname(merged_group_file)
params['move_dst'] = merged_path
task = NodeStopTask.new(self,
group=group_id,
uncoupled=True,
host=merged_nb.node.host.addr,
cmd=shutdown_cmd,
params=params)
self.tasks.append(task)
reconfigure_cmd = infrastructure._reconfigure_node_cmd(
merged_nb.node.host.addr,
merged_nb.node.port,
merged_nb.node.family)
task = MinionCmdTask.new(self,
host=merged_nb.node.host.addr,
cmd=reconfigure_cmd,
params={'node_backend': node_backend_str.encode('utf-8')})
self.tasks.append(task)
task = HistoryRemoveNodeTask.new(
self,
group=group_id,
host=merged_nb.node.host.addr,
port=merged_nb.node.port,
family=merged_nb.node.family,
backend_id=merged_nb.backend_id,
)
self.tasks.append(task)
shutdown_cmd = infrastructure._disable_node_backend_cmd(
self.dst_host, self.dst_port, self.dst_family, self.dst_backend_id)
group_file = (os.path.join(self.dst_base_path,
self.GROUP_FILE_PATH)
if self.GROUP_FILE_PATH else
'')
params = {'node_backend': self.dst_node_backend.encode('utf-8'),
'group': str(self.uncoupled_group),
'success_codes': [self.DNET_CLIENT_ALREADY_IN_PROGRESS]}
remove_path = ''
if self.GROUP_FILE_DIR_MOVE_DST_RENAME and group_file:
params['move_src'] = os.path.join(os.path.dirname(group_file))
remove_path = os.path.join(
self.dst_base_path, self.GROUP_FILE_DIR_MOVE_DST_RENAME)
params['move_dst'] = remove_path
task = NodeStopTask.new(self,
group=self.uncoupled_group,
uncoupled=True,
host=self.dst_host,
cmd=shutdown_cmd,
params=params)
self.tasks.append(task)
make_readonly_cmd = infrastructure._make_readonly_node_backend_cmd(
self.src_host, self.src_port, self.src_family, self.src_backend_id)
mark_backend = self.make_path(
self.BACKEND_DOWN_MARKER, base_path=self.src_base_path).format(
backend_id=self.src_backend_id)
task = MinionCmdTask.new(self,
host=self.src_host,
cmd=make_readonly_cmd,
params={'node_backend': self.src_node_backend.encode('utf-8'),
'mark_backend': mark_backend,
'success_codes': [self.DNET_CLIENT_ALREADY_IN_PROGRESS]})
self.tasks.append(task)
move_cmd = infrastructure.move_group_cmd(
src_host=self.src_host,
src_path=self.src_base_path,
src_family=self.src_family,
dst_path=self.dst_base_path)
group_file = (os.path.join(self.dst_base_path, self.GROUP_FILE_PATH)
if self.GROUP_FILE_PATH else
'')
ids_file = (os.path.join(self.dst_base_path, self.IDS_FILE_PATH)
if self.IDS_FILE_PATH else
'')
params = {'group': str(self.group),
'group_file': group_file,
'ids': ids_file}
if remove_path:
params['remove_path'] = remove_path
task = RsyncBackendTask.new(self,
host=self.dst_host,
src_host=self.src_host,
group=self.group,
cmd=move_cmd,
params=params)
self.tasks.append(task)
additional_files = RESTORE_CFG.get('move_additional_files', [])
for src_file_tpl, dst_file_path in additional_files:
rsync_cmd = infrastructure.move_group_cmd(
src_host=self.src_host,
src_path=self.src_base_path,
src_family=self.src_family,
dst_path=os.path.join(self.dst_base_path, dst_file_path),
file_tpl=src_file_tpl)
params = {'group': str(self.group)}
task = MinionCmdTask.new(self,
host=self.dst_host,
group=self.group,
cmd=rsync_cmd,
params=params)
self.tasks.append(task)
shutdown_cmd = infrastructure._disable_node_backend_cmd(
self.src_host, self.src_port, self.src_family, self.src_backend_id)
group_file = (os.path.join(self.src_base_path,
self.GROUP_FILE_PATH)
if self.GROUP_FILE_PATH else
'')
group_file_marker = (os.path.join(self.src_base_path,
self.GROUP_FILE_MARKER_PATH)
if self.GROUP_FILE_MARKER_PATH else
'')
params = {
'node_backend': self.src_node_backend.encode('utf-8'),
'group': str(self.group),
'group_file_marker': self.marker_format(group_file_marker),
'remove_group_file': group_file,
'success_codes': [self.DNET_CLIENT_ALREADY_IN_PROGRESS],
'unmark_backend': mark_backend,
}
if self.GROUP_FILE_DIR_MOVE_SRC_RENAME and group_file:
params['move_src'] = os.path.join(os.path.dirname(group_file))
params['move_dst'] = os.path.join(
self.src_base_path, self.GROUP_FILE_DIR_MOVE_SRC_RENAME)
task = NodeStopTask.new(self,
group=self.group,
host=self.src_host,
cmd=shutdown_cmd,
params=params)
self.tasks.append(task)
reconfigure_cmd = infrastructure._reconfigure_node_cmd(
self.src_host, self.src_port, self.src_family)
task = MinionCmdTask.new(self,
host=self.src_host,
cmd=reconfigure_cmd,
params={'node_backend': self.src_node_backend.encode('utf-8')})
self.tasks.append(task)
reconfigure_cmd = infrastructure._reconfigure_node_cmd(
self.dst_host, self.dst_port, self.dst_family)
task = MinionCmdTask.new(self,
host=self.dst_host,
cmd=reconfigure_cmd,
params={'node_backend': self.dst_node_backend.encode('utf-8')})
self.tasks.append(task)
task = HistoryRemoveNodeTask.new(self,
group=self.group,
host=self.src_host,
port=self.src_port,
family=self.src_family,
backend_id=self.src_backend_id)
self.tasks.append(task)
task = HistoryRemoveNodeTask.new(
self,
group=self.uncoupled_group,
host=self.dst_host,
port=self.dst_port,
family=self.dst_family,
backend_id=self.dst_backend_id,
)
self.tasks.append(task)
start_cmd = infrastructure._enable_node_backend_cmd(
self.dst_host, self.dst_port, self.dst_family, self.dst_backend_id)
task = MinionCmdTask.new(self,
host=self.dst_host,
cmd=start_cmd,
params={'node_backend': self.dst_node_backend.encode('utf-8')})
self.tasks.append(task)
@property
def _involved_groups(self):
group_ids = set([self.group])
if self.group in storage.groups:
group = storage.groups[self.group]
if group.couple:
group_ids.update(g.group_id for g in group.coupled_groups)
else:
group_ids.add(self.group)
group_ids.add(self.uncoupled_group)
if self.merged_groups:
group_ids.update(self.merged_groups)
return group_ids
@property
def _involved_couples(self):
couples = []
group = storage.groups[self.group]
if group.couple:
couples.append(str(group.couple))
return couples
@property
def involved_uncoupled_groups(self):
groups = [self.uncoupled_group]
if self.merged_groups:
groups.extend(self.merged_groups)
return groups
def _group_marks(self):
group = storage.groups[self.group]
updated_meta = copy.deepcopy(group.meta)
        # valid conversion from tuple to dict
# TODO: move this to storage.Group.meta
updated_meta['version'] = 2
updated_meta['service'] = {
'status': storage.Status.MIGRATING,
'job_id': self.id
}
yield group.group_id, updated_meta
def _group_unmarks(self):
if self.group not in storage.groups:
raise RuntimeError('Group {0} is not found'.format(self.group))
group = storage.groups[self.group]
if not group.meta:
            return
updated_meta = copy.deepcopy(group.meta)
updated_meta.pop('service', None)
yield group.group_id, updated_meta
|
yandex/mastermind
|
src/cocaine-app/jobs/move.py
|
Python
|
gpl-2.0
| 17,327
|
# -*- coding: iso-8859-1 -*-
#############################################################################################
# Name: unittest_Grib.py
# Author: Jun Hu
# Date: 2012-04-30
# Description: test cases for Grib class.
#############################################################################################
import sys,unittest
sys.path.insert(1, '../sundew/lib/')
from Grib import Grib
class unittest_Grib(unittest.TestCase):
def setUp(self):
stringBulletin = "TEST_GRIB_TEST"
self.grib = Grib(stringBulletin)
def test_Grib(self):
self.assertEqual(self.grib.begin, 5)
self.assertEqual(self.grib.last, -1)
self.assertEqual(self.grib.validate(), False)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(unittest_Grib))
return suite
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(unittest_Grib)
unittest.TextTestRunner(verbosity=2).run(suite)
|
khosrow/metpx
|
sundew/unittests/unittest_Grib.py
|
Python
|
gpl-2.0
| 978
|
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
# Copyright (C) 2007 Lukáš Lalinský
# Copyright (C) 2009 Carlin Mangar
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Asynchronous XML web service.
"""
import os
import sys
import re
import traceback
import time
from collections import deque, defaultdict
from PyQt4 import QtCore, QtNetwork, QtXml
from picard import version_string
from picard.util import partial
from picard.const import PUID_SUBMIT_HOST, PUID_SUBMIT_PORT
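# minimum delay in milliseconds between two requests to the same (host, port)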
REQUEST_DELAY = defaultdict(lambda: 1000)
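# the doubled '%%' survives the %-formatting below as a literal '%', so the
# value is 'MusicBrainz%20Picard-<version>' (URL-encoded space)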
USER_AGENT_STRING = 'MusicBrainz%%20Picard-%s' % version_string
def _escape_lucene_query(text):
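    # example: _escape_lucene_query(u'(live?)') == u'\\(live\\?\\)'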
return re.sub(r'([+\-&|!(){}\[\]\^"~*?:\\])', r'\\\1', text)
def _wrap_xml_metadata(data):
return ('<?xml version="1.0" encoding="UTF-8"?>' +
'<metadata xmlns="http://musicbrainz.org/ns/mmd-2.0#">%s</metadata>' % data)
class XmlNode(object):
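    """Minimal DOM-like node: child elements and XML attributes are exposed
    as Python attributes, e.g. node.artist[0].name[0].text (child elements
    are lists of XmlNode; attribute values are unicode strings)."""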
def __init__(self):
self.text = u''
self.children = {}
self.attribs = {}
def __repr__(self):
return repr(self.__dict__)
def __getattr__(self, name):
try:
return self.children[name]
except KeyError:
try:
return self.attribs[name]
except KeyError:
raise AttributeError, name
class XmlHandler(QtXml.QXmlDefaultHandler):
def init(self):
self.document = XmlNode()
self.node = self.document
_node_name_re = re.compile('[^a-zA-Z0-9]')
self._node_name = lambda n: _node_name_re.sub('_', unicode(n))
self.path = []
def startElement(self, namespace, name, qname, attrs):
node = XmlNode()
for i in xrange(attrs.count()):
node.attribs[self._node_name(attrs.localName(i))] = unicode(attrs.value(i))
self.node.children.setdefault(self._node_name(name), []).append(node)
self.path.append(self.node)
self.node = node
return True
def endElement(self, namespace, name, qname):
self.node = self.path.pop()
return True
def characters(self, text):
self.node.text += unicode(text)
return True
class XmlWebService(QtCore.QObject):
"""
    Asynchronous XML web service client with per-host request throttling.

    Signals:
    - authentication_required
    - proxyAuthentication_required
"""
def __init__(self, parent=None):
QtCore.QObject.__init__(self, parent)
self.manager = QtNetwork.QNetworkAccessManager()
self.setup_proxy()
self.manager.connect(self.manager, QtCore.SIGNAL("finished(QNetworkReply *)"), self._process_reply)
self.manager.connect(self.manager, QtCore.SIGNAL("authenticationRequired(QNetworkReply *, QAuthenticator *)"), self._site_authenticate)
self.manager.connect(self.manager, QtCore.SIGNAL("proxyAuthenticationRequired(QNetworkProxy *, QAuthenticator *)"), self._proxy_authenticate)
self._last_request_times = {}
self._active_requests = {}
self._high_priority_queues = {}
self._low_priority_queues = {}
self._hosts = []
self._timer = QtCore.QTimer(self)
self._timer.setSingleShot(True)
self._timer.timeout.connect(self._run_next_task)
self._request_methods = {
"GET": self.manager.get,
"POST": self.manager.post,
"PUT": self.manager.put,
"DELETE": self.manager.deleteResource
}
def setup_proxy(self):
self.proxy = QtNetwork.QNetworkProxy()
if self.config.setting["use_proxy"]:
self.proxy.setType(QtNetwork.QNetworkProxy.HttpProxy)
self.proxy.setHostName(self.config.setting["proxy_server_host"])
self.proxy.setPort(self.config.setting["proxy_server_port"])
self.proxy.setUser(self.config.setting["proxy_username"])
self.proxy.setPassword(self.config.setting["proxy_password"])
self.manager.setProxy(self.proxy)
def _start_request(self, method, host, port, path, data, handler, xml, mblogin=False):
self.log.debug("%s http://%s:%d%s", method, host, port, path)
url = QtCore.QUrl.fromEncoded("http://%s:%d%s" % (host, port, path))
if mblogin:
url.setUserName(self.config.setting["username"])
url.setPassword(self.config.setting["password"])
request = QtNetwork.QNetworkRequest(url)
request.setRawHeader("User-Agent", "MusicBrainz-Picard/%s" % version_string)
if method == "POST" and host == self.config.setting["server_host"]:
request.setHeader(QtNetwork.QNetworkRequest.ContentTypeHeader, "application/xml; charset=utf-8")
send = self._request_methods[method]
reply = send(request, data) if data is not None else send(request)
key = (host, port)
self._last_request_times[key] = time.time()
self._active_requests[reply] = (request, handler, xml)
return True
def _process_reply(self, reply):
try:
request, handler, xml = self._active_requests.pop(reply)
except KeyError:
self.log.error("Error: Request not found for %s" % str(reply.request().url().toString()))
return
error = int(reply.error())
if handler is not None:
if error:
#print "ERROR", reply.error(), reply.errorString()
#for name in reply.rawHeaderList():
# print name, reply.rawHeader(name)
self.log.debug("HTTP Error: %d", error)
if xml:
xml_handler = XmlHandler()
xml_handler.init()
xml_reader = QtXml.QXmlSimpleReader()
xml_reader.setContentHandler(xml_handler)
xml_input = QtXml.QXmlInputSource(reply)
xml_reader.parse(xml_input)
handler(xml_handler.document, reply, error)
else:
handler(str(reply.readAll()), reply, error)
reply.close()
def get(self, host, port, path, handler, xml=True, priority=False, important=False, mblogin=False):
func = partial(self._start_request, "GET", host, port, path, None, handler, xml, mblogin)
return self.add_task(func, host, port, priority, important=important)
def post(self, host, port, path, data, handler, xml=True, priority=True, important=True, mblogin=True):
self.log.debug("POST-DATA %r", data)
func = partial(self._start_request, "POST", host, port, path, data, handler, xml, mblogin)
return self.add_task(func, host, port, priority, important=important)
def put(self, host, port, path, data, handler, priority=True, important=True, mblogin=True):
func = partial(self._start_request, "PUT", host, port, path, data, handler, False, mblogin)
return self.add_task(func, host, port, priority, important=important)
def delete(self, host, port, path, handler, priority=True, important=True, mblogin=True):
func = partial(self._start_request, "DELETE", host, port, path, None, handler, False, mblogin)
return self.add_task(func, host, port, priority, important=important)
def _site_authenticate(self, reply, authenticator):
self.emit(QtCore.SIGNAL("authentication_required"), reply, authenticator)
def _proxy_authenticate(self, proxy, authenticator):
self.emit(QtCore.SIGNAL("proxyAuthentication_required"), proxy, authenticator)
def stop(self):
self._high_priority_queues = {}
self._low_priority_queues = {}
for reply in self._active_requests.keys():
reply.abort()
def _run_next_task(self):
delay = sys.maxint
for key in self._hosts:
queue = self._high_priority_queues.get(key) or self._low_priority_queues.get(key)
if not queue:
continue
now = time.time()
last = self._last_request_times.get(key)
request_delay = REQUEST_DELAY[key]
last_ms = (now - last) * 1000 if last is not None else request_delay
if last_ms >= request_delay:
self.log.debug("Last request to %s was %d ms ago, starting another one", key, last_ms)
d = request_delay
queue.popleft()()
else:
d = request_delay - last_ms
self.log.debug("Waiting %d ms before starting another request to %s", d, key)
if d < delay:
delay = d
if delay < sys.maxint:
self._timer.start(delay)
def add_task(self, func, host, port, priority, important=False):
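        # returns an opaque handle (key, func, priority) accepted by
        # remove_task()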
key = (host, port)
if key not in self._hosts:
self._hosts.append(key)
if priority:
queues = self._high_priority_queues
else:
queues = self._low_priority_queues
queues.setdefault(key, deque())
if important:
queues[key].appendleft(func)
else:
queues[key].append(func)
if not self._timer.isActive():
self._timer.start(0)
return (key, func, priority)
def remove_task(self, task):
key, func, priority = task
if priority:
queue = self._high_priority_queues[key]
else:
queue = self._low_priority_queues[key]
try:
queue.remove(func)
        except ValueError:
            pass
def _get_by_id(self, entitytype, entityid, handler, inc=[], params=[], priority=False, important=False, mblogin=False):
host = self.config.setting["server_host"]
port = self.config.setting["server_port"]
path = "/ws/2/%s/%s?inc=%s" % (entitytype, entityid, "+".join(inc))
if params: path += "&" + "&".join(params)
return self.get(host, port, path, handler, priority=priority, important=important, mblogin=mblogin)
def get_release_by_id(self, releaseid, handler, inc=[], priority=True, important=False, mblogin=False):
return self._get_by_id('release', releaseid, handler, inc, priority=priority, important=important, mblogin=mblogin)
def get_track_by_id(self, trackid, handler, inc=[], priority=True, important=False, mblogin=False):
return self._get_by_id('recording', trackid, handler, inc, priority=priority, important=important, mblogin=mblogin)
def lookup_puid(self, puid, handler, priority=False, important=False):
inc = ['releases', 'release-groups', 'media', 'artist-credits']
        return self._get_by_id('puid', puid, handler, inc, priority=priority, important=important)
def lookup_discid(self, discid, handler, priority=True, important=True):
inc = ['artist-credits', 'labels']
return self._get_by_id('discid', discid, handler, inc, params=["cdstubs=no"], priority=priority, important=important)
def _find(self, entitytype, handler, kwargs):
host = self.config.setting["server_host"]
port = self.config.setting["server_port"]
filters = []
query = []
for name, value in kwargs.items():
if name == 'limit':
filters.append((name, value))
else:
value = _escape_lucene_query(value).strip().lower()
if value: query.append('%s:(%s)' % (name, value))
if query: filters.append(('query', ' '.join(query)))
params = []
for name, value in filters:
value = str(QtCore.QUrl.toPercentEncoding(QtCore.QString(value)))
params.append('%s=%s' % (str(name), value))
path = "/ws/2/%s/?%s" % (entitytype, "&".join(params))
return self.get(host, port, path, handler)
def find_releases(self, handler, **kwargs):
return self._find('release', handler, kwargs)
def find_tracks(self, handler, **kwargs):
return self._find('recording', handler, kwargs)
def _browse(self, entitytype, handler, kwargs, inc=[], priority=False, important=False):
host = self.config.setting["server_host"]
port = self.config.setting["server_port"]
params = "&".join(["%s=%s" % (k, v) for k, v in kwargs.items()])
path = "/ws/2/%s?%s&inc=%s" % (entitytype, params, "+".join(inc))
return self.get(host, port, path, handler, priority=priority, important=important)
def browse_releases(self, handler, priority=True, important=True, **kwargs):
inc = ["media", "labels"]
return self._browse("release", handler, kwargs, inc, priority=priority, important=important)
def submit_puids(self, puids, handler):
path = '/ws/2/recording/?client=' + USER_AGENT_STRING
recordings = ''.join(['<recording id="%s"><puid-list><puid id="%s"/></puid-list></recording>' % i for i in puids.items()])
data = _wrap_xml_metadata('<recording-list>%s</recording-list>' % recordings)
return self.post(PUID_SUBMIT_HOST, PUID_SUBMIT_PORT, path, data, handler)
def submit_ratings(self, ratings, handler):
host = self.config.setting['server_host']
port = self.config.setting['server_port']
path = '/ws/2/rating/?client=' + USER_AGENT_STRING
recordings = (''.join(['<recording id="%s"><user-rating>%s</user-rating></recording>' %
(i[1], j*20) for i, j in ratings.items() if i[0] == 'recording']))
data = _wrap_xml_metadata('<recording-list>%s</recording-list>' % recordings)
return self.post(host, port, path, data, handler)
def query_musicdns(self, handler, **kwargs):
host, port = 'ofa.musicdns.org', 80
filters = []
for name, value in kwargs.items():
value = str(QtCore.QUrl.toPercentEncoding(value))
filters.append('%s=%s' % (str(name), value))
return self.post(host, port, '/ofa/1/track/', '&'.join(filters), handler, mblogin=False)
def download(self, host, port, path, handler, priority=False, important=False):
return self.get(host, port, path, handler, xml=False, priority=priority, important=important)
|
lalinsky/picard-debian
|
picard/webservice.py
|
Python
|
gpl-2.0
| 14,573
|
# -*- coding: utf-8 -*-
##############################################################################
# 2011 E2OpenPlugins #
# #
# This file is open source software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License version 2 as #
# published by the Free Software Foundation. #
# #
##############################################################################
from Tools.Directories import fileExists
from Components.config import config
from models.services import getCurrentService, getBouquets, getChannels, getSatellites, getProviders, getEventDesc, getChannelEpg, getSearchEpg, getCurrentFullInfo, getMultiEpg, getEvent
from models.info import getInfo, getPublicPath, getOpenWebifVer, getTranscodingSupport, getLanguage
from models.movies import getMovieList
from models.timers import getTimers
from models.config import getConfigs, getConfigsSections, getZapStream, getShowChPicon
from base import BaseController
from time import mktime, localtime
from models.locations import getLocations
try:
from boxbranding import getBoxType, getMachineName, getMachineBrand, getMachineBuild
except ImportError:
from models.owibranding import getBoxType, getMachineName, getMachineBrand, getMachineBuild
class AjaxController(BaseController):
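    # Each P_<name> method is a page handler (dispatched by the BaseController
    # base class); handlers receive the HTTP request and return the template
    # context as a dict.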
def __init__(self, session, path = ""):
BaseController.__init__(self, path)
self.session = session
def P_current(self, request):
return getCurrentFullInfo(self.session)
def P_bouquets(self, request):
stype = "tv"
if "stype" in request.args.keys():
stype = request.args["stype"][0]
bouq = getBouquets(stype)
return { "bouquets": bouq['bouquets'], "stype": stype }
def P_providers(self, request):
stype = "tv"
if "stype" in request.args.keys():
stype = request.args["stype"][0]
prov = getProviders(stype)
return { "providers": prov['providers'], "stype": stype }
def P_satellites(self, request):
stype = "tv"
if "stype" in request.args.keys():
stype = request.args["stype"][0]
sat = getSatellites(stype)
return { "satellites": sat['satellites'], "stype": stype }
def P_channels(self, request):
stype = "tv"
idbouquet = "ALL"
if "stype" in request.args.keys():
stype = request.args["stype"][0]
if "id" in request.args.keys():
idbouquet = request.args["id"][0]
channels = getChannels(idbouquet, stype)
channels['transcoding'] = getTranscodingSupport()
channels['type'] = stype
channels['showchannelpicon'] = getShowChPicon()['showchannelpicon']
return channels
def P_eventdescription(self, request):
return getEventDesc(request.args["sref"][0], request.args["idev"][0])
def P_event(self, request):
event = getEvent(request.args["sref"][0], request.args["idev"][0])
event['event']['recording_margin_before'] = config.recording.margin_before.value
event['event']['recording_margin_after'] = config.recording.margin_after.value
at = False
try:
from Plugins.Extensions.AutoTimer.AutoTimer import AutoTimer
at = True
except ImportError:
pass
event['at'] = at
event['transcoding'] = getTranscodingSupport()
event['kinopoisk'] = getLanguage()
return event
def P_about(self, request):
info = {}
info["owiver"] = getOpenWebifVer()
return { "info": info }
def P_boxinfo(self, request):
info = getInfo(self.session)
type = getBoxType()
if fileExists(getPublicPath("/images/boxes/"+type+".png")):
info["boximage"] = type+".png"
elif fileExists(getPublicPath("/images/boxes/"+type+".jpg")):
info["boximage"] = type+".jpg"
else:
info["boximage"] = "unknown.png"
return info
def P_epgpop(self, request):
events=[]
timers=[]
if "sref" in request.args.keys():
ev = getChannelEpg(request.args["sref"][0])
events = ev["events"]
elif "sstr" in request.args.keys():
ev = getSearchEpg(request.args["sstr"][0])
events = ev["events"]
at = False
if len(events) > 0:
t = getTimers(self.session)
timers = t["timers"]
try:
from Plugins.Extensions.AutoTimer.AutoTimer import AutoTimer
at = True
except ImportError:
pass
if config.OpenWebif.webcache.theme.value:
theme = config.OpenWebif.webcache.theme.value
else:
theme = 'original'
return { "theme":theme, "events": events , "timers" : timers , "at" : at, "kinopoisk": getLanguage()}
def P_epgdialog(self, request):
return self.P_epgpop(request)
def P_screenshot(self, request):
box = {}
box['brand'] = "dmm"
if getMachineBrand() == 'Vu+':
box['brand'] = "vuplus"
elif getMachineBrand() == 'GigaBlue':
box['brand'] = "gigablue"
elif getMachineBrand() == 'Edision':
box['brand'] = "edision"
elif getMachineBrand() == 'iQon':
box['brand'] = "iqon"
elif getMachineBrand() == 'Technomate':
box['brand'] = "techomate"
elif fileExists("/proc/stb/info/azmodel"):
box['brand'] = "azbox"
return { "box": box }
def P_powerstate(self, request):
return {}
def P_message(self, request):
return {}
def P_movies(self, request):
if "dirname" in request.args.keys():
movies = getMovieList(request.args["dirname"][0])
else:
movies = getMovieList()
movies['transcoding'] = getTranscodingSupport()
sorttype = config.OpenWebif.webcache.moviesort.value
unsort = movies['movies']
if sorttype == 'name':
movies['movies'] = sorted(unsort, key=lambda k: k['eventname'])
elif sorttype == 'named':
movies['movies'] = sorted(unsort, key=lambda k: k['eventname'],reverse=True)
elif sorttype == 'date':
movies['movies'] = sorted(unsort, key=lambda k: k['recordingtime'])
elif sorttype == 'dated':
movies['movies'] = sorted(unsort, key=lambda k: k['recordingtime'],reverse=True)
movies['sort'] = sorttype
return movies
def P_workinprogress(self, request):
return {}
def P_radio(self, request):
return {}
def P_timers(self, request):
return getTimers(self.session)
def P_edittimer(self, request):
return {}
def P_tv(self, request):
return {}
def P_config(self, request):
section = "usage"
if "section" in request.args.keys():
section = request.args["section"][0]
return getConfigs(section)
def P_settings(self, request):
ret = {
"result": True
}
ret['configsections'] = getConfigsSections()['sections']
if config.OpenWebif.webcache.theme.value:
ret['themes'] = config.OpenWebif.webcache.theme.choices
ret['theme'] = config.OpenWebif.webcache.theme.value
else:
ret['themes'] = []
ret['theme'] = 'original'
ret['zapstream'] = getZapStream()['zapstream']
ret['showchannelpicon'] = getShowChPicon()['showchannelpicon']
return ret
def P_multiepg(self, request):
bouq = getBouquets("tv")
if "bref" not in request.args.keys():
bref = bouq['bouquets'][0][0]
else:
bref = request.args["bref"][0]
endtime = 1440
begintime = -1
day = 0
if "day" in request.args.keys():
try:
day = int(request.args["day"][0])
if day > 0:
now = localtime()
begintime = mktime( (now.tm_year, now.tm_mon, now.tm_mday+day, 0, 0, 0, -1, -1, -1) )
            except Exception:
pass
mode = 1
if config.OpenWebif.webcache.mepgmode.value:
try:
mode = int(config.OpenWebif.webcache.mepgmode.value)
            except Exception:
pass
epg = getMultiEpg(self, bref, begintime, endtime, mode)
epg['bouquets'] = bouq['bouquets']
epg['bref'] = bref
epg['day'] = day
epg['mode'] = mode
return epg
def P_at(self, request):
ret = {}
ret['hasVPS'] = 0
ret['hasSeriesPlugin'] = 0
try:
from Plugins.Extensions.AutoTimer.AutoTimer import typeMap
ret['types'] = typeMap
except ImportError:
pass
loc = getLocations()
ret['locations'] = loc['locations']
try:
from Plugins.SystemPlugins.vps import Vps
ret['hasVPS'] = 1
except ImportError as ie:
pass
try:
from Plugins.Extensions.SeriesPlugin.plugin import Plugins
ret['hasSeriesPlugin'] = 1
except ImportError as ie:
pass
return ret
def P_bqe(self, request):
return {}
def P_epgr(self, request):
return {}
def P_webtv(self, request):
return {"transcoding" : getTranscodingSupport()}
|
svox1/e2openplugin-OpenWebif
|
plugin/controllers/ajax.py
|
Python
|
gpl-2.0
| 8,291
|
#!/usr/bin/env python
import sys
import paydaemon
from paydaemon.paydaemon import PAYDaemon
from litecoinrpc.connection import LitecoinConnection
from modules.fixedpoint import FixedPoint
DEBUG = 1
class LTCDaemon(PAYDaemon):
def __init__(self,pidfile):
PAYDaemon.__init__(self,pidfile,'LTC','BTC',LitecoinConnection)
# config
PAYDaemon.COIN_MIN_FREEPOOL = 10
PAYDaemon.SMULT = FixedPoint('100000000',8)
PAYDaemon.MINAMNT = FixedPoint('0.01',8)
PAYDaemon.FEEPERCT = FixedPoint('0.2',8)
PAYDaemon.TRANSFEE = FixedPoint('10000',8)
if __name__ == "__main__":
daemon = LTCDaemon('/tmp/daemon-ltc.pid')
if DEBUG:
daemon.run()
else:
if len(sys.argv) == 2:
if 'start' == sys.argv[1]:
daemon.start()
elif 'stop' == sys.argv[1]:
daemon.stop()
elif 'restart' == sys.argv[1]:
daemon.restart()
else:
print "Unknown command"
sys.exit(2)
sys.exit(0)
else:
print "usage: %s start|stop|restart [debug]" % sys.argv[0]
sys.exit(2)
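# Usage sketch (applies when DEBUG above is set to 0; with DEBUG = 1 the
# daemon runs in the foreground instead):
#   python ltc_daemon.py start     # daemonize using /tmp/daemon-ltc.pid
#   python ltc_daemon.py stop
#   python ltc_daemon.py restart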
|
CoinEXchange/CoinX
|
ltc_daemon.py
|
Python
|
gpl-2.0
| 1,080
|
import os
# Function to compute the spectral-index (alpha) and curvature (beta) images
def SimCalcAlphaBeta(imtemplate="",taylorlist=[],namealpha="",namebeta="",threshold=0.001):
nterms = len(taylorlist);
if(nterms>1):
if(not os.path.exists(namealpha)):
cpcmd = 'cp -r ' + imtemplate + ' ' + namealpha;
os.system(cpcmd);
if(nterms>2):
if(not os.path.exists(namebeta)):
cpcmd = 'cp -r ' + imtemplate + ' ' + namebeta;
os.system(cpcmd);
if(nterms>0):
ia.open(taylorlist[0]);
ptay0 = ia.getchunk();
ia.close();
if(nterms>1):
ia.open(taylorlist[1]);
ptay1 = ia.getchunk();
ia.close();
ia.open(namealpha);
alpha = ia.getchunk();
alpha.fill(0.0);
ia.close();
if(nterms>2):
ia.open(taylorlist[2]);
ptay2 = ia.getchunk();
ia.close();
ia.open(namebeta);
beta = ia.getchunk();
beta.fill(0.0);
ia.close();
# Calc alpha,beta from ptay0,ptay1,ptay2
N = ptay0.shape[0];
if(nterms>1):
for ii in range(0,N):
for jj in range(0,N):
if(ptay0[ii,jj,0,0]>threshold):
mtay0 = ptay0[ii,jj,0,0];
mtay1 = ptay1[ii,jj,0,0];
alphaval = mtay1/mtay0;
alpha[ii,jj,0,0] = alphaval;
if(nterms>2):
mtay2 = ptay2[ii,jj,0,0];
beta[ii,jj,0,0] = (mtay2/mtay0) - 0.5*alphaval*(alphaval-1);
if(ii%100 == 0):
print ii;
if(nterms>1):
ia.open(namealpha);
ia.putchunk(alpha);
ia.close();
if(nterms>2):
ia.open(namebeta);
ia.putchunk(beta);
ia.close();
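# Worked example (illustrative numbers, not from a real image): for a pixel
# with Taylor-term values tay0=1.0, tay1=-0.7, tay2=0.1 the loop above gives
#   alpha = tay1/tay0                         = -0.7
#   beta  = tay2/tay0 - 0.5*alpha*(alpha-1)   = 0.1 - 0.5*(-0.7)*(-1.7) = -0.495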
##############################################################################
msname = 'ptest.ms';
rname = 'try';
niter=100;
scales=[0];
ntaylor=3;
threshold='0.05mJy';
imsize=1024;
cellsize="8.0arcsec";
stokes="I";
reffreq="1.4GHz";
gain=0.5;
mask="";
algo="msmfs";
weighting="briggs";
ftm="ft";
models=[];
restoreds=[];
residuals=[];
masks=[];
im.close();
im.open(msname);
im.selectvis(spw=0);
if(algo=="msmfs"):
for tt in range(0,ntaylor):
models.append(rname+'.'+str(tt)+'.model');
restoreds.append(rname+'.'+str(tt)+'.restored');
residuals.append(rname+'.'+str(tt)+'.residual');
masks.append(mask);
im.defineimage(nx=imsize,ny=imsize,cellx=cellsize,celly=cellsize,nchan=1,stokes=stokes,mode='mfs');
im.make(models[tt]);
else:
models = [rname+'.model'];
restoreds = [rname+'.restored'];
residuals = [rname+'.residual'];
masks = mask;
im.defineimage(nx=imsize,ny=imsize,cellx=cellsize,celly=cellsize,nchan=1,stokes=stokes,mode='mfs');
im.make(models[0]);
im.weight(type=weighting);
im.setscales(scalemethod='uservector',uservector=scales);
im.settaylorterms(ntaylorterms=ntaylor,reffreq=(qa.convert(qa.unit(reffreq),"Hz"))['value']);
im.setoptions(ftmachine=ftm);
if(mask == ""):
print 'clean without mask';
im.clean(model=models,image=restoreds,residual=residuals,algorithm=algo,threshold=threshold,niter=niter,interactive=False,gain=gain);
else:
print 'clean with mask';
im.clean(model=models,image=restoreds,residual=residuals,algorithm=algo,threshold=threshold,niter=niter,interactive=False,gain=gain,mask=masks);
im.done();
###########################################################################
## Calculate alpha and beta
imtemplate=rname+'.0.restored';
taylist=[];
for i in range(0,ntaylor):
taylist.append(rname+'.'+str(i)+'.restored');
SimCalcAlphaBeta(imtemplate=imtemplate,taylorlist=taylist,namealpha=rname+'.'+ftm+'.restored.alpha',namebeta=rname+'.'+ftm+'.restored.beta',threshold=0.02);
###########################################################################
|
ATNF/askapsdp
|
Code/Components/Synthesis/testdata/current/simulation/mfstest/mfsrun.py
|
Python
|
gpl-2.0
| 3,615
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from buildbot.plugins import *
from buildbot.schedulers.basic import AnyBranchScheduler, SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.plugins import reporters, util
from buildbot.process.properties import Interpolate
from config.settings import branches_list, get_arches
import os
####### SCHEDULERS
# Configure the Schedulers, which decide how to react to incoming changes.
# In this case, just kick off a 'runtests' build
def change_files_json_push(change):
print("Change111: "+str(change.files))
if any("sys-kernel/gentoo-sources" in s for s in change.files):
print("sys-kernel ebuild to test")
return True
else:
return False
def syskernel_change(change):
print("Change111: "+str(change.files))
if any("sys-kernel/" in s for s in change.files):
print("sys-kernel ebuild to test")
return True
else:
return False
def eclass_change(change):
print("Change111: "+str(change.files))
if any("eclass/kernel-2.eclass" in s for s in change.files):
print("sys-kernel ebuild to test")
return True
else:
return False
architecture_testing_list = get_arches()
def builderNames(branch):
builders = set()
for arch in architecture_testing_list:
for toolchain in arch["toolchain"]:
builders.add(branch + ':' + arch["name"] + ':' + toolchain)
return list(builders)
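# Illustrative sketch (arch/toolchain values are hypothetical): with
#   architecture_testing_list = [{"name": "amd64", "toolchain": ["gcc", "clang"]}]
# builderNames("17.0") returns ["17.0:amd64:gcc", "17.0:amd64:clang"]
# (in arbitrary order, since a set is used for de-duplication).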
schedulers = []
for branch in branches_list:
schedulers.append(SingleBranchScheduler(
name=branch,
change_filter=util.ChangeFilter(branch=branch),
treeStableTimer=None,
builderNames=builderNames(branch)))
for arch in architecture_testing_list:
for toolchain in arch["toolchain"]:
schedulers.append(ForceScheduler(
name="Force_%s_%s_%s" % (branch.replace(".", "_"), arch["name"], toolchain),
builderNames=["%s:%s:%s" % (branch, arch["name"], toolchain)]
))
# add a changefilter for the pull requests
cf = util.ChangeFilter(category='pull', branch=branch)
# but only those that are targeted for that branch
cf.checks["prop:github.base.ref"] = cf.checks['branch']
del cf.checks['branch']
schedulers.append(SingleBranchScheduler(
name="pull" + branch,
change_filter=cf,
treeStableTimer=None,
builderNames=builderNames(branch)))
stab_cf = util.ChangeFilter(category='pull', branch=branch)
schedulers.append(SingleBranchScheduler(
name="stabilize" + branch,
change_filter=stab_cf,
treeStableTimer=None,
builderNames=builderNames(branch)))
    # add a changefilter for the gentoo git branches
    gcf = util.ChangeFilter(category='gentoo-git', branch_re=branch + r"\..*")
schedulers.append(SingleBranchScheduler(
name="git_pull" + branch,
change_filter=gcf,
treeStableTimer=None,
builderNames=builderNames(branch)))
stab_gcf = util.ChangeFilter(category='gentoo-tags-git',
branch_re="refs/tags/" + branch + "_stabilize")
schedulers.append(SingleBranchScheduler(
name="git_stabilize" + branch,
change_filter=stab_gcf,
treeStableTimer=None,
builderNames=builderNames(branch)))
gpcf = util.ChangeFilter(category='gentoo-pull', filter_fn=change_files_json_push)
schedulers.append(SingleBranchScheduler(
name="gentoo_sources",
change_filter=gpcf,
treeStableTimer=None,
builderNames=["gentoo_sources"]))
schedulers.append(ForceScheduler(
name="force_gentoo_sources",
builderNames=["gentoo_sources"]))
gpcf = util.ChangeFilter(category='gentoo-pull', filter_fn=syskernel_change)
schedulers.append(SingleBranchScheduler(
name="other_sources",
change_filter=gpcf,
treeStableTimer=None,
builderNames=["other_sources"]))
schedulers.append(ForceScheduler(
name="force_other_sources",
builderNames=["other_sources"]))
gpcf = util.ChangeFilter(category='gentoo-pull', filter_fn=eclass_change)
schedulers.append(SingleBranchScheduler(
name="eclass_change",
change_filter=gpcf,
treeStableTimer=None,
builderNames=["eclass_change"]))
schedulers.append(ForceScheduler(
name="force_eclass_change",
builderNames=["eclass_change"]))
|
aliceinwire/Gentoo_kernelCI
|
schedulers.py
|
Python
|
gpl-2.0
| 4,791
|
from django.contrib import admin
from .models import Booking, Hall
# Register your models here.
class BookingAdmin(admin.ModelAdmin):
    list_display = ['hall', 'event_name', 'name', 'date', 'start_time', 'no_of_hours', 'email', 'status']
admin.site.register(Booking, BookingAdmin)
class HallAdmin(admin.ModelAdmin):
    list_display = ['hall', 'seats']
admin.site.register(Hall, HallAdmin)
|
mandeeps708/booking_system
|
src/home/admin.py
|
Python
|
gpl-2.0
| 451
|
from django.contrib import admin
from project.models import *
admin.site.register(Document)
admin.site.register(Club)
admin.site.register(Project)
admin.site.register(Comment)
admin.site.register(Update)
admin.site.register(Task)
|
The-WebOps-Club/project-management-portal
|
project/admin.py
|
Python
|
gpl-2.0
| 240
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID', auto_created=True)),
('title', models.CharField(max_length=200)),
('date', models.DateField(auto_now_add=True)),
('body', models.TextField()),
],
),
]
|
ncongleton/njcongleton.com
|
blog/migrations/0001_initial.py
|
Python
|
gpl-2.0
| 590
|
# encoding: utf-8
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('oluch', '0008_mark'),
]
operations = [
migrations.DeleteModel(
name='Disqual',
),
migrations.AlterField(
model_name='userprofile',
name='city',
field=models.CharField(max_length=1000, null=True, verbose_name='city', blank=True),
),
migrations.AlterField(
model_name='userprofile',
name='school',
field=models.CharField(max_length=1000, null=True, verbose_name='school', blank=True),
),
migrations.AlterField(
model_name='userprofile',
name='maxgrade',
field=models.IntegerField(default='11', max_length=2, verbose_name='last grade at school'),
),
migrations.AlterField(
model_name='userprofile',
name='show_results',
field=models.BooleanField(default=True, verbose_name='show results'),
),
migrations.AlterField(
model_name='userprofile',
name='grade',
field=models.IntegerField(max_length=2, null=True, verbose_name='grade', blank=True),
),
migrations.AlterField(
model_name='userprofile',
name='country',
field=models.CharField(default='Russia', max_length=1000, verbose_name='country'),
),
]
|
gurovic/oluch2
|
oluch/migrations/0009_auto_20140203_1130.py
|
Python
|
gpl-2.0
| 1,484
|
'''
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import sys
import urllib2,re,os,base64,xbmc,xbmcplugin,xbmcaddon,xbmcgui,urlparse,urllib
import urlresolver,yt
try:
import json
except ImportError:
import simplejson as json
from threading import Thread
Dialog = xbmcgui.Dialog()
Decode = base64.decodestring
CAT=Decode('LnBocA==')
BASE = 'http://www.couchtripper.com/forum2/page.php?page=3'
addon_id='plugin.video.abracadabra'
base_url = sys.argv[0]
addon_handle = int(sys.argv[1])
args = urlparse.parse_qs(sys.argv[2][1:])
PATH = "abracadabra"
VERSION = "0.0.2"
dp = xbmcgui.DialogProgress()
ADDON = xbmcaddon.Addon(id=addon_id)
debug = ADDON.getSetting('debug')
USERDATA_PATH = xbmc.translatePath('special://home/userdata/addon_data')
ADDON_DATA = USERDATA_PATH + '/'+PATH+'/'
favourites = ADDON_DATA + 'favourites'
if not os.path.exists(ADDON_DATA):
os.makedirs(ADDON_DATA)
if os.path.exists(favourites)==True:
FAV = open(favourites).read()
else: FAV = []
Sources = ['daclips','filehoot','allmyvideos','vidspot','vodlocker','vidto']
ADDONS = xbmc.translatePath(os.path.join('special://home','addons',''))
ART = os.path.join(ADDONS,addon_id,'resources','art')+os.sep
FANART = xbmc.translatePath(os.path.join(ADDONS,addon_id,'fanart.jpg'))
IMAGES = ART + 'icon.png'
Main = 'http://www.watchseries.ac'
def Main_Menu():
Menu('[COLORred]****************[/COLOR][COLORyellow] Thanks For Choosing Apprentice Streams[/COLOR] [COLORred]****************[/COLOR]','',8,IMAGES,FANART,'','')
Menu('[COLORskyblue]Where The Magics AT[/COLOR]','http://herovision.x10host.com/abracadabra/magic.php',4,IMAGES,FANART,'','')
Menu('[COLORred]****[/COLOR][COLORblue]Newly Added Magic[/COLOR][COLORred]****[/COLOR]','http://herovision.x10host.com/abracadabra/magicnew.php',4,IMAGES,FANART,'','')
Menu('[COLORorange]Favourites[/COLOR]','',103,IMAGES,FANART,'','')
Menu('[COLORred]Search[/COLOR]','',8,IMAGES,FANART,'','')
Menu('[COLORred]****************[/COLOR][COLORyellow] Follow Me On Twitter @Apprentice_007 For Updates And Feedback[/COLOR] [COLORred]****************[/COLOR]','',8,IMAGES,FANART,'','')
#[COLOR][/COLOR]
def Regex(url):
HTML = OPEN_URL(url)
Regex = re.compile('<a href="(.+?)" target="_blank"><img src="(.+?)" style="max-width:200px;" /><description = "(.+?)" /><background = "(.+?)" </background></a><br><b>(.+?)</b>').findall(HTML)
for url,img,desc,fanart,name in Regex:
if img == '123':
img = IMAGES
if fanart == '123':
fanart = FANART
if 'php' in url:
Menu(name,url,4,img,fanart,desc,'')
elif 'playlist' in url:
Menu(name,url,7,img,fanart,desc,'')
elif 'watchseries' in url:
Menu(name,url,100,img,fanart,desc,'')
elif not 'http' in url:
Play(name,url,9,img,fanart,desc,'')
else:
Play(name,url,5,img,fanart,desc,'')
xbmcplugin.addSortMethod(addon_handle, xbmcplugin.SORT_METHOD_TITLE);
def grab_youtube_playlist(url):
HTML = OPEN_URL(url)
block_set = re.compile('<tr class="pl-video yt-uix-tile(.+?)</tr>',re.DOTALL).findall(HTML)
for block in block_set:
image = re.compile('data-thumb="(.+?)"').findall(str(block))
for image in image:
image = image
name = re.compile('data-title="(.+?)"').findall(str(block))
for name in name:
if 'elete' in name:
pass
elif 'rivate Vid' in name:
pass
else:
                name = (name).replace('&quot;','').replace('&#039;',"'").replace('&amp;','&')
duration = re.compile('<div class="timestamp"><span aria-label=".+?">(.+?)</span>').findall(str(block))
for duration in duration:
duration = duration
url = re.compile('data-video-ids="(.+?)"').findall(str(block))
for url in url:
url = url
Play('[COLORred]'+str(duration)+'[/COLOR] : '+str(name),str(url),9,str(image),FANART,'','' )
setView('movies', '')
def Stand_up():
HTML = OPEN_URL(BASE)
Block = re.compile('<tr>.+?<td width=".+?" align=".+?">.+?<img border=".+?" src="..(.+?)" width=".+?" height=".+?"></td>.+?<td width=".+?" valign=".+?" align=".+?"><font size=".+?">(.+?)</font></td>.+?<td width=".+?">(.+?)</td>',re.DOTALL).findall(HTML)
for img, comic, c in Block:
find_URL = re.compile('<a href="(.+?)">(.+?)</a>',re.DOTALL).findall(c)
for url, name in find_URL:
if 'tube' in url:
pass
elif 'stage' in url:
Play(comic + ' - ' + name,(url).replace('" target="_blank',''),3,'http://couchtripper.com/'+img,FANART,'','')
elif 'vee' in url:
pass
xbmcplugin.addSortMethod(addon_handle, xbmcplugin.SORT_METHOD_TITLE);
def Youtube_StandUP_grab():
pass
###########################Watch series Grab##########################################
def Grab_Season(iconimage,url,extra):
image = ' '
description = ' '
fanart = ' '
season = ' '
OPEN = OPEN_URL(url)
image = re.compile('<img src="(.+?)">').findall(OPEN)
for image in image:
image = image
background = re.compile('style="background-image: url\((.+?)\)">').findall(OPEN)
for fanart in background:
fanart = fanart
match = re.compile('itemprop="season".+?href=".+?" href="(.+?)".+?aria-hidden=".+?"></i>.+?S(.+?)</span>',re.DOTALL).findall(OPEN)
for url,season in match:
season = 'S'+(season).replace(' ','').replace('\n','').replace(' ','').replace(' ','')
url = Main + url
Menu((season).replace(' ',''),url,101,image,fanart,description,'')
setView('Movies', 'info')
def Grab_Episode(url,name,fanart,extra,iconimage):
main_name = extra
season = name
OPEN = OPEN_URL(url)
image = iconimage
match = re.compile('<li itemprop="episode".+?<meta itemprop="url" content="(.+?)">.+?<span class="" itemprop="name">(.+?)</span>.+?<span itemprop="datepublished">(.+?)</span></span>.+?</li>',re.DOTALL).findall(OPEN)
for url,name,date in match:
        name = (name).replace(' ','-').replace('---',' - ').replace('&#039;',"'").replace('&amp;','&').replace('&quot;','"')
url = Main+url
date = date
full_name = name+' - [COLORred]'+date+'[/COLOR]'
Menu(full_name,url,102,image,fanart,'Aired : '+date,full_name)
def Get_Sources(name,URL,iconimage,fanart):
HTML = OPEN_URL(URL)
match = re.compile('<td>.+?<a href="/link/(.+?)".+?height="16px">(.+?)\n',re.DOTALL).findall(HTML)
for url,name in match:
for item in Sources:
if item in url:
URL = 'http://www.watchseries.ac/link/' + url
Play(name,URL,106,IMAGES,FANART,'','')
if len(match)<=0:
Menu('[COLORred]NO STREAMS AVAILABLE[/COLOR]','','','','','','')
def Get_site_link(url,name):
season_name = name
HTML = OPEN_URL(url)
match = re.compile('<iframe style=.+?" src="(.+?)"').findall(HTML)
match2 = re.compile('<IFRAME SRC="(.+?)"').findall(HTML)
match3 = re.compile('<IFRAME style=".+?" SRC="(.+?)"').findall(HTML)
for url in match:
main(url,season_name)
for url in match2:
main(url,season_name)
for url in match3:
main(url,season_name)
def main(url,season_name):
if 'daclips.in' in url:
daclips(url,season_name)
elif 'filehoot.com' in url:
filehoot(url,season_name)
elif 'allmyvideos.net' in url:
allmyvid(url,season_name)
elif 'vidspot.net' in url:
vidspot(url,season_name)
elif 'vodlocker' in url:
vodlocker(url,season_name)
elif 'vidto' in url:
vidto(url,season_name)
def vidto(url,season_name):
HTML = OPEN_URL(url)
match = re.compile('"file" : "(.+?)",\n.+?"default" : .+?,\n.+?"label" : "(.+?)"',re.DOTALL).findall(HTML)
for Link,name in match:
Printer(Link,season_name)
def allmyvid(url,season_name):
HTML = OPEN_URL(url)
match = re.compile('"file" : "(.+?)",\n.+?"default" : .+?,\n.+?"label" : "(.+?)"',re.DOTALL).findall(HTML)
for Link,name in match:
Printer(Link,season_name)
def vidspot(url,season_name):
HTML = OPEN_URL(url)
match = re.compile('"file" : "(.+?)",\n.+?"default" : .+?,\n.+?"label" : "(.+?)"').findall(HTML)
for Link,name in match:
Printer(Link,season_name)
def vodlocker(url,season_name):
HTML = OPEN_URL(url)
match = re.compile('file: "(.+?)",.+?skin',re.DOTALL).findall(HTML)
for Link in match:
Printer(Link,season_name)
def daclips(url,season_name):
HTML = OPEN_URL(url)
match = re.compile('{ file: "(.+?)", type:"video" }').findall(HTML)
for Link in match:
Printer(Link,season_name)
def filehoot(url,season_name):
HTML = OPEN_URL(url)
match = re.compile('file: "(.+?)",.+?skin',re.DOTALL).findall(HTML)
for Link in match:
Printer(Link,season_name)
def Printer(Link,season_name):
if 'http:/' in Link:
Resolve(Link)
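# Sketch of the flow above: each host-specific scraper fetches the embed page
# with OPEN_URL(), regexes the media URL out of the player setup (e.g. the
# 'file: "..."' block), and Printer() hands any http:/ link to Resolve() for
# playback.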
###########################################Watch series end###########################################
#############################search#################################################
def Search():
filename = ['magic','magicnew']
Search_Name = Dialog.input('[COLORred]abracadabra[/COLOR]', type=xbmcgui.INPUT_ALPHANUM)
Search_Title = Search_Name.lower()
for file_name in filename:
Search_Url = 'http://herovision.x10host.com/abracadabra/'+file_name+'.php'
HTML = OPEN_URL(Search_Url)
match = re.compile('<NAME="(.+?)"<URL="(.+?)"<MODE="(.+?)"<IMAGE="(.+?)"<FANART="(.+?)"<DESC="(.+?)"').findall(HTML)
for name,url,mode,image,fanart,desc in match:
if Search_Title in name.lower():
if image == 'IMAGES':
image = IMAGES
if fanart == 'FANART':
fanart = FANART
if '.php' in url:
Menu(name,url,4,image,fanart,desc,'')
if mode == 'single':
Play(name,url,9,image,fanart,desc,'')
elif mode == 'playlist':
Menu(name,url,7,image,fanart,desc,'')
elif mode == 'watchseries':
Menu(name,url,100,image,fanart,desc,name)
elif mode == 'normal':
Play(name,url,5,image,fanart,desc,'')
xbmcplugin.addSortMethod(addon_handle, xbmcplugin.SORT_METHOD_TITLE);
#################################search end##################################################
def Play_Stage(url):
HTML = OPEN_URL(url)
playlink = re.compile("url\[.+?\] = '(.+?)';").findall(HTML)
for url in playlink:
Resolve((url).replace('[','').replace(']','').replace('\'',''))
def Menu(name,url,mode,iconimage,fanart,description,extra,showcontext=True,allinfo={}):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&iconimage="+urllib.quote_plus(iconimage)+"&fanart="+urllib.quote_plus(fanart)+"&description="+urllib.quote_plus(description)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name, "Plot": description } )
liz.setProperty( "Fanart_Image", fanart )
if showcontext:
contextMenu = []
if showcontext == 'fav':
contextMenu.append(('Remove from abracadabra Favorites','XBMC.RunPlugin(%s?mode=105&name=%s)'
%(sys.argv[0], urllib.quote_plus(name))))
if not name in FAV:
contextMenu.append(('Add to abracadabra Favorites','XBMC.RunPlugin(%s?mode=104&name=%s&url=%s&iconimage=%s&fanart=%s&fav_mode=%s)'
%(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), urllib.quote_plus(fanart), mode)))
liz.addContextMenuItems(contextMenu)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
def Play(name,url,mode,iconimage,fanart,description,extra,showcontext=True,allinfo={}):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&iconimage="+urllib.quote_plus(iconimage)+"&fanart="+urllib.quote_plus(fanart)+"&description="+urllib.quote_plus(description)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name, "Plot": description } )
liz.setProperty( "Fanart_Image", fanart )
if showcontext:
contextMenu = []
if showcontext == 'fav':
contextMenu.append(('Remove from abracadabra Favorites','XBMC.RunPlugin(%s?mode=105&name=%s)'
%(sys.argv[0], urllib.quote_plus(name))))
if not name in FAV:
contextMenu.append(('Add to abracadabra Favorites','XBMC.RunPlugin(%s?mode=104&name=%s&url=%s&iconimage=%s&fanart=%s&fav_mode=%s)'
%(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), urllib.quote_plus(fanart), mode)))
liz.addContextMenuItems(contextMenu)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
return ok
def RESOLVE(url):
play=xbmc.Player(GetPlayerCore())
import urlresolver
url = (url).strip()
    try: play.play(url)
except: pass
def GetPlayerCore():
try:
PlayerMethod=getSet("core-player")
if (PlayerMethod=='DVDPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_DVDPLAYER
elif (PlayerMethod=='MPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_MPLAYER
elif (PlayerMethod=='PAPLAYER'): PlayerMeth=xbmc.PLAYER_CORE_PAPLAYER
else: PlayerMeth=xbmc.PLAYER_CORE_AUTO
except: PlayerMeth=xbmc.PLAYER_CORE_AUTO
    return PlayerMeth
#=============================== Favourites ----------- not sure whose code this is, but credit is due to them -------------------------------
def addon_log(string):
if debug == 'true':
xbmc.log("[addon.live.GenieTV-%s]: %s" %(addon_version, string))
def addFavorite(name,url,iconimage,fanart,mode,playlist=None,regexs=None):
favList = []
try:
        # encode the name as UTF-8, ignoring characters that cannot be encoded
name = name.encode('utf-8', 'ignore')
except:
pass
if os.path.exists(favourites)==False:
addon_log('Making Favorites File')
favList.append((name,url,iconimage,fanart,mode,playlist,regexs))
a = open(favourites, "w")
a.write(json.dumps(favList))
a.close()
else:
addon_log('Appending Favorites')
a = open(favourites).read()
data = json.loads(a)
data.append((name,url,iconimage,fanart,mode))
b = open(favourites, "w")
b.write(json.dumps(data))
b.close()
def getFavorites():
if os.path.exists(favourites)==False:
favList = []
addon_log('Making Favorites File')
favList.append(('abracadabra Favourites Section','','','','','',''))
a = open(favourites, "w")
a.write(json.dumps(favList))
a.close()
else:
items = json.loads(open(favourites).read())
total = len(items)
for i in items:
name = i[0]
url = i[1]
iconimage = i[2]
try:
fanArt = i[3]
if fanArt == None:
raise
except:
if ADDON.getSetting('use_thumb') == "true":
fanArt = iconimage
else:
fanArt = fanart
try: playlist = i[5]
except: playlist = None
try: regexs = i[6]
except: regexs = None
            if i[4] == 0:
                Menu(name,url,'',iconimage,fanArt,'','','fav')
            else:
                Menu(name,url,i[4],iconimage,fanArt,'','','fav')
def rmFavorite(name):
data = json.loads(open(favourites).read())
for index in range(len(data)):
if data[index][0]==name:
del data[index]
b = open(favourites, "w")
b.write(json.dumps(data))
b.close()
break
xbmc.executebuiltin("XBMC.Container.Refresh")
############################## FAVOURITES END ###############################
def get_params():
param=[]
paramstring=sys.argv[2]
if len(paramstring)>=2:
params=sys.argv[2]
        cleanedparams=params.replace('?','')
        if (params[len(params)-1]=='/'):
            cleanedparams=cleanedparams[0:len(cleanedparams)-1]
pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
return param
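# Illustrative sketch: with sys.argv[2] == '?url=http%3A%2F%2Fx&mode=4&name=Foo'
# get_params() returns {'url': 'http%3A%2F%2Fx', 'mode': '4', 'name': 'Foo'};
# the values stay percent-encoded here and are unquoted individually below.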
params=get_params()
url=None
name=None
iconimage=None
mode=None
description=None
extra=None
fav_mode=None
try:
fav_mode=int(params["fav_mode"])
except:
pass
try:
extra=urllib.unquote_plus(params["extra"])
except:
pass
try:
url=urllib.unquote_plus(params["url"])
except:
pass
try:
name=urllib.unquote_plus(params["name"])
except:
pass
try:
iconimage=urllib.unquote_plus(params["iconimage"])
except:
pass
try:
mode=int(params["mode"])
except:
pass
try:
fanart=urllib.unquote_plus(params["fanart"])
except:
pass
try:
description=urllib.unquote_plus(params["description"])
except:
pass
def Resolve(url):
play=xbmc.Player()
import urlresolver
try: play.play(url)
except: pass
def OPEN_URL(url):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = ''
link = ''
try:
response = urllib2.urlopen(req)
link=response.read()
response.close()
except: pass
if link != '':
return link
else:
link = 'Opened'
return link
def setView(content, viewType):
if content:
xbmcplugin.setContent(int(sys.argv[1]), content)
if mode == None : Main_Menu()
elif mode == 1 : Stand_up()
elif mode == 2 : Search()
elif mode == 3 : Play_Stage(url)
elif mode == 4 : Regex(url)
elif mode == 5 : Resolve(url)
elif mode == 8 : Search()
elif mode == 7 : grab_youtube_playlist(url)
elif mode == 9 : yt.PlayVideo(url)
elif mode == 100 : Grab_Season(iconimage,url,extra)
elif mode == 101 : Grab_Episode(url,name,fanart,extra,iconimage)
elif mode == 102 : Get_Sources(name,url,iconimage,fanart)
elif mode == 106 : Get_site_link(url,name)
elif mode==103:
addon_log("getFavorites")
getFavorites()
elif mode==104:
addon_log("addFavorite")
try:
name = name.split('\\ ')[1]
except:
pass
try:
name = name.split(' - ')[0]
except:
pass
addFavorite(name,url,iconimage,fanart,fav_mode)
elif mode==105:
addon_log("rmFavorite")
try:
name = name.split('\\ ')[1]
except:
pass
try:
name = name.split(' - ')[0]
except:
pass
rmFavorite(name)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
#def Search():
# Search_Name = Dialog.input('Search', type=xbmcgui.INPUT_ALPHANUM)
# Search_Title = Search_Name.lower()
# HTML = OPEN_URL(BASE)
# Block = re.compile('<tr>.+?<td width=".+?" align=".+?">.+?<img border=".+?" src="..(.+?)" width=".+?" height=".+?"></td>.+?<td width=".+?" valign=".+?" align=".+?"><font size=".+?">(.+?)</font></td>.+?<td width=".+?">(.+?)</td>',re.DOTALL).findall(HTML)
# for img, comic, c in Block:
# for Search_Name in comic:
# find_URL = re.compile('<a href="(.+?)">(.+?)</a>',re.DOTALL).findall(c)
# for url, name in find_URL:
# if 'tube' in url:
# pass
# elif 'stage' in url:
# Play(comic + ' - ' + name,(url).replace('" target="_blank',''),3,'http://couchtripper.com/'+img,FANART,'')
# elif 'vee' in url:
# pass
|
dannyperry571/theapprentice
|
plugin.video.abracadabra/default.py
|
Python
|
gpl-2.0
| 21,244
|
#!/usr/bin/env python
# check_snmp_large_storage.py - Check the used / free disk space of a device via SNMP
# (using the HOST-RESOURCES-MIB hrStorageSize).
# Copyright (C) 2016-2019 rsmuc <rsmuc@sec-dev.de>
# This file is part of "Health Monitoring Plugins".
# "Health Monitoring Plugins" is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# "Health Monitoring Plugins" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with "Health Monitoring Plugins". If not, see <https://www.gnu.org/licenses/>.
# Import PluginHelper and some utility constants from the Plugins module
from __future__ import absolute_import, division, print_function
from pynag.Plugins import ok
import health_monitoring_plugins.storage
if __name__ == '__main__':
# pylint: disable=C0103
helper = health_monitoring_plugins.SnmpHelper()
helper.parser.add_option('-p', '--partition',
dest='partition',
help='The disk / partition you want to monitor',
type='str')
helper.parser.add_option('-u', '--unit', dest="targetunit",
help="The unit you want to have (MB, GB, TB)", default="GB")
helper.parser.add_option('-s', '--scan', dest='scan_flag', default=False, action="store_true",
help='Show all available storages')
helper.parse_arguments()
sess = health_monitoring_plugins.SnmpSession(**helper.get_snmp_args())
# The default return value should be always OK
helper.status(ok)
storage = health_monitoring_plugins.storage.Storage(sess)
# if no partition is set, we will do a scan; or if scan_flag is set
if helper.options.partition == "" or \
helper.options.partition is None or \
helper.options.scan_flag:
# Device information
# run_scan()
storage.run_scan(helper, sess)
# the check for the defined partition
else:
storage.check_partition(helper, sess)
helper.check_all_metrics()
# Print out plugin information and exit nagios-style
helper.exit()
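# Usage sketch ('-H' and the other SNMP options are assumed to come from
# SnmpHelper; '-p', '-u' and '--scan' are defined above):
#   ./check_snmp_large_storage.py -H 192.0.2.10 --scan
#   ./check_snmp_large_storage.py -H 192.0.2.10 -p "/var" -u GB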
|
rsmuc/health_monitoring_plugins
|
health_monitoring_plugins/check_snmp_large_storage/check_snmp_large_storage.py
|
Python
|
gpl-2.0
| 2,573
|
# -*- coding: iso-8859-1 -*-
#
# Copyright (C) 2001 - 2020 Massimo Gerardi all rights reserved.
#
# Author: Massimo Gerardi massimo.gerardi@gmail.com
#
# Copyright (c) 2020 Qsistemi.com. All rights reserved.
#
# Viale Giorgio Ribotta, 11 (Roma)
# 00144 Roma (RM) - Italy
# Phone: (+39) 06.87.163
#
#
# See the COPYING file for the software licensing terms.
#
# www.qsistemi.com - italy@qsistemi.com
from reportlab.lib.pagesizes import *
def layout():
return portrait(A4)
def struttura (c):
c.setLineWidth(1)
c.setFont('Helvetica',14)
c.drawString(26.439,779.669,_("Cliente: "))
c.rect(14.5299,800.389,558.02,-30.2045,1,0)
c.rect(14.5299,767.54,558.932,-110.622,1,0)
c.drawString(26.439,718.501,_("Telefono:"))
c.drawString(297.182,718.501,_("Mobile:"))
c.drawString(26.439,745.864,_("Indirizzo:"))
c.drawString(266.17,695.697,_("Rifer.:"))
c.drawString(26.439,696.61,_("Note:"))
def testata (c,row):
c.setFont('Helvetica',18)
c.drawString(100.77,780.497,str(row['RAG_SOC']))
c.drawString(97.518,747.106,str(row['IND_CAP_ZONA_PR']))
c.drawString(26.3724,672.033,str(row['NOTE']))
c.drawString(93.8695,719.329,str(row['TEL_ABIT']))
c.setFont('Helvetica',14)
c.drawString(496.024,779.255,str(row['COD_AGE']))
c.setFont('Helvetica',14)
c.drawString(324.545,695.698,str(row['NSRIF']))
c.setFont('Helvetica',18)
c.drawString(355.558,719.329,str(row['MOBILE']))
def querytestata ():
return '''SELECT (RAG_SOC1||" "||RAG_SOC2) as RAG_SOC, (INDIRIZ||" "||CAP||" "||ZONA||" "||PR ) as IND_CAP_ZONA_PR,
COD_AGE,NSRIF,NOTE,TEL_ABIT,MOBILE
FROM anag WHERE T_CPART = "%s" AND PRECON ="%s"
AND NSRIF LIKE "%s" AND RAG_SOC1 LIKE "%s" AND NOTE LIKE "%s" AND COD_AGE= "%s" ORDER BY RAG_SOC1'''
def Ycorpo ():
return 1
def blocchi():
return [(0, -175), (0, -175), (0, -175)]
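# Illustrative sketch (placeholder values are hypothetical): the caller is
# expected to fill the six placeholders of querytestata() in order, e.g.
#   querytestata() % ("C", "N", "%", "%", "%", "AG01")
# i.e. values for T_CPART and PRECON, LIKE patterns for NSRIF, RAG_SOC1 and
# NOTE, plus COD_AGE.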
|
phasis/phasis
|
phasis/finc/lstnag.py
|
Python
|
gpl-2.0
| 1,877
|
import pygame
pygame.init()
resolution = (width, height) = (600, 400)
screen = pygame.display.set_mode(resolution)
clock = pygame.time.Clock()
pygame.mouse.set_visible(False)
miraimg = pygame.image.load("mira.png")
mirarect = miraimg.get_rect()
while True:
clock.tick(60)
screen.fill(0)
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
exit(0)
mousepos = pygame.mouse.get_pos()
mirarect.centerx = mousepos[0]
mirarect.centery = mousepos[1]
screen.blit(miraimg, mirarect)
pygame.display.flip()
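# Sketch of the idea above: the system cursor is hidden and "mira.png" is
# blitted centred on pygame.mouse.get_pos() every frame, giving a custom
# crosshair cursor; clock.tick(60) caps the loop at roughly 60 FPS.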
|
codeskyblue/pygame-cookbook
|
1-mousechange/changemouse.py
|
Python
|
gpl-2.0
| 606
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) Hugo Lindström <hugolm84@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from scrapy.selector import Selector
from tomahawkspider import TomahawkCrawlSpider
from tomahawk.helpers.tomahawkspiderhelper import extract, TomahawkSpiderHelper
from tomahawk.itemloaders import TomahawkItemLoader
class MetacriticSpider(TomahawkCrawlSpider):
name = 'Metacritic'
start_urls = [
"http://www.metacritic.com/music"
]
genre_nav_xpath = './/ul[@class="genre_nav"]/li'
next_page_xpath = './/div[@class="page_flipper"]/span[@class="flipper next"]/a'
current_page_name_xpath = './/ul[contains(@class, "tabs")]/li/span[@class="active"]/span/text()'
list_xpath = './/ol[contains(@class,"list_product_condensed")]/li'
rules = (
TomahawkCrawlSpider.follow_link_as_chart(
xpath=genre_nav_xpath,
deny=["music", "name"],
),
TomahawkCrawlSpider.follow_link_as_next(
xpath=next_page_xpath,
allow=[r'\?page=[1-9]']
),
)
def get_current_genre(self, selector):
navList = selector.xpath(self.genre_nav_xpath)
for index, item in enumerate(navList):
if item.xpath('.//span'):
return item.xpath('.//span/text()').extract()[0].strip()
return None
def do_create_chart(self, chart, response):
name = self.get_current_genre(chart.selector)
chart.add_value("name", name)
chart.add_value("type", TomahawkSpiderHelper.AlbumType)
chart.add_xpath("description", self.current_page_name_xpath)
return chart
def do_parse(self, chart, response):
selector = Selector(response)
for rank, item in enumerate(selector.xpath(self.list_xpath)):
entry = TomahawkItemLoader(selector=item)
entry.add_value("rank", rank)
entry.add_xpath("artist", './/span[@class="data"]/text()')
entry.add_xpath("album", './/a/text()')
chart.add_value("list", entry.load_item())
        # process the item if there are no more next pages; otherwise return None and keep parsing
next_selector = selector.xpath(self.next_page_xpath)
if not next_selector:
self.log("No more next page! Processing")
return self.do_process_item(chart)
next_page = extract(self.next_page_xpath+"/@href", selector)[-1:]
if next_page and int(next_page) > 9:
self.log("Maximum depth! Processing")
return self.do_process_item(chart)
return None
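# Flow sketch: do_create_chart() names the chart after the active genre tab,
# do_parse() adds one TomahawkItemLoader entry per list row, and the chart is
# only processed once there is no next page link or page 9 has been reached.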
|
hugolm84/tomahawk-charts
|
scraper/tomahawk/spiders/metacriticspider.py
|
Python
|
gpl-2.0
| 3,234
|
#! /usr/bin/python
import sys
from PyQt4 import QtGui,QtCore
class Button(QtGui.QPushButton):
def __init__(self,title,parent):
super(Button, self).__init__(title,parent)
def mouseMoveEvent(self,e):
if e.buttons()!=QtCore.Qt.RightButton:
return
mimeData=QtCore.QMimeData()
drag=QtGui.QDrag(self)
drag.setMimeData(mimeData)
drag.setHotSpot(e.pos()-self.rect().topLeft())
dropAction=drag.start(QtCore.Qt.MoveAction)
def mousePressEvent(self,e):
QtGui.QPushButton.mousePressEvent(self,e)
if e.button()==QtCore.Qt.LeftButton:
print 'press'
class Example(QtGui.QWidget):
def __init__(self):
super(Example, self).__init__()
self.initUI()
def initUI(self):
self.setWindowTitle('Click or Move')
self.setGeometry(300,300,280,150)
self.setAcceptDrops(True)
self.button=Button('Button',self)
self.button.move(100,65)
def dragEnterEvent(self,e):
e.accept()
def dropEvent(self,e):
position=e.pos()
self.button.move(position)
e.setDropAction(QtCore.Qt.MoveAction)
e.accept()
if __name__=='__main__':
app=QtGui.QApplication(sys.argv)
ex=Example()
ex.show()
sys.exit(app.exec_())
|
Urinx/PyQt4.tutorial
|
examples/33.dragdrop2.py
|
Python
|
gpl-2.0
| 1,132
|
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
# Copyright (C) 2017 Nicolargo <nicolas@nicolargo.com>
#
# Glances is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Glances is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""CPU plugin."""
from ocglances.timer import getTimeSinceLastUpdate
from ocglances.compat import iterkeys
from ocglances.cpu_percent import cpu_percent
from ocglances.globals import LINUX
from ocglances.plugins.glances_core import Plugin as CorePlugin
from ocglances.plugins.glances_plugin import GlancesPlugin
import ocglances.psutil as psutil
# SNMP OID
# percentage of user CPU time: .1.3.6.1.4.1.2021.11.9.0
# percentages of system CPU time: .1.3.6.1.4.1.2021.11.10.0
# percentages of idle CPU time: .1.3.6.1.4.1.2021.11.11.0
snmp_oid = {'default': {'user': '1.3.6.1.4.1.2021.11.9.0',
'system': '1.3.6.1.4.1.2021.11.10.0',
'idle': '1.3.6.1.4.1.2021.11.11.0'},
'windows': {'percent': '1.3.6.1.2.1.25.3.3.1.2'},
'esxi': {'percent': '1.3.6.1.2.1.25.3.3.1.2'},
'netapp': {'system': '1.3.6.1.4.1.789.1.2.1.3.0',
'idle': '1.3.6.1.4.1.789.1.2.1.5.0',
'nb_log_core': '1.3.6.1.4.1.789.1.2.1.6.0'}}
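# Illustrative sketch: get_stats_snmp() with snmp_oid['default'] is expected to
# return string values, e.g. {'user': '12.0', 'system': '3.0', 'idle': '85.0'};
# update_snmp() below converts them to float before deriving 'total'.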
# Define the history items list
# - 'name' define the stat identifier
# - 'color' define the graph color in #RGB format
# - 'y_unit' define the Y label
# All items in this list will be historised if the --enable-history tag is set
items_history_list = [{'name': 'user',
'description': 'User CPU usage',
'color': '#00FF00',
'y_unit': '%'},
{'name': 'system',
'description': 'System CPU usage',
'color': '#FF0000',
'y_unit': '%'}]
class Plugin(GlancesPlugin):
"""Glances CPU plugin.
'stats' is a dictionary that contains the system-wide CPU utilization as a
percentage.
"""
def __init__(self, args=None):
"""Init the CPU plugin."""
super(Plugin, self).__init__(args=args, items_history_list=items_history_list)
# We want to display the stat in the curse interface
self.display_curse = True
# Init stats
self.reset()
# Call CorePlugin in order to display the core number
try:
self.nb_log_core = CorePlugin(args=self.args).update()["log"]
except Exception:
self.nb_log_core = 1
def reset(self):
"""Reset/init the stats."""
self.stats = {}
@GlancesPlugin._check_decorator
@GlancesPlugin._log_result_decorator
def update(self):
"""Update CPU stats using the input method."""
# Reset stats
self.reset()
# Grab stats into self.stats
if self.input_method == 'local':
self.update_local()
elif self.input_method == 'snmp':
self.update_snmp()
return self.stats
def update_local(self):
"""Update CPU stats using PSUtil."""
# Grab CPU stats using psutil's cpu_percent and cpu_times_percent
# Get all possible values for CPU stats: user, system, idle,
# nice (UNIX), iowait (Linux), irq (Linux, FreeBSD), steal (Linux 2.6.11+)
# The following stats are returned by the API but not displayed in the UI:
# softirq (Linux), guest (Linux 2.6.24+), guest_nice (Linux 3.2.0+)
self.stats['total'] = cpu_percent.get()
cpu_times_percent = psutil.cpu_times_percent(interval=0.0)
for stat in ['user', 'system', 'idle', 'nice', 'iowait',
'irq', 'softirq', 'steal', 'guest', 'guest_nice']:
if hasattr(cpu_times_percent, stat):
self.stats[stat] = getattr(cpu_times_percent, stat)
        # Additional CPU stats (number of events / not as a %)
# ctx_switches: number of context switches (voluntary + involuntary) per second
# interrupts: number of interrupts per second
# soft_interrupts: number of software interrupts per second. Always set to 0 on Windows and SunOS.
# syscalls: number of system calls since boot. Always set to 0 on Linux.
try:
cpu_stats = psutil.cpu_stats()
except AttributeError:
# cpu_stats only available with PSUtil 4.1 or +
pass
else:
# By storing time data we enable Rx/s and Tx/s calculations in the
# XML/RPC API, which would otherwise be overly difficult work
# for users of the API
time_since_update = getTimeSinceLastUpdate('cpu')
# Previous CPU stats are stored in the cpu_stats_old variable
if not hasattr(self, 'cpu_stats_old'):
# First call, we init the cpu_stats_old var
self.cpu_stats_old = cpu_stats
else:
for stat in cpu_stats._fields:
if getattr(cpu_stats, stat) is not None:
self.stats[stat] = getattr(cpu_stats, stat) - getattr(self.cpu_stats_old, stat)
self.stats['time_since_update'] = time_since_update
# Core number is needed to compute the CTX switch limit
self.stats['cpucore'] = self.nb_log_core
# Save stats to compute next step
self.cpu_stats_old = cpu_stats
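    # Illustrative sketch of the resulting stats dict on Linux (values made
    # up, not from a real run):
    #   {'total': 12.3, 'user': 8.0, 'system': 3.1, 'idle': 87.7,
    #    'iowait': 0.4, 'ctx_switches': 51234, 'time_since_update': 2.0,
    #    'cpucore': 4}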
def update_snmp(self):
"""Update CPU stats using SNMP."""
# Update stats using SNMP
if self.short_system_name in ('windows', 'esxi'):
# Windows or VMWare ESXi
            # You can find the CPU utilization of a Windows system by querying the OID;
            # it also gives the number of cores (number of elements in the table)
try:
cpu_stats = self.get_stats_snmp(snmp_oid=snmp_oid[self.short_system_name],
bulk=True)
            except KeyError:
                self.reset()
                return self.stats
            # Iterate over the CPUs and compute the idle CPU stats
self.stats['nb_log_core'] = 0
self.stats['idle'] = 0
for c in cpu_stats:
if c.startswith('percent'):
self.stats['idle'] += float(cpu_stats['percent.3'])
self.stats['nb_log_core'] += 1
if self.stats['nb_log_core'] > 0:
self.stats['idle'] = self.stats[
'idle'] / self.stats['nb_log_core']
self.stats['idle'] = 100 - self.stats['idle']
self.stats['total'] = 100 - self.stats['idle']
else:
            # Default behavior
try:
self.stats = self.get_stats_snmp(
snmp_oid=snmp_oid[self.short_system_name])
except KeyError:
self.stats = self.get_stats_snmp(
snmp_oid=snmp_oid['default'])
if self.stats['idle'] == '':
self.reset()
return self.stats
# Convert SNMP stats to float
for key in iterkeys(self.stats):
self.stats[key] = float(self.stats[key])
self.stats['total'] = 100 - self.stats['idle']
def update_views(self):
"""Update stats views."""
# Call the father's method
super(Plugin, self).update_views()
# Add specifics informations
# Alert and log
for key in ['user', 'system', 'iowait']:
if key in self.stats:
self.views[key]['decoration'] = self.get_alert_log(self.stats[key], header=key)
# Alert only
for key in ['steal', 'total']:
if key in self.stats:
self.views[key]['decoration'] = self.get_alert(self.stats[key], header=key)
# Alert only but depend on Core number
for key in ['ctx_switches']:
if key in self.stats:
self.views[key]['decoration'] = self.get_alert(self.stats[key], maximum=100 * self.stats['cpucore'], header=key)
# Optional
for key in ['nice', 'irq', 'iowait', 'steal', 'ctx_switches', 'interrupts', 'soft_interrupts', 'syscalls']:
if key in self.stats:
self.views[key]['optional'] = True
def msg_curse(self, args=None):
"""Return the list to display in the UI."""
# Init the return message
ret = []
# Only process if stats exist and plugin not disable
if not self.stats or self.is_disable():
return ret
# Build the string message
        # If the user stat is missing, display only idle / total CPU usage (for
        # example on Windows OS)
idle_tag = 'user' not in self.stats
# Header
msg = '{:8}'.format('CPU')
ret.append(self.curse_add_line(msg, "TITLE"))
# Total CPU usage
msg = '{:>5}%'.format(self.stats['total'])
if idle_tag:
ret.append(self.curse_add_line(
msg, self.get_views(key='total', option='decoration')))
else:
ret.append(self.curse_add_line(msg))
# Nice CPU
if 'nice' in self.stats:
msg = ' {:8}'.format('nice:')
ret.append(self.curse_add_line(msg, optional=self.get_views(key='nice', option='optional')))
msg = '{:>5}%'.format(self.stats['nice'])
ret.append(self.curse_add_line(msg, optional=self.get_views(key='nice', option='optional')))
# ctx_switches
if 'ctx_switches' in self.stats:
msg = ' {:8}'.format('ctx_sw:')
ret.append(self.curse_add_line(msg, optional=self.get_views(key='ctx_switches', option='optional')))
msg = '{:>5}'.format(int(self.stats['ctx_switches'] // self.stats['time_since_update']))
ret.append(self.curse_add_line(
msg, self.get_views(key='ctx_switches', option='decoration'),
optional=self.get_views(key='ctx_switches', option='optional')))
# New line
ret.append(self.curse_new_line())
# User CPU
if 'user' in self.stats:
msg = '{:8}'.format('user:')
ret.append(self.curse_add_line(msg))
msg = '{:>5}%'.format(self.stats['user'])
ret.append(self.curse_add_line(
msg, self.get_views(key='user', option='decoration')))
elif 'idle' in self.stats:
msg = '{:8}'.format('idle:')
ret.append(self.curse_add_line(msg))
msg = '{:>5}%'.format(self.stats['idle'])
ret.append(self.curse_add_line(msg))
# IRQ CPU
if 'irq' in self.stats:
msg = ' {:8}'.format('irq:')
ret.append(self.curse_add_line(msg, optional=self.get_views(key='irq', option='optional')))
msg = '{:>5}%'.format(self.stats['irq'])
ret.append(self.curse_add_line(msg, optional=self.get_views(key='irq', option='optional')))
# interrupts
if 'interrupts' in self.stats:
msg = ' {:8}'.format('inter:')
ret.append(self.curse_add_line(msg, optional=self.get_views(key='interrupts', option='optional')))
msg = '{:>5}'.format(int(self.stats['interrupts'] // self.stats['time_since_update']))
ret.append(self.curse_add_line(msg, optional=self.get_views(key='interrupts', option='optional')))
# New line
ret.append(self.curse_new_line())
# System CPU
if 'system' in self.stats and not idle_tag:
msg = '{:8}'.format('system:')
ret.append(self.curse_add_line(msg))
msg = '{:>5}%'.format(self.stats['system'])
ret.append(self.curse_add_line(
msg, self.get_views(key='system', option='decoration')))
else:
msg = '{:8}'.format('core:')
ret.append(self.curse_add_line(msg))
msg = '{:>6}'.format(self.stats['nb_log_core'])
ret.append(self.curse_add_line(msg))
# IOWait CPU
if 'iowait' in self.stats:
msg = ' {:8}'.format('iowait:')
ret.append(self.curse_add_line(msg, optional=self.get_views(key='iowait', option='optional')))
msg = '{:>5}%'.format(self.stats['iowait'])
ret.append(self.curse_add_line(
msg, self.get_views(key='iowait', option='decoration'),
optional=self.get_views(key='iowait', option='optional')))
# soft_interrupts
if 'soft_interrupts' in self.stats:
msg = ' {:8}'.format('sw_int:')
ret.append(self.curse_add_line(msg, optional=self.get_views(key='soft_interrupts', option='optional')))
msg = '{:>5}'.format(int(self.stats['soft_interrupts'] // self.stats['time_since_update']))
ret.append(self.curse_add_line(msg, optional=self.get_views(key='soft_interrupts', option='optional')))
# New line
ret.append(self.curse_new_line())
# Idle CPU
if 'idle' in self.stats and not idle_tag:
msg = '{:8}'.format('idle:')
ret.append(self.curse_add_line(msg))
msg = '{:>5}%'.format(self.stats['idle'])
ret.append(self.curse_add_line(msg))
# Steal CPU usage
if 'steal' in self.stats:
msg = ' {:8}'.format('steal:')
ret.append(self.curse_add_line(msg, optional=self.get_views(key='steal', option='optional')))
msg = '{:>5}%'.format(self.stats['steal'])
ret.append(self.curse_add_line(
msg, self.get_views(key='steal', option='decoration'),
optional=self.get_views(key='steal', option='optional')))
# syscalls
# syscalls: number of system calls since boot. Always set to 0 on Linux. (do not display)
if 'syscalls' in self.stats and not LINUX:
msg = ' {:8}'.format('syscal:')
ret.append(self.curse_add_line(msg, optional=self.get_views(key='syscalls', option='optional')))
msg = '{:>5}'.format(int(self.stats['syscalls'] // self.stats['time_since_update']))
ret.append(self.curse_add_line(msg, optional=self.get_views(key='syscalls', option='optional')))
# Return the message with decoration
return ret
|
fraoustin/ocglances
|
ocglances/plugins/glances_cpu.py
|
Python
|
gpl-2.0
| 14,792
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.get_translation().gettext
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from .._matchesfilterbase import MatchesFilterBase
#-------------------------------------------------------------------------
#
# MatchesFilter
#
#-------------------------------------------------------------------------
class MatchesFilter(MatchesFilterBase):
"""Rule that checks against another filter"""
name = _('Citations matching the <filter>')
description = _("Matches citations matched by the specified filter name")
namespace = 'Citation'
|
Forage/Gramps
|
gramps/gen/filters/rules/citation/_matchesfilter.py
|
Python
|
gpl-2.0
| 1,765
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import string
class EnumMetaClass:
"""Metaclass for enumeration.
To define your own enumeration, do something like
class Color(Enum):
red = 1
green = 2
blue = 3
    Now, Color.red, Color.green and Color.blue behave totally
    differently: they are enumerated values, not integers.
Enumerations cannot be instantiated; however they can be
subclassed.
"""
def __init__(self, name, bases, dict):
"""Constructor -- create an enumeration.
Called at the end of the class statement. The arguments are
the name of the new class, a tuple containing the base
classes, and a dictionary containing everything that was
entered in the class' namespace during execution of the class
statement. In the above example, it would be {'red': 1,
'green': 2, 'blue': 3}.
"""
for base in bases:
if base.__class__ is not EnumMetaClass:
raise TypeError, "Enumeration base class must be enumeration"
bases = filter(lambda x: x is not Enum, bases)
self.__name__ = name
self.__bases__ = bases
self.__dict = {}
for key, value in dict.items():
self.__dict[key] = EnumInstance(name, key, value)
def __getattr__(self, name):
"""Return an enumeration value.
For example, Color.red returns the value corresponding to red.
This looks in the class dictionary and if it is not found
there asks the base classes.
The special attribute __members__ returns the list of names
defined in this class (it does not merge in the names defined
in base classes).
"""
if name == '__members__':
return self.__dict.keys()
try:
return self.__dict[name]
except KeyError:
for base in self.__bases__:
try:
return getattr(base, name)
except AttributeError:
continue
raise AttributeError(name)
def __repr__(self):
s = self.__name__
if self.__bases__:
s = s + '(' + string.join(map(lambda x: x.__name__,
self.__bases__), ", ") + ')'
if self.__dict:
list = []
for key, value in self.__dict.items():
list.append("%s: %s" % (key, int(value)))
s = "%s: {%s}" % (s, string.join(list, ", "))
return s
class EnumInstance:
"""Class to represent an enumeration value.
EnumInstance('Color', 'red', 12) prints as 'Color.red' and behaves
like the integer 12 when compared, but doesn't support arithmetic.
"""
def __init__(self, classname, enumname, value):
self.__classname = classname
self.__enumname = enumname
self.__value = value
def __int__(self):
return self.__value
def __str__(self):
return self.__enumname
def __repr__(self):
return "%s.%s int(%s)" % (self.__classname, self.__enumname, self.__value)
def __cmp__(self, other):
if isinstance(other, EnumInstance):
return cmp(self.__value, int(other))
else:
raise TypeError, "Invalid type to compare."
# Create the base class for enumerations.
# It is an empty enumeration.
Enum = EnumMetaClass("Enum", (), {})
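# A minimal usage sketch (not part of the original module), following the
# EnumMetaClass docstring above; the Color class and its values are examples.
if __name__ == "__main__":
    Color = EnumMetaClass("Color", (Enum,), {"red": 1, "green": 2, "blue": 3})
    print Color.red                # prints "red" (via EnumInstance.__str__)
    print repr(Color.blue)         # prints "Color.blue int(3)"
    print int(Color.green)         # prints "2"
    print Color.red < Color.blue   # True: compared by value via __cmp__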
|
cria/microSICol
|
py/modules/enum.py
|
Python
|
gpl-2.0
| 3,449
|
"""
Copyright (C) 2009 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
"""
import sys
import apiutil
def GenerateEntrypoints():
#apiutil.CopyrightC()
# Get sorted list of dispatched functions.
# The order is very important - it must match cr_opcodes.h
# and spu_dispatch_table.h
print '%include "iprt/asmdefs.mac"'
print ""
print "%ifdef RT_ARCH_AMD64"
print "extern glim"
print "%else ; X86"
print "extern glim"
print "%endif"
print ""
keys = apiutil.GetDispatchedFunctions(sys.argv[1]+"/APIspec.txt")
for index in range(len(keys)):
func_name = keys[index]
if apiutil.Category(func_name) == "Chromium":
continue
if apiutil.Category(func_name) == "VBox":
continue
print "BEGINPROC_EXPORTED gl%s" % func_name
print "%ifdef RT_ARCH_AMD64"
print "\tjmp \t[glim+%d wrt rip wrt ..gotpcrel]" % (8*index)
print "%else ; X86"
print "\tjmp \t[glim+%d wrt ..gotpc]" % (4*index)
print "%endif"
print "ENDPROC gl%s" % func_name
print ""
print ';'
print '; Aliases'
print ';'
# Now loop over all the functions and take care of any aliases
allkeys = apiutil.GetAllFunctions(sys.argv[1]+"/APIspec.txt")
for func_name in allkeys:
if "omit" in apiutil.ChromiumProps(func_name):
continue
if func_name in keys:
# we already processed this function earlier
continue
# alias is the function we're aliasing
alias = apiutil.Alias(func_name)
if alias:
# this list lookup should never fail (raise an exception)!
index = keys.index(alias)
print "BEGINPROC_EXPORTED gl%s" % func_name
print "%ifdef RT_ARCH_AMD64"
print "\tjmp \t[glim+%d wrt rip wrt ..gotpcrel]" % (8*index)
print "%else ; X86"
print "\tjmp \t[glim+%d wrt ..gotpc]" % (4*index)
print "%endif"
print "ENDPROC gl%s" % func_name
print ""
print ';'
print '; No-op stubs'
print ';'
# Now generate no-op stub functions
for func_name in allkeys:
if "stub" in apiutil.ChromiumProps(func_name):
print "BEGINPROC_EXPORTED gl%s" % func_name
print "\tleave"
print "\tret"
print "ENDPROC gl%s" % func_name
print ""
GenerateEntrypoints()
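# For illustration only (hypothetical function name "Accum" at dispatch
# index 0), each stanza emitted by the loops above has this shape:
#
#   BEGINPROC_EXPORTED glAccum
#   %ifdef RT_ARCH_AMD64
#       jmp     [glim+0 wrt rip wrt ..gotpcrel]
#   %else ; X86
#       jmp     [glim+0 wrt ..gotpc]
#   %endif
#   ENDPROC glAccum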
|
VirtualMonitor/VirtualMonitor
|
src/VBox/Additions/common/crOpenGL/SunOS_i386_exports.py
|
Python
|
gpl-2.0
| 2,893
|
from VMFFile import VMFFile
from VMFNode import getBounds
import copy
import numpy
OUTSIDE_MATERIAL = "DEV/DEV_BLENDMEASURE" # The material marking a portal
DOOR_DISTANCE_TOLERANCE = 16 # see pointNearPlane()
def oppositeDirection(direction):
"""Finds the opposite direction to the given one"""
if direction == "north":
return "south"
elif direction == "south":
return "north"
elif direction == "east":
return "west"
elif direction == "west":
return "east"
elif direction == "up":
return "down"
elif direction == "down":
return "up"
else:
raise AssertionError("Unknown direction \""+direction+"\" for opposite")
return None
# http://stackoverflow.com/questions/1401712/calculate-euclidean-distance-with-numpy
def euclideanDistance(x,y):
"""Returns the euclidean distance"""
return numpy.sqrt(numpy.sum((x-y)**2))
def pointNearPlane(point,bounds):
"""Checks whether a point is near a plane given by it's bounds"""
pointDistance = euclideanDistance(bounds[0],point)
portalSize = euclideanDistance(bounds[0],bounds[1])
return pointDistance < portalSize + DOOR_DISTANCE_TOLERANCE
def findPortalOnSolid(solid):
"""Finds a portal on a solid"""
side = None
for child in solid.children:
if child.name == "side":
if child.properties["material"] == OUTSIDE_MATERIAL :
side = child
break
if side is None:
return None
return side.plane
def getTranslationVector(mapPortal,otherMapPortal):
"""Returns the vector needed to translate the otherMapPortal so that it is moved into the position of the given mapPortal"""
portalBounds = getBounds(mapPortal)
portalSize = portalBounds[1]-portalBounds[0]
otherPortalBounds = getBounds(otherMapPortal)
otherPortalSize = otherPortalBounds[1]-otherPortalBounds[0]
if numpy.array_equal(portalSize,otherPortalSize):
vector = portalBounds - otherPortalBounds
return vector[0]
else:
return None
def translateBounds(bounds, vector):
"""Translates a bounding box by the given vector"""
bounds = bounds.copy()
bounds[0] += vector
bounds[1] += vector
return bounds
def intersect(bounds, otherBounds):
"""Intersects two bounding boxes"""
upperBounds = numpy.append(bounds[1],otherBounds[1]).reshape((2,3))
upperBound = numpy.min(upperBounds, axis=0)
lowerBounds = numpy.append(bounds[0],otherBounds[0]).reshape((2,3))
lowerBound = numpy.max(lowerBounds, axis=0)
return numpy.array([lowerBound,upperBound])
def collide(bounds, otherBounds):
"""Checks whether two bounding boxes collide"""
intersection = intersect(bounds, otherBounds)
size = intersection[1]-intersection[0]
collide = numpy.all(size > numpy.array([0,0,0]))
# if collide:
# print "Tiles collide",intersection
return collide
class MapTile:
"""The MapTile yields data for a complete map"""
def __init__(self):
"""Empty constructor"""
pass
def fromfile(self,filename):
"""Reads a map from a VMF file"""
# TODO: make this a class method
self.map = VMFFile()
self.map.fromfile(filename)
self.bounds = self.map.root.GetBoundsRecurse()
self.filename = filename
self.analyzePortals()
self.once = False
def deepcopy(self):
"""Returns a deep copy of this map"""
deepcopy = MapTile()
deepcopy.map = self.map.deepcopy()
deepcopy.bounds = self.bounds
deepcopy.doors = copy.deepcopy(self.doors)
deepcopy.filename = self.filename
deepcopy.once = self.once
return deepcopy
def setOnce(self, o):
self.once = o
def getOnce(self):
return self.once
def translate(self, vector):
"""Translate this map by the given vector"""
self.map.root.TranslateRecurse(vector)
self.bounds = translateBounds(self.bounds,vector)
def getPortalDirection(self, portalPlane):
"""Find out on which side of the map tile a portal is located"""
# this is probably stored in the U/V-axis material information
portalBounds = getBounds(portalPlane)
portalSize = portalBounds[1]-portalBounds[0]
if portalSize[0] == 0:
if portalBounds[0][0] == self.bounds[1][0]:
return "east"
elif portalBounds[0][0] == self.bounds[0][0]:
return "west"
else:
raise AssertionError("Invalid portal plane "+str(portalBounds)+" neither bound matches "+str(self.bounds))
elif portalSize[1] == 0:
if portalBounds[0][1] == self.bounds[1][1]:
return "north"
elif portalBounds[0][1] == self.bounds[0][1]:
return "south"
else:
raise AssertionError("Invalid portal plane "+str(portalBounds)+" neither bound matches "+str(self.bounds))
elif portalSize[2] == 0:
if portalBounds[0][2] == self.bounds[1][2]:
return "up"
elif portalBounds[0][2] == self.bounds[0][2]:
return "down"
else:
raise AssertionError("Invalid portal plane "+str(portalBounds)+" neither bound matches "+str(self.bounds))
else:
raise AssertionError("Invalid portal plane "+str(portalBounds))
def analyzePortals(self):
"""Find all IDs of solids with a portal and the portals' directions"""
doors = dict({'north': [], 'east': [], 'south': [], 'west': [], 'up': [], 'down': []})
solids = self.map.root.FindRecurse(lambda node : node.name == "solid" and findPortalOnSolid(node) is not None)
for solid in solids:
doors[self.getPortalDirection(findPortalOnSolid(solid))].append(solid.properties["id"])
self.doors = doors
def findConnections(self, otherMap, tailLength=None):
"""Returns a list of possible connections between this and the other map.
If tailLength is set, this map acts as if it only had tailLength portals with the highest IDs."""
connections = []
doorListsByDirection = self.doors.items()
if tailLength:
directionByDoor = dict()
for direction, doorList in doorListsByDirection:
for door in doorList:
directionByDoor[int(door)] = direction
tailDoors = sorted(directionByDoor.iterkeys(),reverse=True)[:tailLength]
doors = dict({'north': [], 'east': [], 'south': [], 'west': [], 'up': [], 'down': []})
for door in tailDoors:
doors[directionByDoor[door]].append(str(door))
doorListsByDirection = doors.items()
for direction, doorList in doorListsByDirection:
if len(doorList) > 0:
#print "Base map has a door in direction",direction
otherDoorList = otherMap.doors[oppositeDirection(direction)]
if len(otherDoorList) > 0:
#print "Other map has a door in opposite direction"
connections.append((direction,doorList,otherDoorList))
print "Have",len(connections),"possible connections"
return connections
def findPortalsAndVector(self, otherMap, connection):
"""Returns all information needed to connect the otherMap to this one using the given connection"""
mapPortal = self.findPortalOnSolidWithId(connection[1])
otherMapPortal = otherMap.findPortalOnSolidWithId(connection[2])
vector = getTranslationVector(mapPortal,otherMapPortal)
return (vector, mapPortal, otherMapPortal)
def append(self, otherMap, connection, vectors):
"""Appends the otherMap data to this one using the given connection and vectors.
Mends the maps together by removing portal solids or doors where applicable."""
otherMap = otherMap.deepcopy()
return self.mend(otherMap, connection, vectors)
def findPortalOnSolidWithId(self,id):
"""Finds a portal on the solid with the given ID."""
solid = self.map.root.FindRecurse(lambda node : node.name == "solid" and node.properties["id"] == id)[0]
portal = findPortalOnSolid(solid)
if portal is None:
print "ERROR: Every portal must have a solid having a side with the material "+OUTSIDE_MATERIAL +" marking the outside"
return portal
def mend(self, otherMap, connection, vectors):
"""Mends the otherMap with this one using the given connection, portals and translation vector."""
vector, mapPortal, otherMapPortal = vectors
if not otherMap == self:
removed = otherMap.map.root.DeleteRecurse(lambda node : "classname" in node.properties and node.properties["classname"] == "info_player_start")
print "Removed",removed,"info_player_start from other map"
removed = otherMap.map.root.DeleteRecurse(lambda node : "classname" in node.properties and node.properties["classname"] == "prop_door_rotating" and pointNearPlane(node.origin,otherMapPortal))
print "Removed",removed,"doors from other map"
removed = otherMap.map.root.DeleteRecurse(lambda node : node.name == "solid" and node.properties["id"] == connection[2])
print "Removed",removed,"solids from other map"
otherMap.doors[oppositeDirection(connection[0])].remove(connection[2])
entities = self.map.root.FindRecurse(lambda node : node.name == "entity" and not node.properties["classname"] == "func_detail" and pointNearPlane(node.origin,mapPortal))
removed = 0
for entity in entities:
removed += entity.DeleteRecurse(lambda node : node.name == "editor")
print "Removed",removed,"editor information from remaining entities in base map"
removed = self.map.root.DeleteRecurse(lambda node : node.name == "solid" and node.properties["id"] == connection[1])
print "Removed",removed,"solids from base map"
self.doors[connection[0]].remove(connection[1])
if not otherMap == self:
maxId = self.map.root.GetMaximumIdRecurse(0)
otherMap.map.root.IncreaseIdRecurse(maxId)
print "Increased IDs in other map by",maxId
print "Translating other map with vector",vector,"..."
otherMap.translate(vector)
print "Adding other map..."
self.map.root.AddOtherMap(otherMap.map.root)
for direction in otherMap.doors.keys():
for portalSolidId in otherMap.doors[direction]:
portalSolidId = str(int(portalSolidId)+maxId)
self.doors[direction].append(portalSolidId)
print "Merged portal info"
def detectLoops(self):
"""Detect loops within this map (tiles positioned in such a way that the player can run in circles)"""
print "Detecting loops..."
zeroVector = numpy.array([0,0,0])
for direction in self.doors.keys():
for portalSolidId in self.doors[direction]:
doorNodes = self.map.root.FindRecurse(lambda node : node.name == "solid" and node.properties["id"] == portalSolidId)
for doorNode in doorNodes:
portal = findPortalOnSolid(doorNode)
otherDoorNodes = self.map.root.FindRecurse(lambda node : not node == doorNode and node.name == "solid" and node.properties["id"] in self.doors[oppositeDirection(direction)] and numpy.array_equal(getTranslationVector(portal,findPortalOnSolid(node)),zeroVector))
if len(otherDoorNodes) == 1:
otherDoorNode = otherDoorNodes[0]
# TODO: the wrong door is removed
self.mend(self,(direction,doorNode.properties["id"],otherDoorNode.properties["id"]), (zeroVector, portal, findPortalOnSolid(otherDoorNode)))
def close(self):
"""Remove remaining door entities from the outside of the map so it becomes compilable."""
self.detectLoops()
# TODO: sometimes not all remaining doors are removed
removed = 0
for direction in self.doors.keys():
for portalSolidId in self.doors[direction]:
doorNodes = self.map.root.FindRecurse(lambda node : node.name == "solid" and node.properties["id"] == portalSolidId)
for doorNode in doorNodes:
portalBounds = getBounds(findPortalOnSolid(doorNode))
removed += self.map.root.DeleteRecurse(lambda node : "classname" in node.properties and node.properties["classname"] == "prop_door_rotating" and pointNearPlane(node.origin,portalBounds))
print "Removed",removed,"doors to close map"
def generateNavMeshScript(self):
"""Generate a config file for generating the nav mesh in game."""
lines = []
lines.append(["sv_cheats 1","z_debug 1","director_stop","nb_delete_all","nav_edit 1"])
start = self.map.root.FindRecurse(lambda node : "classname" in node.properties and node.properties["classname"] == "info_null" and "targetname" in node.properties and node.properties["targetname"] == "start")
if not len(start) == 2:
print "ERROR: Need 2 corners for PLAYER_START nav mesh, got",len(start),"instead"
else:
lines.append(["nav_clear_selected_set","setpos " + start[0].GetOrigin() + "","setang 90 0 0"])
lines.append(["nav_begin_area","setpos " + start[1].GetOrigin() + "","setang 90 0 0"])
lines.append(["nav_end_area","nav_toggle_in_selected_set","mark PLAYER_START","nav_clear_selected_set","clear_attribute PLAYER_START"])
finale = self.map.root.FindRecurse(lambda node : "classname" in node.properties and node.properties["classname"] == "info_null" and "targetname" in node.properties and node.properties["targetname"] == "finale")
if not len(finale) == 2:
print "ERROR: Need 2 corners for FINALE nav mesh, got",len(finale),"instead"
else:
lines.append(["nav_clear_selected_set","setpos " + finale[0].GetOrigin() + "","setang 90 0 0"])
lines.append(["nav_begin_area","setpos " + finale[1].GetOrigin() + "","setang 90 0 0"])
lines.append(["nav_end_area","nav_toggle_in_selected_set","mark FINALE","nav_clear_selected_set","clear_attribute FINALE"])
walkables = self.map.root.FindRecurse(lambda node : "classname" in node.properties and node.properties["classname"] == "info_null" and "targetname" in node.properties and node.properties["targetname"] == "walkable")
for walkable in walkables:
lines.append(["setpos " + walkable.GetOrigin() + "","setang 90 0 0"])
lines.append(["nav_mark_walkable"])
lines.append(["nav_generate_incremental"])
lines.append(["nav_analyze"])
lines.append(["nav_save"])
lines.append(["director_start","sv_cheats 0"])
out = "bind KP_PLUS navgen000\n"
for num, line in enumerate(lines):
out += "alias \"navgen%03i\" \"%s;bind KP_PLUS navgen%03i\"\n"%(num,";".join(line),num+1)
out += "alias \"navgen%03i\" \"echo Finished\"\n"%len(lines)
return out
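# A hedged usage sketch (hypothetical .vmf filenames; assumes valid VMF input):
#
#   base = MapTile(); base.fromfile("base.vmf")
#   other = MapTile(); other.fromfile("room.vmf")
#   for direction, doors, otherDoors in base.findConnections(other):
#       connection = (direction, doors[0], otherDoors[0])
#       vectors = base.findPortalsAndVector(other, connection)
#       if vectors[0] is not None:
#           base.append(other, connection, vectors)
#           break
#   base.close()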
|
740619537/L4D2-RMG
|
GENERATOR/MapTile.py
|
Python
|
gpl-2.0
| 14,257
|
# Copyright (C) 2013 Linaro Limited
#
# Author: Antonio Terceiro <antonio.terceiro@linaro.org>
#
# This file is part of LAVA Dispatcher.
#
# LAVA Dispatcher is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# LAVA Dispatcher is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses>.
import contextlib
import logging
import os
from lava_dispatcher.device.target import Target
import lava_dispatcher.device.dummy_drivers as drivers
from lava_dispatcher.errors import (
CriticalError,
)
class DummyTarget(Target):
def __init__(self, context, config):
super(DummyTarget, self).__init__(context, config)
driver = self.config.dummy_driver
if driver is None:
raise CriticalError(
"Required configuration entry missing: dummy_driver")
driver_class = getattr(drivers, driver)
self.driver = driver_class(self)
def power_on(self):
proc = self.driver.connect()
proc.sendline("")
proc.sendline('export PS1="%s"' % self.tester_ps1)
return proc
def power_off(self, proc):
super(DummyTarget, self).power_off(proc)
self.driver.finalize(proc)
@contextlib.contextmanager
def file_system(self, partition, directory):
with self.driver.root() as root:
logging.debug("Accessing the file system at %s", root)
dest = root + directory
if not os.path.exists(dest):
os.makedirs(dest)
yield dest
target_class = DummyTarget
|
inwotep/lava-dispatcher
|
lava_dispatcher/device/dummy.py
|
Python
|
gpl-2.0
| 2,023
|
import os
import sys
from ConfigParser import SafeConfigParser
from interfaces.singleton import Singleton
class ConfigManager(object):
""" Configuration Manager Singleton class."""
# Singleton with metaclass:
__metaclass__ = Singleton
def __init__(self):
# http://stackoverflow.com/a/4060259
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
self.base_folder = __location__ + '/'
config_file_path = os.path.join(__location__, 'bot.conf')
# print "Config file path", config_file_path
default_config_value = ["[pyircbot]\n",
"server = irc.freenode.net\n",
"port = 6667\n",
"channel = pyircbotroom\n",
"nick = pyircbot\n",
"greeting = 100\n",
"timezone = Europe/Rome\n"]
# If the config file is not present try to restore it
try:
if not os.path.exists(config_file_path):
conf_file = open(config_file_path, "w")
conf_file.writelines(default_config_value)
conf_file.close()
except IOError as error:
sys.exit(error)
self._config_parser = SafeConfigParser()
self._config_parser.readfp(open(config_file_path))
# config options
self._server_address = self._config_parser.get('pyircbot', 'server')
self._server_port = self._config_parser.get('pyircbot', 'port')
self._channel = self._config_parser.get('pyircbot', 'channel')
self._bot_nick = self._config_parser.get('pyircbot', 'nick')
self.greeting_probability = self._config_parser.get('pyircbot', 'greeting')
self._timezone = self._config_parser.get('pyircbot', 'timezone')
self._verbose = False
self._update_data_path()
def _update_data_path(self):
"""Internal method called to update information about data folder. Data folder path depends on channel name."""
self._data_path = self.base_folder + self.channel + "_data/"
self.stateful_data_path = self.base_folder + "stateful_data/"
self.greetings_file_path = self.stateful_data_path + "greetings.txt"
# Decorators properties
@property
def server_address(self):
return self._server_address
@server_address.setter
def server_address(self, value):
self._server_address = value
@property
def server_port(self):
return int(self._server_port)
@server_port.setter
def server_port(self, value):
self._server_port = value
@property
def channel(self):
return self._channel
@channel.setter
def channel(self, value):
self._channel = value
self._update_data_path()
@property
def bot_nick(self):
return self._bot_nick
@bot_nick.setter
def bot_nick(self, value):
self._bot_nick = value
@property
def data_path(self):
return self._data_path
@property
def verbose(self):
return self._verbose
@verbose.setter
def verbose(self, bool_value):
self._verbose = bool_value
@property
def timezone(self):
return self._timezone
@timezone.setter
def timezone(self, value):
self._timezone = value
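# Minimal usage sketch (not part of the original module): because of the
# Singleton metaclass, every instantiation returns the same object.
if __name__ == "__main__":
    first = ConfigManager()
    second = ConfigManager()
    assert first is second  # same instance both times
    print first.server_address, first.server_port, first.channel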
|
zencoders/pyircbot
|
config.py
|
Python
|
gpl-2.0
| 3,417
|
from django.conf.urls import patterns, include, url
from .views import HomeView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', HomeView.as_view(), name='home'),
url(r'^leagues/', include('leagues.urls', namespace='leagues')),
url(r'^teams/', include('teams.urls', namespace='teams')),
url(r'^games/', include('games.urls', namespace='games')),
url(r'^admin/', include(admin.site.urls)),
)
|
LuanP/futebolistica
|
futebolistica/futebolistica/urls.py
|
Python
|
gpl-2.0
| 525
|
#!/usr/bin/env python2
import argparse
import json
import math
import os
parser = argparse.ArgumentParser()
parser.add_argument('out_dir')
parser.add_argument('json_file', nargs='+', type=argparse.FileType('r'))
parser.add_argument('iters_per_job', type=int)
args = parser.parse_args()
cases = []
max_iter = []
for fin in args.json_file:
case = json.load(fin)
cases.append(case)
assert('configs' in case and len(case['configs']) == 2)
max_iter.append((int(case['configs'][0]['Integrator']['maxIter']),
int(case['configs'][1]['Integrator']['maxIter'])))
total_max_iter = max([max(a,b) for (a,b) in max_iter])
num_jobs = int(math.ceil(float(total_max_iter) / args.iters_per_job))
for i in range(0, num_jobs):
os.mkdir('job%d' % i)
os.mkdir('%s/job%d' % (args.out_dir, i))
for j in range(0, len(cases)):
for k in range(0, 2):
iter_i = min(i * args.iters_per_job, max_iter[j][k])
iter_i_1 = min((i+1) * args.iters_per_job, max_iter[j][k])
config = cases[j]['configs'][k]
dt = float(config['Integrator']['fixedDeltaTime'])
config['Integrator']['startIter'] = iter_i
config['Integrator']['startTime'] = iter_i * dt
config['Integrator']['maxIter'] = iter_i_1
if i > 0:
config['Flow']['initCase'] = 'Restart'
config['Flow']['restartDir'] = '%s/job%d/sample%s/fluid_iter%010d' % (args.out_dir, i-1, 2*j+k, iter_i)
config['Particles']['initCase'] = 'Restart'
config['Particles']['restartDir'] = '%s/job%d/sample%s/particles_iter%010d' % (args.out_dir, i-1, 2*j+k, iter_i)
with open('job%d/case%d.json' % (i, j), 'w') as fout:
json.dump(cases[j], fout, indent=4)
print "Testcases created; when you're ready, run:"
for i in range(0, num_jobs):
after_str = 'AFTER=$JOBID ' if i > 0 else ''
print 'JOBOUT=`%sRANKS_PER_NODE=4 $SOLEIL_DIR/src/soleil.sh $(echo -m\ job%d/case{0..%d}.json) -o %s/job%d`; JOBID="${JOBOUT//[!0-9]/}"; echo $JOBID' % (after_str, i, len(cases) - 1, args.out_dir, i)
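# Example invocation (hypothetical paths and iteration count):
#
#   ./chain_jobs.py /scratch/run case0.json case1.json 1000
#
# This creates job0/, job1/, ... with per-job case*.json files, each covering
# at most 1000 iterations and restarting from the previous job's output.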
|
stanfordhpccenter/soleil-x
|
testcases/hit_to_openchannel/chain_jobs.py
|
Python
|
gpl-2.0
| 2,127
|
# coding: utf-8
import json
from os import listdir
from os.path import isfile, join
class Template(object):
def __init__(self, application):
self.application = application
self.templates = {}
exposed = True
def load(self, template_dir):
for file in listdir(template_dir):
abs_file = join(template_dir, file)
if isfile(abs_file) and file.endswith(".html"):
with open(abs_file, "r") as f:
self.templates[file[:-5]] = f.read()
def GET(self):
self.load(self.application.application_dir + "/templates")
return json.dumps({"templates": self.templates})
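# Hedged usage sketch (hypothetical `app` object exposing `application_dir`):
#
#   t = Template(app)
#   body = t.GET()  # JSON string: {"templates": {"<name>": "<html>", ...}}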
# EOF
|
DarkLuk42/hn-ias-race
|
app/resources/template.py
|
Python
|
gpl-2.0
| 674
|
import os
import sys
import unittest
import urllib
if sys.version_info[0] < 3:
import urllib2
else:
import urllib.request as urllib2
from ..ext import resources
class SDL2ExtResourcesTest(unittest.TestCase):
__tags__ = ["sdl2ext"]
def test_open_zipfile(self):
fpath = os.path.join(os.path.dirname(__file__), "resources")
zfile = os.path.join(fpath, "resources.zip")
# resources.zip is a packed version of resources/, which at
# least contains
#
# resources/rwopstest.txt
# resources/surfacetest.bmp
resfile = resources.open_zipfile(zfile, "rwopstest.txt", "resources")
self.assertIsNotNone(resfile)
resfile = resources.open_zipfile(zfile, "resources/rwopstest.txt")
self.assertIsNotNone(resfile)
self.assertRaises(KeyError, resources.open_zipfile, zfile, "invalid")
self.assertRaises(KeyError, resources.open_zipfile, zfile, None)
self.assertRaises(KeyError, resources.open_zipfile, zfile,
"rwopstest.txt", "data")
self.assertRaises(KeyError, resources.open_zipfile, zfile,
"rwopstest.txt", 1234)
self.assertRaises(KeyError, resources.open_zipfile, zfile,
None, None)
self.assertRaises(TypeError, resources.open_zipfile, None,
"rwopstest.txt")
self.assertRaises(TypeError, resources.open_zipfile, None, None)
self.assertRaises(TypeError, resources.open_zipfile, None,
"rwopstest.txt", "resources")
def test_open_tarfile(self):
fpath = os.path.join(os.path.dirname(__file__), "resources")
tfile = os.path.join(fpath, "resources.tar.gz")
# resources.tar.gz is a packed version of resources/, which at
# least contains
#
# resources/rwopstest.txt
# resources/surfacetest.bmp
resfile = resources.open_tarfile(tfile, "rwopstest.txt", "resources")
self.assertIsNotNone(resfile)
resfile = resources.open_tarfile(tfile, "resources/rwopstest.txt")
self.assertIsNotNone(resfile)
# TODO: refine the error handling in open_tarfile()
self.assertRaises(KeyError, resources.open_tarfile, tfile, "invalid")
self.assertRaises(AttributeError, resources.open_tarfile, tfile, None)
self.assertRaises(KeyError, resources.open_tarfile, tfile,
"rwopstest.txt", "data")
self.assertRaises(KeyError, resources.open_tarfile, tfile,
"rwopstest.txt", 1234)
self.assertRaises(AttributeError, resources.open_tarfile, tfile,
None, None)
self.assertRaises(ValueError, resources.open_tarfile, None,
"rwopstest.txt")
self.assertRaises(ValueError, resources.open_tarfile, None, None)
self.assertRaises(ValueError, resources.open_tarfile, None,
"rwopstest.txt", "resources")
def test_open_url(self):
if sys.version_info[0] < 3:
p2url = urllib.pathname2url
else:
p2url = urllib2.pathname2url
fpath = os.path.join(os.path.dirname(__file__), "resources")
fpath = os.path.abspath(fpath)
tfile = os.path.join(fpath, "rwopstest.txt")
urlpath = "file:%s" % p2url(tfile)
resfile = resources.open_url(urlpath)
self.assertIsNotNone(resfile)
tfile = os.path.join(fpath, "invalid")
urlpath = "file:%s" % p2url(tfile)
self.assertRaises(urllib2.URLError, resources.open_url, urlpath)
@unittest.skipIf(sys.platform=="cli", "IronPython's tarfile module is broken")
def test_Resources(self):
self.assertRaises(ValueError, resources.Resources, "invalid")
res = resources.Resources()
self.assertIsInstance(res, resources.Resources)
self.assertRaises(KeyError, res.get, "surfacetest.bmp")
fpath = os.path.join(os.path.dirname(__file__), "resources")
res = resources.Resources(fpath)
self.assertIsNotNone(res.get("rwopstest.txt"))
self.assertIsNotNone(res.get("surfacetest.bmp"))
res2 = resources.Resources(__file__)
self.assertIsNotNone(res2.get("rwopstest.txt"))
self.assertIsNotNone(res2.get("surfacetest.bmp"))
res3 = resources.Resources(__file__, "resources")
self.assertIsNotNone(res3.get("rwopstest.txt"))
self.assertIsNotNone(res3.get("surfacetest.bmp"))
@unittest.skipIf(sys.platform=="cli", "IronPython's tarfile module is broken")
def test_Resources_add(self):
fpath = os.path.join(os.path.dirname(__file__), "resources")
sfile = os.path.join(fpath, "surfacetest.bmp")
zfile = os.path.join(fpath, "resources.zip")
res = resources.Resources()
res.add(sfile)
self.assertRaises(KeyError, res.get, "rwopstest.txt")
self.assertIsNotNone(res.get("surfacetest.bmp"))
res.add(zfile)
self.assertIsNotNone(res.get("rwopstest.txt"))
self.assertIsNotNone(res.get("surfacetest.bmp"))
self.assertRaises(TypeError, res.add, None)
self.assertRaises(ValueError, res.add, "invalid_name.txt")
def test_Resources_add_file(self):
fpath = os.path.join(os.path.dirname(__file__), "resources")
sfile = os.path.join(fpath, "surfacetest.bmp")
zfile = os.path.join(fpath, "resources.zip")
res = resources.Resources()
res.add_file(sfile)
res.add_file(zfile)
self.assertRaises(KeyError, res.get, "rwopstest.txt")
self.assertIsNotNone(res.get("surfacetest.bmp"))
self.assertIsNotNone(res.get("resources.zip"))
self.assertRaises(TypeError, res.add_file, None)
self.assertRaises(ValueError, res.add_file, "invalid_name.txt")
def test_Resources_add_archive(self):
fpath = os.path.join(os.path.dirname(__file__), "resources")
zfile = os.path.join(fpath, "resources.zip")
tfile = os.path.join(fpath, "resources.tar.gz")
res = resources.Resources()
res.add_archive(zfile)
self.assertIsNotNone(res.get("surfacetest.bmp"))
self.assertIsNotNone(res.get("rwopstest.txt"))
self.assertRaises(KeyError, res.get, "resources.zip")
self.assertRaises(TypeError, res.add_archive, None)
self.assertRaises(ValueError, res.add_archive, "invalid_name.txt")
res = resources.Resources()
res.add_archive(tfile, typehint="targz")
self.assertIsNotNone(res.get("surfacetest.bmp"))
self.assertIsNotNone(res.get("rwopstest.txt"))
self.assertRaises(KeyError, res.get, "resources.tar.gz")
@unittest.skipIf(sys.platform=="cli", "IronPython's tarfile module is broken")
def test_Resources_get(self):
fpath = os.path.join(os.path.dirname(__file__), "resources")
for path in (fpath, None):
res = resources.Resources(path)
self.assertRaises(KeyError, res.get, "invalid_file.txt")
self.assertRaises(KeyError, res.get, None)
self.assertRaises(KeyError, res.get, 123456)
if path is None:
self.assertRaises(KeyError, res.get, "surfacetest.bmp")
self.assertRaises(KeyError, res.get, "rwopstest.txt")
else:
self.assertIsNotNone(res.get("surfacetest.bmp"))
self.assertIsNotNone(res.get("rwopstest.txt"))
@unittest.skipIf(sys.platform=="cli", "IronPython's tarfile module is broken")
def test_Resources_get_filelike(self):
fpath = os.path.join(os.path.dirname(__file__), "resources")
zfile = os.path.join(fpath, "resources.zip")
pfile = os.path.join(fpath, "rwopstest.txt")
res = resources.Resources()
res.add(zfile)
v1 = res.get_filelike("rwopstest.txt")
v2 = res.get_filelike("surfacetest.bmp")
self.assertEqual(type(v1), type(v2))
res.add(pfile)
v1 = res.get_filelike("rwopstest.txt")
v2 = res.get_filelike("surfacetest.bmp")
self.assertNotEqual(type(v1), type(v2))
self.assertRaises(KeyError, res.get_filelike, None)
self.assertRaises(KeyError, res.get_filelike, "invalid")
self.assertRaises(KeyError, res.get_filelike, 1234)
@unittest.skipIf(sys.platform=="cli", "IronPython's tarfile module is broken")
def test_Resources_get_path(self):
fpath = os.path.join(os.path.dirname(__file__), "resources")
zfile = os.path.join(fpath, "resources.zip")
pfile = os.path.join(fpath, "rwopstest.txt")
res = resources.Resources()
res.add(zfile)
res.add(pfile)
zpath = res.get_path("surfacetest.bmp")
self.assertTrue(zpath.find("surfacetest.bmp@") != -1)
self.assertNotEqual(zpath, zfile)
ppath = res.get_path("rwopstest.txt")
self.assertTrue(ppath.find("rwopstest.txt") != -1)
self.assertRaises(KeyError, res.get_path, None)
self.assertRaises(KeyError, res.get_path, "invalid")
self.assertRaises(KeyError, res.get_path, 1234)
@unittest.skipIf(sys.platform=="cli", "IronPython's tarfile module is broken")
def test_Resources_scan(self):
fpath = os.path.join(os.path.dirname(__file__))
res = resources.Resources()
res.scan(fpath)
self.assertIsNotNone(res.get("rwopstest.txt"))
self.assertIsNotNone(res.get("surfacetest.bmp"))
self.assertRaises(ValueError, res.scan, "invalid")
self.assertRaises(ValueError, res.scan, fpath, "invalid")
self.assertRaises(Exception, res.scan, 12345)
res = resources.Resources()
res.scan(fpath, "resources")
self.assertIsNotNone(res.get("rwopstest.txt"))
self.assertIsNotNone(res.get("surfacetest.bmp"))
if __name__ == '__main__':
sys.exit(unittest.main())
|
m1trix/Tetris-Wars
|
tetris_wars/sdl2/test/sdl2ext_resources_test.py
|
Python
|
gpl-2.0
| 9,970
|
import paramiko
from paramiko.client import SSHClient
from fabric.api import env  # assumed: the `env` used below comes from Fabric 1.x
def test_credentials(hostname, username, password, port):
""" Returns True if the credentials work
"""
client = SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
client.connect(hostname, username=username, password=password,
port=port)
return True
except Exception as e:
print(e)
return False
class connect(object):
def __init__(self, server, username=None, password=None, port=22):
print("Connecting to %s" % server)
self.server = server
self.username = username
self.password = password
self.port = port
def __enter__(self):
# connect to the Server
env["hosts"] = [self.server.address]
if self.username and self.password:
env["user"] = self.username
env["password"] = self.password
else:
env["key_filename"] = get_pem_filename()
env["port"] = self.port
def __exit__(self, *args, **kwargs):
pass
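# Hedged usage sketch (hypothetical server object with an `address` attribute
# and hypothetical credentials):
#
#   if test_credentials("host.example.com", "deploy", "secret", 22):
#       with connect(server, username="deploy", password="secret"):
#           pass  # fabric's env is now populated for subsequent tasks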
|
codedbyjay/django-branches
|
helpers.py
|
Python
|
gpl-2.0
| 1,037
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'ladder',
'USER': 'ladder',
'PASSWORD': 'hp4_fkh=a(64x',
'HOST': '',
'PORT': '',
},
}
DATABASE_ROUTERS = [
]
|
OpenTTD-Ladder/ladder-web
|
ladder/ladder/settings/databases.py
|
Python
|
gpl-2.0
| 280
|
import operator
import numpy as np
import pytest
from pandas.core.dtypes.common import is_bool_dtype
import pandas as pd
import pandas._testing as tm
from pandas.core.sorting import nargsort
from .base import BaseExtensionTests
class BaseMethodsTests(BaseExtensionTests):
"""Various Series and DataFrame methods."""
@pytest.mark.parametrize("dropna", [True, False])
def test_value_counts(self, all_data, dropna):
all_data = all_data[:10]
if dropna:
other = np.array(all_data[~all_data.isna()])
else:
other = all_data
result = pd.Series(all_data).value_counts(dropna=dropna).sort_index()
expected = pd.Series(other).value_counts(dropna=dropna).sort_index()
self.assert_series_equal(result, expected)
def test_value_counts_with_normalize(self, data):
# GH 33172
data = data[:10].unique()
values = np.array(data[~data.isna()])
result = (
pd.Series(data, dtype=data.dtype).value_counts(normalize=True).sort_index()
)
expected = pd.Series([1 / len(values)] * len(values), index=result.index)
self.assert_series_equal(result, expected)
def test_count(self, data_missing):
df = pd.DataFrame({"A": data_missing})
result = df.count(axis="columns")
expected = pd.Series([0, 1])
self.assert_series_equal(result, expected)
def test_series_count(self, data_missing):
# GH#26835
ser = pd.Series(data_missing)
result = ser.count()
expected = 1
assert result == expected
def test_apply_simple_series(self, data):
result = pd.Series(data).apply(id)
assert isinstance(result, pd.Series)
def test_argsort(self, data_for_sorting):
result = pd.Series(data_for_sorting).argsort()
expected = pd.Series(np.array([2, 0, 1], dtype=np.int64))
self.assert_series_equal(result, expected)
def test_argsort_missing_array(self, data_missing_for_sorting):
result = data_missing_for_sorting.argsort()
expected = np.array([2, 0, 1], dtype=np.dtype("int"))
# we don't care whether it's int32 or int64
result = result.astype("int64", casting="safe")
expected = expected.astype("int64", casting="safe")
tm.assert_numpy_array_equal(result, expected)
def test_argsort_missing(self, data_missing_for_sorting):
result = pd.Series(data_missing_for_sorting).argsort()
expected = pd.Series(np.array([1, -1, 0], dtype=np.int64))
self.assert_series_equal(result, expected)
def test_argmin_argmax(self, data_for_sorting, data_missing_for_sorting, na_value):
# GH 24382
# data_for_sorting -> [B, C, A] with A < B < C
assert data_for_sorting.argmax() == 1
assert data_for_sorting.argmin() == 2
# with repeated values -> first occurrence
data = data_for_sorting.take([2, 0, 0, 1, 1, 2])
assert data.argmax() == 3
assert data.argmin() == 0
# with missing values
# data_missing_for_sorting -> [B, NA, A] with A < B and NA missing.
assert data_missing_for_sorting.argmax() == 0
assert data_missing_for_sorting.argmin() == 2
@pytest.mark.parametrize("method", ["argmax", "argmin"])
def test_argmin_argmax_empty_array(self, method, data):
# GH 24382
err_msg = "attempt to get"
with pytest.raises(ValueError, match=err_msg):
getattr(data[:0], method)()
@pytest.mark.parametrize("method", ["argmax", "argmin"])
def test_argmin_argmax_all_na(self, method, data, na_value):
# all missing with skipna=True is the same as empty
err_msg = "attempt to get"
data_na = type(data)._from_sequence([na_value, na_value], dtype=data.dtype)
with pytest.raises(ValueError, match=err_msg):
getattr(data_na, method)()
@pytest.mark.parametrize(
"na_position, expected",
[
("last", np.array([2, 0, 1], dtype=np.dtype("intp"))),
("first", np.array([1, 2, 0], dtype=np.dtype("intp"))),
],
)
def test_nargsort(self, data_missing_for_sorting, na_position, expected):
# GH 25439
result = nargsort(data_missing_for_sorting, na_position=na_position)
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("ascending", [True, False])
def test_sort_values(self, data_for_sorting, ascending, sort_by_key):
ser = pd.Series(data_for_sorting)
result = ser.sort_values(ascending=ascending, key=sort_by_key)
expected = ser.iloc[[2, 0, 1]]
if not ascending:
# GH 35922. Expect stable sort
if ser.nunique() == 2:
expected = ser.iloc[[0, 1, 2]]
else:
expected = ser.iloc[[1, 0, 2]]
self.assert_series_equal(result, expected)
@pytest.mark.parametrize("ascending", [True, False])
def test_sort_values_missing(
self, data_missing_for_sorting, ascending, sort_by_key
):
ser = pd.Series(data_missing_for_sorting)
result = ser.sort_values(ascending=ascending, key=sort_by_key)
if ascending:
expected = ser.iloc[[2, 0, 1]]
else:
expected = ser.iloc[[0, 2, 1]]
self.assert_series_equal(result, expected)
@pytest.mark.parametrize("ascending", [True, False])
def test_sort_values_frame(self, data_for_sorting, ascending):
df = pd.DataFrame({"A": [1, 2, 1], "B": data_for_sorting})
result = df.sort_values(["A", "B"])
expected = pd.DataFrame(
{"A": [1, 1, 2], "B": data_for_sorting.take([2, 0, 1])}, index=[2, 0, 1]
)
self.assert_frame_equal(result, expected)
@pytest.mark.parametrize("box", [pd.Series, lambda x: x])
@pytest.mark.parametrize("method", [lambda x: x.unique(), pd.unique])
def test_unique(self, data, box, method):
duplicated = box(data._from_sequence([data[0], data[0]]))
result = method(duplicated)
assert len(result) == 1
assert isinstance(result, type(data))
assert result[0] == duplicated[0]
@pytest.mark.parametrize("na_sentinel", [-1, -2])
def test_factorize(self, data_for_grouping, na_sentinel):
codes, uniques = pd.factorize(data_for_grouping, na_sentinel=na_sentinel)
expected_codes = np.array(
[0, 0, na_sentinel, na_sentinel, 1, 1, 0, 2], dtype=np.intp
)
expected_uniques = data_for_grouping.take([0, 4, 7])
tm.assert_numpy_array_equal(codes, expected_codes)
self.assert_extension_array_equal(uniques, expected_uniques)
@pytest.mark.parametrize("na_sentinel", [-1, -2])
def test_factorize_equivalence(self, data_for_grouping, na_sentinel):
codes_1, uniques_1 = pd.factorize(data_for_grouping, na_sentinel=na_sentinel)
codes_2, uniques_2 = data_for_grouping.factorize(na_sentinel=na_sentinel)
tm.assert_numpy_array_equal(codes_1, codes_2)
self.assert_extension_array_equal(uniques_1, uniques_2)
assert len(uniques_1) == len(pd.unique(uniques_1))
assert uniques_1.dtype == data_for_grouping.dtype
def test_factorize_empty(self, data):
codes, uniques = pd.factorize(data[:0])
expected_codes = np.array([], dtype=np.intp)
expected_uniques = type(data)._from_sequence([], dtype=data[:0].dtype)
tm.assert_numpy_array_equal(codes, expected_codes)
self.assert_extension_array_equal(uniques, expected_uniques)
def test_fillna_copy_frame(self, data_missing):
arr = data_missing.take([1, 1])
df = pd.DataFrame({"A": arr})
filled_val = df.iloc[0, 0]
result = df.fillna(filled_val)
assert df.A.values is not result.A.values
def test_fillna_copy_series(self, data_missing):
arr = data_missing.take([1, 1])
ser = pd.Series(arr)
filled_val = ser[0]
result = ser.fillna(filled_val)
assert ser._values is not result._values
assert ser._values is arr
def test_fillna_length_mismatch(self, data_missing):
msg = "Length of 'value' does not match."
with pytest.raises(ValueError, match=msg):
data_missing.fillna(data_missing.take([1]))
def test_combine_le(self, data_repeated):
# GH 20825
# Test that combine works when doing a <= (le) comparison
orig_data1, orig_data2 = data_repeated(2)
s1 = pd.Series(orig_data1)
s2 = pd.Series(orig_data2)
result = s1.combine(s2, lambda x1, x2: x1 <= x2)
expected = pd.Series(
[a <= b for (a, b) in zip(list(orig_data1), list(orig_data2))]
)
self.assert_series_equal(result, expected)
val = s1.iloc[0]
result = s1.combine(val, lambda x1, x2: x1 <= x2)
expected = pd.Series([a <= val for a in list(orig_data1)])
self.assert_series_equal(result, expected)
def test_combine_add(self, data_repeated):
# GH 20825
orig_data1, orig_data2 = data_repeated(2)
s1 = pd.Series(orig_data1)
s2 = pd.Series(orig_data2)
result = s1.combine(s2, lambda x1, x2: x1 + x2)
with np.errstate(over="ignore"):
expected = pd.Series(
orig_data1._from_sequence(
[a + b for (a, b) in zip(list(orig_data1), list(orig_data2))]
)
)
self.assert_series_equal(result, expected)
val = s1.iloc[0]
result = s1.combine(val, lambda x1, x2: x1 + x2)
expected = pd.Series(
orig_data1._from_sequence([a + val for a in list(orig_data1)])
)
self.assert_series_equal(result, expected)
def test_combine_first(self, data):
# https://github.com/pandas-dev/pandas/issues/24147
a = pd.Series(data[:3])
b = pd.Series(data[2:5], index=[2, 3, 4])
result = a.combine_first(b)
expected = pd.Series(data[:5])
self.assert_series_equal(result, expected)
@pytest.mark.parametrize("frame", [True, False])
@pytest.mark.parametrize(
"periods, indices",
[(-2, [2, 3, 4, -1, -1]), (0, [0, 1, 2, 3, 4]), (2, [-1, -1, 0, 1, 2])],
)
def test_container_shift(self, data, frame, periods, indices):
# https://github.com/pandas-dev/pandas/issues/22386
subset = data[:5]
data = pd.Series(subset, name="A")
expected = pd.Series(subset.take(indices, allow_fill=True), name="A")
if frame:
result = data.to_frame(name="A").assign(B=1).shift(periods)
expected = pd.concat(
[expected, pd.Series([1] * 5, name="B").shift(periods)], axis=1
)
compare = self.assert_frame_equal
else:
result = data.shift(periods)
compare = self.assert_series_equal
compare(result, expected)
def test_shift_0_periods(self, data):
# GH#33856 shifting with periods=0 should return a copy, not same obj
result = data.shift(0)
assert data[0] != data[1] # otherwise below is invalid
data[0] = data[1]
assert result[0] != result[1] # i.e. not the same object/view
@pytest.mark.parametrize("periods", [1, -2])
def test_diff(self, data, periods):
data = data[:5]
if is_bool_dtype(data.dtype):
op = operator.xor
else:
op = operator.sub
try:
# does this array implement ops?
op(data, data)
except Exception:
pytest.skip(f"{type(data)} does not support diff")
s = pd.Series(data)
result = s.diff(periods)
expected = pd.Series(op(data, data.shift(periods)))
self.assert_series_equal(result, expected)
df = pd.DataFrame({"A": data, "B": [1.0] * 5})
result = df.diff(periods)
if periods == 1:
b = [np.nan, 0, 0, 0, 0]
else:
b = [0, 0, 0, np.nan, np.nan]
expected = pd.DataFrame({"A": expected, "B": b})
self.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"periods, indices",
[[-4, [-1, -1]], [-1, [1, -1]], [0, [0, 1]], [1, [-1, 0]], [4, [-1, -1]]],
)
def test_shift_non_empty_array(self, data, periods, indices):
# https://github.com/pandas-dev/pandas/issues/23911
subset = data[:2]
result = subset.shift(periods)
expected = subset.take(indices, allow_fill=True)
self.assert_extension_array_equal(result, expected)
@pytest.mark.parametrize("periods", [-4, -1, 0, 1, 4])
def test_shift_empty_array(self, data, periods):
# https://github.com/pandas-dev/pandas/issues/23911
empty = data[:0]
result = empty.shift(periods)
expected = empty
self.assert_extension_array_equal(result, expected)
def test_shift_zero_copies(self, data):
result = data.shift(0)
assert result is not data
result = data[:0].shift(2)
assert result is not data
def test_shift_fill_value(self, data):
arr = data[:4]
fill_value = data[0]
result = arr.shift(1, fill_value=fill_value)
expected = data.take([0, 0, 1, 2])
self.assert_extension_array_equal(result, expected)
result = arr.shift(-2, fill_value=fill_value)
expected = data.take([2, 3, 0, 0])
self.assert_extension_array_equal(result, expected)
def test_not_hashable(self, data):
# We are in general mutable, so not hashable
with pytest.raises(TypeError, match="unhashable type"):
hash(data)
def test_hash_pandas_object_works(self, data, as_frame):
# https://github.com/pandas-dev/pandas/issues/23066
data = pd.Series(data)
if as_frame:
data = data.to_frame()
a = pd.util.hash_pandas_object(data)
b = pd.util.hash_pandas_object(data)
self.assert_equal(a, b)
def test_searchsorted(self, data_for_sorting, as_series):
b, c, a = data_for_sorting
arr = type(data_for_sorting)._from_sequence([a, b, c])
if as_series:
arr = pd.Series(arr)
assert arr.searchsorted(a) == 0
assert arr.searchsorted(a, side="right") == 1
assert arr.searchsorted(b) == 1
assert arr.searchsorted(b, side="right") == 2
assert arr.searchsorted(c) == 2
assert arr.searchsorted(c, side="right") == 3
result = arr.searchsorted(arr.take([0, 2]))
expected = np.array([0, 2], dtype=np.intp)
tm.assert_numpy_array_equal(result, expected)
# sorter
sorter = np.array([1, 2, 0])
assert data_for_sorting.searchsorted(a, sorter=sorter) == 0
def test_where_series(self, data, na_value, as_frame):
assert data[0] != data[1]
cls = type(data)
a, b = data[:2]
ser = pd.Series(cls._from_sequence([a, a, b, b], dtype=data.dtype))
cond = np.array([True, True, False, False])
if as_frame:
ser = ser.to_frame(name="a")
cond = cond.reshape(-1, 1)
result = ser.where(cond)
expected = pd.Series(
cls._from_sequence([a, a, na_value, na_value], dtype=data.dtype)
)
if as_frame:
expected = expected.to_frame(name="a")
self.assert_equal(result, expected)
# array other
cond = np.array([True, False, True, True])
other = cls._from_sequence([a, b, a, b], dtype=data.dtype)
if as_frame:
other = pd.DataFrame({"a": other})
cond = pd.DataFrame({"a": cond})
result = ser.where(cond, other)
expected = pd.Series(cls._from_sequence([a, b, b, b], dtype=data.dtype))
if as_frame:
expected = expected.to_frame(name="a")
self.assert_equal(result, expected)
@pytest.mark.parametrize("repeats", [0, 1, 2, [1, 2, 3]])
def test_repeat(self, data, repeats, as_series, use_numpy):
arr = type(data)._from_sequence(data[:3], dtype=data.dtype)
if as_series:
arr = pd.Series(arr)
result = np.repeat(arr, repeats) if use_numpy else arr.repeat(repeats)
repeats = [repeats] * 3 if isinstance(repeats, int) else repeats
expected = [x for x, n in zip(arr, repeats) for _ in range(n)]
expected = type(data)._from_sequence(expected, dtype=data.dtype)
if as_series:
expected = pd.Series(expected, index=arr.index.repeat(repeats))
self.assert_equal(result, expected)
@pytest.mark.parametrize(
"repeats, kwargs, error, msg",
[
(2, {"axis": 1}, ValueError, "axis"),
(-1, {}, ValueError, "negative"),
([1, 2], {}, ValueError, "shape"),
(2, {"foo": "bar"}, TypeError, "'foo'"),
],
)
def test_repeat_raises(self, data, repeats, kwargs, error, msg, use_numpy):
with pytest.raises(error, match=msg):
if use_numpy:
np.repeat(data, repeats, **kwargs)
else:
data.repeat(repeats, **kwargs)
@pytest.mark.parametrize("box", [pd.array, pd.Series, pd.DataFrame])
def test_equals(self, data, na_value, as_series, box):
data2 = type(data)._from_sequence([data[0]] * len(data), dtype=data.dtype)
data_na = type(data)._from_sequence([na_value] * len(data), dtype=data.dtype)
data = tm.box_expected(data, box, transpose=False)
data2 = tm.box_expected(data2, box, transpose=False)
data_na = tm.box_expected(data_na, box, transpose=False)
# we are asserting with `is True/False` explicitly, to test that the
# result is an actual Python bool, and not something "truthy"
assert data.equals(data) is True
assert data.equals(data.copy()) is True
# unequal other data
assert data.equals(data2) is False
assert data.equals(data_na) is False
# different length
assert data[:2].equals(data[:3]) is False
# empty are equal
assert data[:0].equals(data[:0]) is True
# other types
assert data.equals(None) is False
assert data[[0]].equals(data[0]) is False
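# Hedged usage sketch (hypothetical array type): a concrete extension-array
# test suite inherits from this class and supplies fixtures such as `data`,
# `data_missing` and `data_for_sorting` in its conftest.py:
#
#   class TestMyArrayMethods(BaseMethodsTests):
#       pass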
|
iproduct/course-social-robotics
|
11-dnn-keras/venv/Lib/site-packages/pandas/tests/extension/base/methods.py
|
Python
|
gpl-2.0
| 18,343
|
import urllib2
import subprocess,shlex
import commands
import simplejson as json
import re
import pymongo
from pymongo import ASCENDING, DESCENDING,MongoClient
import base64
### Grabs an apikey and password and b64 encodes them
def xfe_get_token(db):
return base64.b64encode(db.settings.find({'type':'IBM_X-Force_api_key'})[0]['value'] + ':' + db.settings.find({'type':'IBM_X-Force_api_password'})[0]['value'])
### Returns live and passive DNS records from IBM Xforce
def xfe_dns_check(xfe_token,domain):
url = "https://api.xforce.ibmcloud.com/resolve/%s " % domain
cmd = """curl -s %s -H 'Accept-Language: en-US,' -H 'Authorization: Bearer %s' -H 'Accept: application/json'""" % (url,xfe_token)
p = subprocess.Popen(shlex.split(cmd.encode('ascii')), stdout=subprocess.PIPE).communicate()[0]
dns_results=json.loads(p)
return dns_results
### Returns the IP report for the entered IP from IBM Xforce
def xfe_ip_rep_check(xfe_token,ip):
url = "https://api.xforce.ibmcloud.com/ipr/malware/%s " % ip
cmd = """curl -s %s -H 'Accept-Language: en-US,' -H 'Authorization: Bearer %s' -H 'Accept: application/json'""" % (url,xfe_token)
p = subprocess.Popen(shlex.split(cmd.encode('ascii')), stdout=subprocess.PIPE).communicate()[0]
ip_rep_results=json.loads(p)
return ip_rep_results
### Returns the URL report for the entered URL from IBM Xforce
def xfe_url_check(xfe_token,url):
url = "https://api.xforce.ibmcloud.com/url/%s" % re.sub(ur"\u2019",'',url)
htoken = "Basic "+ xfe_token
headers = {'Authorization': htoken,}
request = urllib2.Request(url, None, headers)
try:
data = urllib2.urlopen(request,timeout=30)
except urllib2.HTTPError as e:
if e.code != 404:
print "\033[31mERROR::=> %s \n %s \033[0m" % (e,url)
return {'error':'not found'}
except urllib2.URLError as e:
print "\033[31mERROR::=> %s \n %s \033[0m" % (e,url)
return {'error':'not found'}
except IOError as e:
print "\033[31mERROR::=> %s \n %s \033[0m" % (e,url)
return {'error':'not found'}
url_results = json.loads(data.read())
return url_results
### Returns the Internet Application Profiles (IAP) that needs to be fetched from IBM Xforce
def xfe_app_check(xfe_token,app):
url = "https://api.xforce.ibmcloud.com/app/%s" % app
cmd = """curl -s %s -H 'Accept-Language: en-US,' -H 'Authorization: Bearer %s' -H 'Accept: application/json'""" % (url,xfe_token)
p = subprocess.Popen(shlex.split(cmd.encode('ascii')), stdout=subprocess.PIPE).communicate()[0]
app_results=json.loads(p)
return app_results
### Returns a malware report for the given md5 from IBM Xforce
def xfe_malware_check(xfe_token,md5):
url = "https://api.xforce.ibmcloud.com/malware/%s" % md5
htoken = "Basic "+ xfe_token
headers = {'Authorization': htoken,}
request = urllib2.Request(url, None, headers)
try:
data = urllib2.urlopen(request)
except urllib2.HTTPError as e:
if e.code != 404:
print "\033[31mERROR::=> %s \n %s \033[0m" % (e,md5)
return {'error':'not found'}
except urllib2.URLError as e:
print "\033[31mERROR::=> %s \n %s \033[0m" % (e,md5)
return {'error':'not found'}
except IOError as e:
print "\033[31mERROR::=> %s \n %s \033[0m" % (e,md5)
return {'error':'not found'}
md5_results = json.loads(data.read())
return md5_results
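# Hedged usage sketch (hypothetical database name; assumes the settings
# collection holds the X-Force key/password documents read by xfe_get_token):
#
#   db = MongoClient().osxstrata
#   token = xfe_get_token(db)
#   print xfe_dns_check(token, "example.com")
#   print xfe_ip_rep_check(token, "203.0.113.10")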
|
wfsec/osxstrata
|
scripts/xforceMod.py
|
Python
|
gpl-2.0
| 3,254
|
# check_dns.py -- Returns OK if a hostname resolves to any ip address
# check_dns plugin will need some system libraries for DNS lookup
from __future__ import absolute_import
from _socket import gaierror
import socket
import time
# Import PluginHelper and some utility constants from the Plugins module
from pynag.Plugins import PluginHelper,ok,warning,critical,unknown
# Create an instance of PluginHelper()
my_plugin = PluginHelper()
# Our plugin will need -H and -a options, and we will use PluginHelper's wrapper around optparse for this:
my_plugin.parser.add_option('-H', help="Hostname or ip address", dest="hostname")
my_plugin.parser.add_option('-a', help="Expected Address", dest="address")
# When parse_arguments is called some default options like --threshold and --no-longoutput are automatically added
my_plugin.parse_arguments()
#
# Here starts Plugin-specific logic
#
# Get the hostname and expected address that were provided on the command-line
# address will be optional, but we will throw an error if hostname is not provided
hostname = my_plugin.options.hostname
address = my_plugin.options.address
if hostname is None:
my_plugin.parser.error('-H argument is required')
# Here comes the specific check logic
try:
start_time = time.time()
result = socket.gethostbyname( hostname ) # result will contain the ip address resolved
end_time = time.time()
# If no address was specified with -a, then we return
# OK if hostname resolved to anything at all
if address is None or address == result:
my_plugin.status(ok)
my_plugin.add_summary("%s resolves to %s" % (hostname, result))
else:
my_plugin.status(critical)
my_plugin.add_summary("%s resolves to %s but should resolve to %s" % (hostname,result,address))
# Add run_time metric, so we can also alert if lookup takes too long
run_time = end_time - start_time
my_plugin.add_metric('run_time', run_time)
except gaierror:
# If any exceptions happened in the code above, lets return a critical status
my_plugin.status(critical)
my_plugin.add_summary('Could not resolve host "%s"' % hostname )
# when check_all_metrics() is run, any metrics we have added with add_metric() will be processed against
# thresholds (like --threshold). This part will allow our plugin users to alert on run_time
my_plugin.check_all_metrics()
# Print status output and exit
my_plugin.exit()
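# Example invocations (hypothetical hostname and address):
#
#   python check_dns.py -H www.example.com
#   python check_dns.py -H www.example.com -a 93.184.216.34
#
# The second form goes critical when the hostname resolves to a different
# address; --threshold options can additionally alert on the run_time metric.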
|
pynag/pynag
|
examples/Plugins/check_dns.py
|
Python
|
gpl-2.0
| 2,435
|
from geopy.compat import text_type
from geopy.exc import GeocoderParseError
try:
import pytz
pytz_available = True
except ImportError:
pytz_available = False
__all__ = (
"Timezone",
)
def ensure_pytz_is_installed():
if not pytz_available:
raise ImportError(
'pytz must be installed in order to locate timezones. '
' Install with `pip install geopy -e ".[timezone]"`.'
)
def from_timezone_name(timezone_name, raw=None):
ensure_pytz_is_installed()
try:
pytz_timezone = pytz.timezone(timezone_name)
except pytz.UnknownTimeZoneError:
raise GeocoderParseError(
"pytz could not parse the timezone identifier (%s) "
"returned by the service." % timezone_name
)
except KeyError:
raise GeocoderParseError(
"geopy could not find a timezone in this response: %s" %
raw
)
return Timezone(pytz_timezone, raw)
def from_fixed_gmt_offset(gmt_offset_hours, raw=None):
ensure_pytz_is_installed()
pytz_timezone = pytz.FixedOffset(gmt_offset_hours * 60)
return Timezone(pytz_timezone, raw)
class Timezone(object):
"""
    Contains a parsed response for a timezone request, which is
    implemented in a few geocoders that provide such lookups.

    .. versionadded:: 1.18.0
"""
__slots__ = ("_pytz_timezone", "_raw")
def __init__(self, pytz_timezone, raw=None):
self._pytz_timezone = pytz_timezone
self._raw = raw
@property
def pytz_timezone(self):
"""
pytz timezone instance.
:rtype: :class:`pytz.tzinfo.BaseTzInfo`
"""
return self._pytz_timezone
@property
def raw(self):
"""
Timezone's raw, unparsed geocoder response. For details on this,
consult the service's documentation.
:rtype: dict or None
"""
return self._raw
def __unicode__(self):
return text_type(self._pytz_timezone)
__str__ = __unicode__
def __repr__(self):
return "Timezone(%s)" % repr(self.pytz_timezone)
def __getstate__(self):
return self._pytz_timezone, self._raw
def __setstate__(self, state):
self._pytz_timezone, self._raw = state
def __eq__(self, other):
return (
isinstance(other, Timezone) and
self._pytz_timezone == other._pytz_timezone and
self.raw == other.raw
)
def __ne__(self, other):
return not (self == other)
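# Illustrative usage sketch (editor's addition, not part of geopy): assumes
# pytz is installed; the timezone name is an example value.
#
#   tz = from_timezone_name("Europe/Paris")
#   print(tz.pytz_timezone)                # tzinfo for Europe/Paris
#   utc_plus_2 = from_fixed_gmt_offset(2)  # fixed UTC+2 offset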
|
phborba/dsgtoolsop
|
auxiliar/geopy/timezone.py
|
Python
|
gpl-2.0
| 2,534
|
import copy
import attr
from widgetastic.exceptions import NoSuchElementException
from widgetastic_patternfly import BootstrapSelect
from widgetastic_patternfly import Input
from wrapanapi.systems import RedfishSystem
from cfme.common.provider import DefaultEndpoint
from cfme.common.provider import DefaultEndpointForm
from cfme.exceptions import HostStatsNotContains
from cfme.exceptions import ProviderHasNoProperty
from cfme.exceptions import StatsDoNotMatch
from cfme.physical.physical_chassis import PhysicalChassis
from cfme.physical.physical_chassis import PhysicalChassisCollection
from cfme.physical.physical_rack import PhysicalRack
from cfme.physical.physical_rack import PhysicalRackCollection
from cfme.physical.physical_server import PhysicalServer
from cfme.physical.physical_server import PhysicalServerCollection
from cfme.physical.provider import PhysicalProvider
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.log import logger
from cfme.utils.varmeth import variable
class RedfishEndpoint(DefaultEndpoint):
api_port = 443
security_protocol = 'SSL'
@property
def view_value_mapping(self):
return {
'security_protocol': self.security_protocol,
'hostname': self.hostname,
'api_port': self.api_port
}
class RedfishEndpointForm(DefaultEndpointForm):
security_protocol = BootstrapSelect('default_security_protocol')
api_port = Input('default_api_port')
@attr.s(cmp=False)
class RedfishProvider(PhysicalProvider):
STATS_TO_MATCH = ['num_server', 'num_chassis', 'num_racks']
type_name = 'redfish'
endpoints_form = RedfishEndpointForm
string_name = 'Physical Infrastructure'
mgmt_class = RedfishSystem
refresh_text = "Refresh Relationships and Power States"
db_types = ["Redfish::PhysicalInfraManager"]
settings_key = 'ems_redfish'
log_name = 'redfish'
@property
def mgmt(self):
from cfme.utils.providers import get_mgmt
d = copy.deepcopy(self.data)
d['type'] = self.type_name
d['hostname'] = self.default_endpoint.hostname
d['api_port'] = self.default_endpoint.api_port
d['security_protocol'] = self.default_endpoint.security_protocol
d['credentials'] = self.default_endpoint.credentials
return get_mgmt(d)
@classmethod
def from_config(cls, prov_config, prov_key, appliance=None):
appliance = appliance or cls.appliance
endpoint = RedfishEndpoint(**prov_config['endpoints']['default'])
return appliance.collections.physical_providers.instantiate(
prov_class=cls,
name=prov_config['name'],
endpoints={endpoint.name: endpoint},
key=prov_key)
@property
def view_value_mapping(self):
return {
'name': self.name,
'prov_type': 'Redfish'
}
def get_detail(self, label):
view = navigate_to(self, 'Details')
try:
stat = view.entities.summary('Relationships').get_text_of(label)
logger.info("{}: {}".format(label, stat))
        except NoSuchElementException:
            logger.error("Couldn't find number of {}".format(label))
            stat = None  # avoid a NameError on the return below
        return stat
@variable(alias='ui')
def num_chassis(self):
return int(self.get_detail('Physical Chassis'))
@variable(alias='ui')
def num_racks(self):
return int(self.get_detail('Physical Racks'))
@attr.s
class RedfishPhysicalServer(PhysicalServer):
INVENTORY_TO_MATCH = ['power_state']
STATS_TO_MATCH = ['cores_capacity', 'memory_capacity']
@attr.s
class RedfishPhysicalServerCollection(PhysicalServerCollection):
ENTITY = RedfishPhysicalServer
@attr.s
class RedfishPhysicalChassis(PhysicalChassis):
INVENTORY_TO_MATCH = ['chassis_name', 'description', 'identify_led_state']
STATS_TO_MATCH = ['num_physical_servers']
    def __init__(self):
        super(RedfishPhysicalChassis, self).__init__()
@attr.s
class RedfishPhysicalChassisCollection(PhysicalChassisCollection):
ENTITY = RedfishPhysicalChassis
@attr.s
class RedfishPhysicalRack(PhysicalRack):
INVENTORY_TO_MATCH = ["rack_name"]
STATS_TO_MATCH = []
    def __init__(self):
        super(RedfishPhysicalRack, self).__init__()
def validate_stats(self, ui=False):
""" Validates that the detail page matches the physical rack's information.
This method logs into the provider using the mgmt_system interface and collects
a set of statistics to be matched against the UI. An exception will be raised
if the stats retrieved from the UI do not match those retrieved from wrapanapi.
"""
# Make sure we are on the physical rack detail page
if ui:
self.load_details()
# Retrieve the client and the stats and inventory to match
client = self.provider.mgmt
stats_to_match = self.STATS_TO_MATCH
inventory_to_match = self.INVENTORY_TO_MATCH
# Retrieve the stats and inventory from wrapanapi
rack_stats = client.rack_stats(self, stats_to_match)
rack_inventory = client.rack_inventory(self, inventory_to_match)
# Refresh the browser
if ui:
self.browser.selenium.refresh()
# Verify that the stats retrieved from wrapanapi match those retrieved
# from the UI
for stat in stats_to_match:
try:
cfme_stat = int(getattr(self, stat)(method='ui' if ui else None))
rack_stat = int(rack_stats[stat])
if rack_stat != cfme_stat:
msg = "The {} stat does not match. (server: {}, server stat: {}, cfme stat: {})"
raise StatsDoNotMatch(msg.format(stat, self.name, rack_stat, cfme_stat))
except KeyError:
raise HostStatsNotContains(
"Server stats information does not contain '{}'".format(stat))
except AttributeError:
raise ProviderHasNoProperty("Provider does not know how to get '{}'".format(stat))
# Verify that the inventory retrieved from wrapanapi match those retrieved
# from the UI
for inventory in inventory_to_match:
try:
cfme_inventory = getattr(self, inventory)(method='ui' if ui else None)
                # use a separate name so the rack_inventory dict is not
                # overwritten inside the loop
                rack_value = rack_inventory[inventory]
                if rack_value != cfme_inventory:
                    msg = "The {} inventory does not match. (server: {}, server inventory: {}, " \
                          "cfme inventory: {})"
                    raise StatsDoNotMatch(msg.format(inventory, self.name, rack_value,
                                                     cfme_inventory))
except KeyError:
raise HostStatsNotContains(
"Server inventory information does not contain '{}'".format(inventory))
except AttributeError:
msg = "Provider does not know how to get '{}'"
raise ProviderHasNoProperty(msg.format(inventory))
@attr.s
class RedfishPhysicalRackCollection(PhysicalRackCollection):
ENTITY = RedfishPhysicalRack
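# Illustrative usage sketch (editor's addition): prov_config and prov_key are
# assumed to come from the test suite's provider YAML; the key is an example.
#
#   provider = RedfishProvider.from_config(prov_config, 'redfish-prov-1')
#   print(provider.num_chassis())   # reads the count from the Details page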
|
izapolsk/integration_tests
|
cfme/physical/provider/redfish.py
|
Python
|
gpl-2.0
| 7,197
|
import numpy as np  # needed for np.linspace and np.allclose below
from scipy.stats import johnsonsb
import matplotlib.pyplot as plt
fig, ax = plt.subplots(1, 1)
# Calculate a few first moments:
a, b = 4.32, 3.18
mean, var, skew, kurt = johnsonsb.stats(a, b, moments='mvsk')
# Display the probability density function (``pdf``):
x = np.linspace(johnsonsb.ppf(0.01, a, b),
johnsonsb.ppf(0.99, a, b), 100)
ax.plot(x, johnsonsb.pdf(x, a, b),
'r-', lw=5, alpha=0.6, label='johnsonsb pdf')
# Alternatively, the distribution object can be called (as a function)
# to fix the shape, location and scale parameters. This returns a "frozen"
# RV object holding the given parameters fixed.
# Freeze the distribution and display the frozen ``pdf``:
rv = johnsonsb(a, b)
ax.plot(x, rv.pdf(x), 'k-', lw=2, label='frozen pdf')
# Check accuracy of ``cdf`` and ``ppf``:
vals = johnsonsb.ppf([0.001, 0.5, 0.999], a, b)
np.allclose([0.001, 0.5, 0.999], johnsonsb.cdf(vals, a, b))
# True
# Generate random numbers:
r = johnsonsb.rvs(a, b, size=1000)
# And compare the histogram:
ax.hist(r, normed=True, histtype='stepfilled', alpha=0.2)
ax.legend(loc='best', frameon=False)
plt.show()
|
platinhom/ManualHom
|
Coding/Python/scipy-html-0.16.1/generated/scipy-stats-johnsonsb-1.py
|
Python
|
gpl-2.0
| 1,134
|
# -*- coding: iso-8859-1 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Connector for sockshare
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("[sockshare.py] url="+page_url)
data = scrapertools.cache_page(page_url)
patron = 'value="([0-9a-f]+?)" name="hash"'
matches = re.compile(patron,re.DOTALL).findall(data)
if len(matches)==0:return []
post = "hash="+matches[0]+"&confirm=Continue as Free User"
data = scrapertools.cache_page( page_url , post=post, headers=[['User-Agent','Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.8.1.14) Gecko/20080404 Firefox/2.0.0.14'],['Referer',page_url]] )
logger.info("data="+data)
    # Extract the encrypted chunk
patron = "playlist: '(.+?)'"
matches = re.compile(patron,re.DOTALL).findall(data)
#scrapertools.printMatches(matches)
data = ""
if len(matches)>0:
xmlurl = urlparse.urljoin(page_url,matches[0])
        logger.info("[sockshare.py] Playlist="+xmlurl)
    else:
        logger.info("[sockshare.py] Playlist not found")
return []
logger.info("xmlurl="+xmlurl)
data = scrapertools.downloadpageWithoutCookies(xmlurl)
    # Extract the URL
patron = '</link><media\:content url="(.+?)"'
matches = re.compile(patron,re.DOTALL).findall(data)
scrapertools.printMatches(matches)
video_urls = []
if len(matches)>0:
video_urls.append( ["."+matches[0].rsplit('.',1)[1][0:3]+" [sockshare]",matches[0]])
for video_url in video_urls:
logger.info("[sockshare.py] %s - %s" % (video_url[0],video_url[1]))
return video_urls
# Find videos from this server in the given text
def find_videos(text):
encontrados = set()
devuelve = []
# http://www.peliculasaudiolatino.com/show/sockshare.php?url=CEE0B3A7DDFED758
patronvideos = 'sockshare.php\?url=([A-Z0-9]+)'
logger.info("[sockshare.py] find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos,re.DOTALL).findall(text)
for match in matches:
titulo = "[sockshare]"
url = "http://www.sockshare.com/embed/"+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'sockshare' ] )
encontrados.add(url)
else:
            logger.info(" duplicate url="+url)
# http://www.sockshare.com/embed/CEE0B3A7DDFED758
patronvideos = 'http://www.sockshare.com/(?:file|embed)/([A-Z0-9]+)'
logger.info("[sockshare.py] find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos,re.DOTALL).findall(text)
for match in matches:
titulo = "[sockshare]"
url = "http://www.sockshare.com/embed/"+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'sockshare' ] )
encontrados.add(url)
else:
            logger.info(" duplicate url="+url)
return devuelve
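# Illustrative usage sketch (editor's addition): the HTML below is a made-up
# snippet containing an embed URL of the expected shape.
#
#   html = '<iframe src="http://www.sockshare.com/embed/CEE0B3A7DDFED758">'
#   for titulo, url, server in find_videos(html):
#       print titulo, url, server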
|
titienmiami/mmc.repository
|
plugin.video.tvalacarta/servers/sockshare.py
|
Python
|
gpl-2.0
| 3,311
|
from src import model
from src.model import User, Pin, Category, Velov
from flask import Flask, flash, render_template, request, session, jsonify
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
def connectToDatabase():
    """
    Return the SQLAlchemy database object for our SQLite database
    """
    return model.db
db = connectToDatabase()
def getAllPin(idCategory):
"""
request for all pins
"""
if (idCategory):
cat = Category.query.get(idCategory)
if cat:
items = cat.pins
else:
items = []
else:
items = Pin.query.all()
if items:
        print "Pins not empty"
        return jsonify(pins=[item.serialize() for item in items])
    print "pins empty"
return jsonify(error="No pins")
def getPinById(idPin):
    print idPin
    if idPin:
        item = Pin.query.get(int(idPin))
        if item:
            print item.id  # only dereference item once we know it exists
            return jsonify(item.serialize())
        return jsonify(error="No pin")
    return jsonify(error="Invalid parameters")
def getAllUser():
print "getAllUser"
items = User.query.all()
    print "query done"
    if items:
        print "users not empty"
        return jsonify(users=[item.serialize() for item in items])
    print "users empty"
return jsonify(error="No user")
def getAllCategory(pin):
print "displayCategories"
    # id of the pin
if pin:
pi = Pin.query.get(pin)
items = pi.categories
else:
items = Category.query.all()
    if items:
        print "categories not empty"
        return jsonify(categories=[item.serialize() for item in items])
    print "Category empty"
return jsonify(error="No category")
def getCategoryById(category):
print "displayCategory"
    if category:
        item = Category.query.filter_by(nom=category).first()
        if item:
            print item.id  # only dereference item once we know it exists
            return jsonify(item.serialize())
        return jsonify(error="No category")
    return jsonify(error="Invalid parameters")
def authentification(form):
user = User.query.filter_by(pseudo=form['pseudo'], passw=form['passw']).first()
if user:
return jsonify(id=user.id, pseudo=user.pseudo)
return jsonify(error="authentification error")
def addPin(form):
print "addPin"
"""if (form['title'] and form['user'] and form['lng'] and form['lat']):
exist = Pin.query.filter_by(title=form['title'], lng=form['lng'], lat=form['lat']).first()
if exist:
return jsonify(error="already exists")
user = User.query.get(form['user'])
if not(user):
return jsonify(error="user doesn't exist")
    #WRONG: pin = Pin(form['title'], float(form['lng']), float(form['lat']), form['user'], form['category'], form['description'])
"""
db.session.add(form)
db.session.commit()
# return jsonify(pin = pin.serialize())
#return jsonify(error="invalid parameters")
#updates or creates a velov
def updateVelovByIdVelov(current):
if current:
item = Velov.query.filter_by(idVelov=current.idVelov).first()
if item:
item.velo = current.velo
item.libre = current.libre
db.session.commit()
else:
addPin(current)
def addUser(form):
if (form['pseudo'] and form['passw']):
exist = User.query.filter_by(pseudo=form['pseudo']).first()
if exist:
return jsonify(error="already exist")
user = User(form['pseudo'], form['passw'])
db.session.add(user)
db.session.commit()
return jsonify(id=user.id, pseudo=user.pseudo)
return jsonify(error="invalid parameters")
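# Illustrative usage sketch (editor's addition): these helpers are meant to be
# called from Flask view functions; the route below is an assumption, not part
# of the original application.
#
#   @app.route('/users')
#   def users():
#       return getAllUser()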
|
H4213/WeLyon
|
server/service.py
|
Python
|
gpl-2.0
| 3,368
|
import os
import markup
def relpath_same_drive(p1, p2):
"""
    Convert p1 into a path relative to p2 if both are on the same drive.
"""
dr = os.path.splitdrive(p1)[0]
if len(dr) == 0 or dr == os.path.splitdrive(p2)[0]:
return os.path.relpath(p1, p2)
return p1
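# Worked example (editor's addition): on POSIX, os.path.splitdrive() always
# returns an empty drive, so the path is made relative:
#   relpath_same_drive('/data/images/a.png', '/data/report') -> '../images/a.png'
# On Windows, 'D:\\img.png' relative to a path on 'C:' is returned unchanged.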
def create_report(path, gs):
""" Creates html report from the list of GeneStage objects """
folder = os.path.split(path)[0]
html_folder = os.path.join(folder, 'html')
if not os.path.exists(html_folder):
os.makedirs(html_folder)
result_page = markup.page()
result_page.init()
genes = {}
for g in gs:
if g.gene not in genes:
genes[g.gene] = []
genes[g.gene] += [g]
for gene, gl in sorted(genes.items(), key=lambda x: x[0]):
for g in sorted(gl, key = lambda x: x.stage):
stage_path = os.path.join(html_folder, gene + '_' + str(g.stage) + '.html')
stage_page = markup.page()
stage_page.init()
for name in g.names:
img = g.find_images(name)
stage_page.a(markup.oneliner.img(src=relpath_same_drive(name, html_folder), height=100), href=relpath_same_drive(name, html_folder))
if len(img) > 0 and hasattr(img[0], "saved_name"):
stage_page.a(markup.oneliner.img(src=relpath_same_drive(img[0].saved_name, html_folder), height=100), href=relpath_same_drive(img[0].saved_name, html_folder))
stage_page.hr()
print >>open(stage_path, 'w'), stage_page
result_page.h4(gene)
result_page.br()
result_page.a('Stage ' + str(g.stage), href=os.path.relpath(stage_path, folder))
result_page.br()
for i, cls in enumerate(g.cleared):
if len(cls) > 0:
cluster_path = stage_path[:-4] + '_c' + str(i) + '.html'
cluster_page = markup.page()
cluster_page.init()
cluster_page.a(markup.oneliner.img(src=relpath_same_drive(cls.best().saved_name, html_folder), height=200, alt='No good'), href=relpath_same_drive(cls.best().saved_name, html_folder))
cluster_page.hr()
for img in cls:
cluster_page.a(markup.oneliner.img(src=relpath_same_drive(img.saved_name, html_folder), height=75, alt='No good'), href=relpath_same_drive(img.saved_name, html_folder))
print >>open(cluster_path, 'w'), cluster_page
result_page.a(markup.oneliner.img(src=relpath_same_drive(cls.best().saved_name, folder), height=75, alt='No good'), href=os.path.relpath(cluster_path, folder))
for i, cls in enumerate(g.uncleared):
if len(cls) > 0:
cluster_path = stage_path[:-4] + '_u' + str(i) + '.html'
cluster_page = markup.page()
cluster_page.init()
cluster_page.a(markup.oneliner.img(src=relpath_same_drive(cls.best().saved_name, html_folder), height=200, alt='No good'), href=relpath_same_drive(cls.best().saved_name, html_folder))
cluster_page.hr()
for img in cls:
cluster_page.a(markup.oneliner.img(src=relpath_same_drive(img.saved_name, html_folder), height=75, alt='No good'), href=relpath_same_drive(img.saved_name, html_folder))
print >>open(cluster_path, 'w'), cluster_page
result_page.a(markup.oneliner.img(src=relpath_same_drive(cls.best().saved_name, folder), height=75, alt='No good'), href=os.path.relpath(cluster_path, folder))
result_page.hr()
print >>open(path, 'w'), result_page
|
ilyapatrushev/isimage
|
isimage/select_images/create_report.py
|
Python
|
gpl-2.0
| 3,952
|
# https://sam.nrel.gov/images/web_page_files/ssc_guide.pdf#subsection.3.4
import omf.solvers.nrelsam2013 as sam # This import takes a long time (15 seconds)
def inspect_pvwattsv1():
'''
In the GRIP API we only use the pvwattsv1 module
'''
ssc = sam.SSCAPI()
pv = ssc.ssc_module_create("pvwattsv1")
idx = 0
pv_var = ssc.ssc_module_var_info(pv, idx)
while (pv_var is not None):
print('Name: {}'.format(ssc.ssc_info_name(pv_var)))
print('Label: {}'.format(ssc.ssc_info_label(pv_var)))
print('Units: {}'.format(ssc.ssc_info_units(pv_var)))
print('Meta: {}'.format(ssc.ssc_info_meta(pv_var)))
print('Group: {}'.format(ssc.ssc_info_group(pv_var)))
print('Entry description: {}'.format(ssc.ssc_entry_description(pv_var)))
#print('Entry name: {}'.format(ssc.ssc_entry_name(pv_var))) # Segfault?!
print('')
#print(ssc.ssc_info_required(pv_var)) # Only available after 2013 SDK
#print(ssc.ssc_info_constraints(pv_var)) # Only available after 2013 SDK
idx += 1
pv_var = ssc.ssc_module_var_info(pv, idx)
print('Variable count: {}'.format(idx))
if __name__ == '__main__':
inspect_pvwattsv1()
|
dpinney/omf
|
omf/scratch/GRIP/helper/nrel_sam_introspection.py
|
Python
|
gpl-2.0
| 1,220
|
import random
class EffortAgent:
def __init__(self, memory_length, epsilon, u_l, u_h, r1, r2, beta, cost, threshold):
self.memory_length = memory_length
self.epsilon = epsilon
self.u_l = u_l
self.u_h = u_h
self.r1 = r1
self.r2 = r2
self.beta = beta
self.cost = cost
self.threshold = threshold
self.choices = []
self.states = []
for i in xrange(memory_length):
if random.random() < 0.5:
self.choices.append(1)
if random.random() < 0.5:
self.states.append(1)
else:
self.states.append(0)
else:
self.choices.append(0)
self.states.append(0)
def set_neighbours(self, nbrs):
self.neighbours = nbrs
def make_choice(self):
if random.random() < self.epsilon:
if random.random() < 0.5:
self.choices.append(0)
return 0
else:
self.choices.append(1)
return 1
else:
if self.expected_utility_on_effort() > self.expected_utility_without_effort():
self.choices.append(1)
return 1
else:
self.choices.append(0)
return 0
def set_state(self, state):
self.states.append(state)
def estimated_probability_of_success(self):
average = self.history()
if average < self.threshold:
return self.r1 * average
else:
return self.beta - self.r2 * average
def expected_utility_on_effort(self):
p = self.estimated_probability_of_success()
return (self.u_h - self.cost) * p + (self.u_l - self.cost) * (1.0 - p)
def expected_utility_without_effort(self):
return self.u_l
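    # Worked example (editor's addition): with u_h=5.0, u_l=2.0, cost=1.0 and an
    # estimated success probability p=0.5, effort yields
    # (5-1)*0.5 + (2-1)*0.5 = 2.5 > 2.0 = u_l, so the agent exerts effort.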
def current_state(self):
return self.states[-1]
def current_choice(self):
return self.choices[-1]
def average_state(self):
if len(self.states) == 0:
return 0
else:
return sum(self.states) / float(len(self.states))
def average_effort(self):
if len(self.choices) == 0:
return 0
else:
return sum(self.choices) / float(len(self.choices))
def history(self):
success = sum(self.states[-self.memory_length:])
attempts = sum(self.choices[-self.memory_length:])
if attempts == 0:
return 0.0
else:
return success / float(attempts)
class EffortWorld:
def __init__(self, n, threshold, beta, r1, r2, u_l, u_h, memory, epsilon, cost):
"""
Create a world with n agents
"""
self.threshold = threshold
self.r1 = r1
self.r2 = r2
self.beta = beta
self.agents = []
for i in xrange(n):
self.agents.append(EffortAgent(memory_length=memory, epsilon=epsilon, u_l=u_l, u_h=u_h, r1=r1, r2=r2, beta=beta, cost=cost, threshold=threshold))
for idx, a in enumerate(self.agents):
a.set_neighbours(self.agents)
def step(self):
choices = []
for a in self.agents:
choices.append(a.make_choice())
ave_choice = sum(choices) / float(len(choices))
if ave_choice < self.threshold:
p = self.r1 * ave_choice
else:
p = self.beta - self.r2 * ave_choice
for a in self.agents:
if a.current_choice() == 0:
a.set_state(0)
else:
if random.random() < p:
a.set_state(1)
else:
a.set_state(0)
def total_effort(self):
return sum(map(lambda a: a.current_choice(), self.agents))
def total_high_status(self):
return sum(map(lambda a: a.current_state(), self.agents))
def run_simulation(n, periods=1000, threshold=0.5, beta=1.25, r1=1.5, r2=1.0, u_l=2.0, u_h=5.0, memory=10, epsilon=0.1, cost=1.0):
world = EffortWorld(n, threshold=threshold, beta=beta, r1=r1, r2=r2, u_l=u_l, u_h=u_h, memory=memory, epsilon=epsilon, cost=cost)
efforts, high_statuses = [], []
for period in xrange(periods):
world.step()
efforts.append(world.total_effort())
high_statuses.append(world.total_high_status())
average_effort = map(lambda a: a.average_effort(), world.agents)
average_state = map(lambda a: a.average_state(), world.agents)
return {
"efforts": efforts,
"high_statuses" : high_statuses,
"ave_effort" : average_effort,
"ave_state" : average_state
}
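# Illustrative usage sketch (editor's addition): the parameter values below are
# assumptions for demonstration, not calibrated settings.
if __name__ == '__main__':
    results = run_simulation(n=50, periods=200)
    print "final total effort:", results["efforts"][-1]
    print "final high-status count:", results["high_statuses"][-1]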
|
jamesporter/endogenous-polarisation
|
models/effort_model.py
|
Python
|
gpl-2.0
| 4,684
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# benchmark_alternating - benchmark alternating read/write
# Copyright (C) 2003-2011 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
"""Benchmark alternating file reads and writes"""
import os
import sys
import threading
import time
import getopt
clock_fun = time.time
def default_configuration():
"""Return dictionary with default configuration values"""
conf = {'number': 100000, 'data_bytes': 512}
return conf
def usage():
"""Usage help"""
print("Usage: %s" % sys.argv[0])
print("Run alternating benchmark")
print("Options and default values:")
for (key, val) in default_configuration().items():
print("--%s: %s" % (key, val))
class Writer(threading.Thread):
"""Writer thread"""
def __init__(self, start_time, writefile, data, conf):
threading.Thread.__init__(self)
self.writefile = writefile
self.data = data
self.start_time = start_time
self.endtime = -1
self.conf = conf
def run(self):
"""Runner"""
for _ in xrange(self.conf['number']):
self.writefile.write(self.data)
self.writefile.flush()
#time.sleep(0.001)
self.endtime = (clock_fun() - self.start_time)
print "Write finished at %0.3f" % (self.endtime * 1000)
class Reader(threading.Thread):
"""Reader thread"""
def __init__(self, start_time, readfile, conf):
threading.Thread.__init__(self)
self.readfile = readfile
self.start_time = start_time
self.endtime = -1
self.conf = conf
def run(self):
"""Runner"""
for _ in xrange(self.conf['number']):
nbytes = len(self.readfile.read(self.conf['data_bytes']))
if nbytes < self.conf['data_bytes']:
self.readfile.seek(0)
#time.sleep(0.001)
self.endtime = (clock_fun() - self.start_time)
print "Read finished at %0.3f" % (self.endtime * 1000)
def prepare_files(conf):
"""Set up files used in benchmark"""
if not os.path.exists("readfile"):
data = open("/dev/urandom").read(conf['data_bytes'])
readfile = open("readfile", "wb")
readfile.write(data)
readfile.close()
def main(conf):
"""Run timed benchmark"""
# WRITES ONLY
threads = []
prepare_files(conf)
readfile = open("readfile", "rb")
writefile = open("writefile", "wb")
data = open("/dev/urandom").read(conf['data_bytes']/8)
start_time = clock_fun()
for _ in xrange(1):
worker = Writer(start_time, writefile, data, conf)
threads.append(worker)
for worker in threads:
worker.start()
for worker in threads:
worker.join()
end = time.time()
print "Time for pure writes %d" % (end - start_time)
# MIXED MODE
threads = []
readfile = open("readfile", "rb")
writefile = open("writefile", "wb")
data = open("/dev/urandom").read(conf['data_bytes']/8)
start_time = clock_fun()
worker = Writer(start_time, writefile, data, conf)
threads.append(worker)
for _ in xrange(4):
worker = Reader(start_time, readfile, conf)
threads.append(worker)
for worker in threads:
worker.start()
for worker in threads:
worker.join()
    end = time.time()
    print "Time for mixed reads/writes %d" % (end - start_time)
    # READ-ONLY MODE
threads = []
readfile = open("readfile", "rb")
start_time = clock_fun()
for _ in xrange(5):
worker = Reader(start_time, readfile, conf)
threads.append(worker)
for worker in threads:
worker.start()
for worker in threads:
worker.join()
end = time.time()
print "Time for just reads %d" % (end - start_time)
if __name__ == '__main__':
conf = default_configuration()
# Parse command line
try:
(opts, args) = getopt.getopt(sys.argv[1:],
'd:hn:', [
'data-bytes=',
'help',
'number=',
])
except getopt.GetoptError, err:
print('Error in option parsing: ' + err.msg)
usage()
sys.exit(1)
for (opt, val) in opts:
if opt in ('-d', '--data-bytes'):
try:
conf["data_bytes"] = int(val)
except ValueError, err:
print('Error in parsing %s value: %s' % (opt, err))
sys.exit(1)
elif opt in ('-h', '--help'):
usage()
sys.exit(0)
elif opt in ('-n', '--number'):
try:
conf["number"] = int(val)
except ValueError, err:
print('Error in parsing %s value: %s' % (opt, err))
sys.exit(1)
else:
print("unknown option: %s" % opt)
usage()
sys.exit(1)
main(conf)
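# Example invocation (editor's addition; the values are hypothetical):
#   python benchmark_alternating.py --number 50000 --data-bytes 1024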
|
heromod/migrid
|
mig/grsfs-fuse/benchmarks/code/benchmark_alternating.py
|
Python
|
gpl-2.0
| 5,766
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Vratislav Podzimek <vpodzime@redhat.com>
# Martin Kolman <mkolman@redhat.com>
from pyanaconda import iutil
import unittest
import os
import tempfile
import signal
import shutil
from .test_constants import ANACONDA_TEST_DIR
from timer import timer
class UpcaseFirstLetterTests(unittest.TestCase):
def setUp(self):
# create the directory used for file/folder tests
if not os.path.exists(ANACONDA_TEST_DIR):
os.makedirs(ANACONDA_TEST_DIR)
def tearDown(self):
# remove the testing directory
shutil.rmtree(ANACONDA_TEST_DIR)
def upcase_first_letter_test(self):
"""Upcasing first letter should work as expected."""
# no change
self.assertEqual(iutil.upcase_first_letter("Czech RePuBliC"),
"Czech RePuBliC")
# simple case
self.assertEqual(iutil.upcase_first_letter("czech"), "Czech")
# first letter only
self.assertEqual(iutil.upcase_first_letter("czech republic"),
"Czech republic")
# no lowercase
self.assertEqual(iutil.upcase_first_letter("czech Republic"),
"Czech Republic")
class RunProgramTests(unittest.TestCase):
def run_program_test(self):
"""Test the _run_program method."""
# correct calling should return rc==0
self.assertEqual(iutil._run_program(['ls'])[0], 0)
# incorrect calling should return rc!=0
self.assertNotEqual(iutil._run_program(['ls', '--asdasd'])[0], 0)
        # check if an int is returned for both success and error
self.assertIsInstance(iutil._run_program(['ls'])[0], int)
self.assertIsInstance(iutil._run_program(['ls', '--asdasd'])[0], int)
# error should raise OSError
with self.assertRaises(OSError):
iutil._run_program(['asdasdadasd'])
def run_program_binary_test(self):
"""Test _run_program with binary output."""
# Echo something that cannot be decoded as utf-8
retcode, output = iutil._run_program(['echo', '-en', r'\xa0\xa1\xa2'], binary_output=True)
self.assertEqual(retcode, 0)
self.assertEqual(output, b'\xa0\xa1\xa2')
def exec_with_redirect_test(self):
"""Test execWithRedirect."""
# correct calling should return rc==0
self.assertEqual(iutil.execWithRedirect('ls', []), 0)
# incorrect calling should return rc!=0
self.assertNotEqual(iutil.execWithRedirect('ls', ['--asdasd']), 0)
def exec_with_capture_test(self):
"""Test execWithCapture."""
# check some output is returned
self.assertGreater(len(iutil.execWithCapture('ls', ['--help'])), 0)
# check no output is returned
self.assertEqual(len(iutil.execWithCapture('true', [])), 0)
def exec_with_capture_no_stderr_test(self):
"""Test execWithCapture with no stderr"""
with tempfile.NamedTemporaryFile(mode="w+t") as testscript:
testscript.write("""#!/bin/sh
echo "output"
echo "error" >&2
""")
testscript.flush()
# check that only the output is captured
self.assertEqual(
iutil.execWithCapture("/bin/sh", [testscript.name], filter_stderr=True),
"output\n")
# check that both output and error are captured
self.assertEqual(iutil.execWithCapture("/bin/sh", [testscript.name]),
"output\nerror\n")
def exec_with_capture_empty_test(self):
"""Test execWithCapture with no output"""
# check that the output is an empty string
self.assertEqual(
iutil.execWithCapture("/bin/sh", ["-c", "exit 0"]),
"")
def exec_readlines_test(self):
"""Test execReadlines."""
# test no lines are returned
self.assertEqual(list(iutil.execReadlines("true", [])), [])
# test some lines are returned
self.assertGreater(len(list(iutil.execReadlines("ls", ["--help"]))), 0)
# check that it always returns an iterator for both
# if there is some output and if there isn't any
self.assertTrue(hasattr(iutil.execReadlines("ls", ["--help"]), "__iter__"))
self.assertTrue(hasattr(iutil.execReadlines("true", []), "__iter__"))
def exec_readlines_test_normal_output(self):
"""Test the output of execReadlines."""
# Test regular-looking output
with tempfile.NamedTemporaryFile(mode="w+t") as testscript:
testscript.write("""#!/bin/sh
echo "one"
echo "two"
echo "three"
exit 0
""")
testscript.flush()
with timer(5):
rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name])
self.assertEqual(next(rl_iterator), "one")
self.assertEqual(next(rl_iterator), "two")
self.assertEqual(next(rl_iterator), "three")
self.assertRaises(StopIteration, rl_iterator.__next__)
# Test output with no end of line
with tempfile.NamedTemporaryFile(mode="w+t") as testscript:
testscript.write("""#!/bin/sh
echo "one"
echo "two"
echo -n "three"
exit 0
""")
testscript.flush()
with timer(5):
rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name])
self.assertEqual(next(rl_iterator), "one")
self.assertEqual(next(rl_iterator), "two")
self.assertEqual(next(rl_iterator), "three")
self.assertRaises(StopIteration, rl_iterator.__next__)
def exec_readlines_test_exits(self):
"""Test execReadlines in different child exit situations."""
# Tests that exit on signal will raise OSError once output
# has been consumed, otherwise the test will exit normally.
# Test a normal, non-0 exit
with tempfile.NamedTemporaryFile(mode="wt") as testscript:
testscript.write("""#!/bin/sh
echo "one"
echo "two"
echo "three"
exit 1
""")
testscript.flush()
with timer(5):
rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name])
self.assertEqual(next(rl_iterator), "one")
self.assertEqual(next(rl_iterator), "two")
self.assertEqual(next(rl_iterator), "three")
self.assertRaises(OSError, rl_iterator.__next__)
# Test exit on signal
with tempfile.NamedTemporaryFile(mode="wt") as testscript:
testscript.write("""#!/bin/sh
echo "one"
echo "two"
echo "three"
kill -TERM $$
""")
testscript.flush()
with timer(5):
rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name])
self.assertEqual(next(rl_iterator), "one")
self.assertEqual(next(rl_iterator), "two")
self.assertEqual(next(rl_iterator), "three")
self.assertRaises(OSError, rl_iterator.__next__)
# Repeat the above two tests, but exit before a final newline
with tempfile.NamedTemporaryFile(mode="wt") as testscript:
testscript.write("""#!/bin/sh
echo "one"
echo "two"
echo -n "three"
exit 1
""")
testscript.flush()
with timer(5):
rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name])
self.assertEqual(next(rl_iterator), "one")
self.assertEqual(next(rl_iterator), "two")
self.assertEqual(next(rl_iterator), "three")
self.assertRaises(OSError, rl_iterator.__next__)
with tempfile.NamedTemporaryFile(mode="wt") as testscript:
testscript.write("""#!/bin/sh
echo "one"
echo "two"
echo -n "three"
kill -TERM $$
""")
testscript.flush()
with timer(5):
rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name])
self.assertEqual(next(rl_iterator), "one")
self.assertEqual(next(rl_iterator), "two")
self.assertEqual(next(rl_iterator), "three")
self.assertRaises(OSError, rl_iterator.__next__)
def exec_readlines_test_signals(self):
"""Test execReadlines and signal receipt."""
# ignored signal
old_HUP_handler = signal.signal(signal.SIGHUP, signal.SIG_IGN)
try:
with tempfile.NamedTemporaryFile(mode="wt") as testscript:
testscript.write("""#!/bin/sh
echo "one"
kill -HUP $PPID
echo "two"
echo -n "three"
exit 0
""")
testscript.flush()
with timer(5):
rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name])
self.assertEqual(next(rl_iterator), "one")
self.assertEqual(next(rl_iterator), "two")
self.assertEqual(next(rl_iterator), "three")
self.assertRaises(StopIteration, rl_iterator.__next__)
finally:
signal.signal(signal.SIGHUP, old_HUP_handler)
# caught signal
def _hup_handler(signum, frame):
pass
old_HUP_handler = signal.signal(signal.SIGHUP, _hup_handler)
try:
with tempfile.NamedTemporaryFile(mode="wt") as testscript:
testscript.write("""#!/bin/sh
echo "one"
kill -HUP $PPID
echo "two"
echo -n "three"
exit 0
""")
testscript.flush()
with timer(5):
rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name])
self.assertEqual(next(rl_iterator), "one")
self.assertEqual(next(rl_iterator), "two")
self.assertEqual(next(rl_iterator), "three")
self.assertRaises(StopIteration, rl_iterator.__next__)
finally:
signal.signal(signal.SIGHUP, old_HUP_handler)
def exec_readlines_test_filter_stderr(self):
"""Test execReadlines and filter_stderr."""
# Test that stderr is normally included
with tempfile.NamedTemporaryFile(mode="w+t") as testscript:
testscript.write("""#!/bin/sh
echo "one"
echo "two" >&2
echo "three"
exit 0
""")
testscript.flush()
with timer(5):
rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name])
self.assertEqual(next(rl_iterator), "one")
self.assertEqual(next(rl_iterator), "two")
self.assertEqual(next(rl_iterator), "three")
self.assertRaises(StopIteration, rl_iterator.__next__)
# Test that filter stderr removes the middle line
with tempfile.NamedTemporaryFile(mode="w+t") as testscript:
testscript.write("""#!/bin/sh
echo "one"
echo "two" >&2
echo "three"
exit 0
""")
testscript.flush()
with timer(5):
rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name], filter_stderr=True)
self.assertEqual(next(rl_iterator), "one")
self.assertEqual(next(rl_iterator), "three")
self.assertRaises(StopIteration, rl_iterator.__next__)
def start_program_preexec_fn_test(self):
"""Test passing preexec_fn to startProgram."""
marker_text = "yo wassup man"
# Create a temporary file that will be written before exec
with tempfile.NamedTemporaryFile(mode="w+t") as testfile:
# Write something to testfile to show this method was run
def preexec():
# Open a copy of the file here since close_fds has already closed the descriptor
testcopy = open(testfile.name, 'w')
testcopy.write(marker_text)
testcopy.close()
with timer(5):
# Start a program that does nothing, with a preexec_fn
proc = iutil.startProgram(["/bin/true"], preexec_fn=preexec)
proc.communicate()
# Rewind testfile and look for the text
testfile.seek(0, os.SEEK_SET)
self.assertEqual(testfile.read(), marker_text)
def start_program_stdout_test(self):
"""Test redirecting stdout with startProgram."""
marker_text = "yo wassup man"
# Create a temporary file that will be written by the program
with tempfile.NamedTemporaryFile(mode="w+t") as testfile:
# Open a new copy of the file so that the child doesn't close and
# delete the NamedTemporaryFile
stdout = open(testfile.name, 'w')
with timer(5):
proc = iutil.startProgram(["/bin/echo", marker_text], stdout=stdout)
proc.communicate()
# Rewind testfile and look for the text
testfile.seek(0, os.SEEK_SET)
self.assertEqual(testfile.read().strip(), marker_text)
def start_program_reset_handlers_test(self):
"""Test the reset_handlers parameter of startProgram."""
with tempfile.NamedTemporaryFile(mode="w+t") as testscript:
testscript.write("""#!/bin/sh
# Just hang out and do nothing, forever
while true ; do sleep 1 ; done
""")
testscript.flush()
# Start a program with reset_handlers
proc = iutil.startProgram(["/bin/sh", testscript.name])
with timer(5):
            # Kill with SIGPIPE and check that Python's SIG_IGN was not inherited.
            # The process should die on the signal.
proc.send_signal(signal.SIGPIPE)
proc.communicate()
self.assertEqual(proc.returncode, -(signal.SIGPIPE))
# Start another copy without reset_handlers
proc = iutil.startProgram(["/bin/sh", testscript.name], reset_handlers=False)
with timer(5):
# Kill with SIGPIPE, then SIGTERM, and make sure SIGTERM was the one
# that worked.
proc.send_signal(signal.SIGPIPE)
proc.terminate()
proc.communicate()
self.assertEqual(proc.returncode, -(signal.SIGTERM))
def exec_readlines_auto_kill_test(self):
"""Test execReadlines with reading only part of the output"""
with tempfile.NamedTemporaryFile(mode="w+t") as testscript:
testscript.write("""#!/bin/sh
# Output forever
while true; do
echo hey
done
""")
testscript.flush()
with timer(5):
rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name])
# Save the process context
proc = rl_iterator._proc
# Read two lines worth
self.assertEqual(next(rl_iterator), "hey")
self.assertEqual(next(rl_iterator), "hey")
# Delete the iterator and wait for the process to be killed
del rl_iterator
proc.communicate()
# Check that the process is gone
self.assertIsNotNone(proc.poll())
def watch_process_test(self):
"""Test watchProcess"""
def test_still_running():
with timer(5):
# Run something forever so we can kill it
proc = iutil.startProgram(["/bin/sh", "-c", "while true; do sleep 1; done"])
iutil.watchProcess(proc, "test1")
proc.kill()
# Wait for the SIGCHLD
signal.pause()
self.assertRaises(iutil.ExitError, test_still_running)
# Make sure watchProcess checks that the process has not already exited
with timer(5):
proc = iutil.startProgram(["true"])
proc.communicate()
self.assertRaises(iutil.ExitError, iutil.watchProcess, proc, "test2")
class MiscTests(unittest.TestCase):
def get_dir_size_test(self):
"""Test the getDirSize."""
# dev null should have a size == 0
self.assertEqual(iutil.getDirSize('/dev/null'), 0)
# incorrect path should also return 0
self.assertEqual(iutil.getDirSize('/dev/null/foo'), 0)
# check if an int is always returned
self.assertIsInstance(iutil.getDirSize('/dev/null'), int)
self.assertIsInstance(iutil.getDirSize('/dev/null/foo'), int)
# TODO: mock some dirs and check if their size is
# computed correctly
def mkdir_chain_test(self):
"""Test mkdirChain."""
# don't fail if directory path already exists
iutil.mkdirChain('/')
iutil.mkdirChain('/tmp')
# create a path and test it exists
test_folder = "test_mkdir_chain"
test_paths = [
"foo",
"foo/bar/baz",
u"foo/bar/baz",
"",
"čřščščřščř",
u"čřščščřščř",
"asdasd asdasd",
"! spam"
]
# join with the toplevel test folder and the folder for this
# test
test_paths = [os.path.join(ANACONDA_TEST_DIR, test_folder, p)
for p in test_paths]
def create_return(path):
iutil.mkdirChain(path)
return path
# create the folders and check that they exist
for p in test_paths:
self.assertTrue(os.path.exists(create_return(p)))
# try to create them again - all the paths should already exist
# and the mkdirChain function needs to handle that
# without a traceback
for p in test_paths:
iutil.mkdirChain(p)
def get_active_console_test(self):
"""Test get_active_console."""
# at least check if a string is returned
self.assertIsInstance(iutil.get_active_console(), str)
def is_console_on_vt_test(self):
"""Test isConsoleOnVirtualTerminal."""
# at least check if a bool is returned
self.assertIsInstance(iutil.isConsoleOnVirtualTerminal(), bool)
def parse_nfs_url_test(self):
"""Test parseNfsUrl."""
# empty NFS url should return 3 blanks
self.assertEqual(iutil.parseNfsUrl(""), ("", "", ""))
        # the string is delimited by ":"; there is one prefix and 3 parts,
        # the prefix is discarded and all parts after the 3rd part
        # are also discarded
self.assertEqual(iutil.parseNfsUrl("discard:options:host:path"),
("options", "host", "path"))
self.assertEqual(iutil.parseNfsUrl("discard:options:host:path:foo:bar"),
("options", "host", "path"))
self.assertEqual(iutil.parseNfsUrl(":options:host:path::"),
("options", "host", "path"))
self.assertEqual(iutil.parseNfsUrl(":::::"),
("", "", ""))
# if there is only prefix & 2 parts,
# the two parts are host and path
self.assertEqual(iutil.parseNfsUrl("prefix:host:path"),
("", "host", "path"))
self.assertEqual(iutil.parseNfsUrl(":host:path"),
("", "host", "path"))
self.assertEqual(iutil.parseNfsUrl("::"),
("", "", ""))
# if there is only a prefix and single part,
# the part is the host
self.assertEqual(iutil.parseNfsUrl("prefix:host"),
("", "host", ""))
self.assertEqual(iutil.parseNfsUrl(":host"),
("", "host", ""))
self.assertEqual(iutil.parseNfsUrl(":"),
("", "", ""))
def vt_activate_test(self):
"""Test vtActivate."""
# pylint: disable=no-member
def raise_os_error(*args, **kwargs):
raise OSError
_execWithRedirect = iutil.vtActivate.__globals__['execWithRedirect']
try:
            # chvt does not exist on all platforms
            # and the function needs to correctly survive that
iutil.vtActivate.__globals__['execWithRedirect'] = raise_os_error
self.assertEqual(iutil.vtActivate(2), False)
finally:
iutil.vtActivate.__globals__['execWithRedirect'] = _execWithRedirect
def get_deep_attr_test(self):
"""Test getdeepattr."""
# pylint: disable=attribute-defined-outside-init
class O(object):
pass
a = O()
a.b = O()
a.b1 = 1
a.b.c = 2
a.b.c1 = "ř"
self.assertEqual(iutil.getdeepattr(a, "b1"), 1)
self.assertEqual(iutil.getdeepattr(a, "b.c"), 2)
self.assertEqual(iutil.getdeepattr(a, "b.c1"), "ř")
# be consistent with getattr and throw
# AttributeError if non-existent attribute is requested
with self.assertRaises(AttributeError):
iutil.getdeepattr(a, "")
with self.assertRaises(AttributeError):
iutil.getdeepattr(a, "b.c.d")
def set_deep_attr_test(self):
"""Test setdeepattr."""
# pylint: disable=attribute-defined-outside-init
# pylint: disable=no-member
class O(object):
pass
a = O()
a.b = O()
a.b1 = 1
a.b.c = O()
a.b.c1 = "ř"
# set to a new attribute
iutil.setdeepattr(a, "b.c.d", True)
self.assertEqual(a.b.c.d, True)
# override existing attribute
iutil.setdeepattr(a, "b.c", 1234)
self.assertEqual(a.b.c, 1234)
# "" is actually a valid attribute name
# that can be only accessed by getattr
iutil.setdeepattr(a, "", 1234)
self.assertEqual(getattr(a, ""), 1234)
iutil.setdeepattr(a, "b.", 123)
self.assertEqual(iutil.getdeepattr(a, "b."), 123)
# error should raise AttributeError
with self.assertRaises(AttributeError):
iutil.setdeepattr(a, "b.c.d.e.f.g.h", 1234)
def strip_accents_test(self):
"""Test strip_accents."""
# empty string
self.assertEqual(iutil.strip_accents(u""), u"")
self.assertEqual(iutil.strip_accents(""), "")
# some Czech accents
self.assertEqual(iutil.strip_accents(u"ěščřžýáíéúů"), u"escrzyaieuu")
self.assertEqual(iutil.strip_accents(u"v češtině"), u"v cestine")
self.assertEqual(iutil.strip_accents(u"měšťánek rozšíří HÁČKY"),
u"mestanek rozsiri HACKY")
self.assertEqual(iutil.strip_accents(u"nejneobhospodařovávatelnějšímu"),
u"nejneobhospodarovavatelnejsimu")
# some German umlauts
self.assertEqual(iutil.strip_accents(u"Lärmüberhörer"), u"Larmuberhorer")
self.assertEqual(iutil.strip_accents(u"Heizölrückstoßabdämpfung"),
u"Heizolrucksto\xdfabdampfung")
# some Japanese
self.assertEqual(iutil.strip_accents(u"日本語"), u"\u65e5\u672c\u8a9e")
self.assertEqual(iutil.strip_accents(u"アナコンダ"), # Anaconda
u"\u30a2\u30ca\u30b3\u30f3\u30bf")
# combined
input_string = u"ASCI měšťánek アナコンダ Heizölrückstoßabdämpfung"
output_string =u"ASCI mestanek \u30a2\u30ca\u30b3\u30f3\u30bf Heizolrucksto\xdfabdampfung"
self.assertEqual(iutil.strip_accents(input_string), output_string)
def cmp_obj_attrs_test(self):
"""Test cmp_obj_attrs."""
# pylint: disable=attribute-defined-outside-init
class O(object):
pass
a = O()
a.b = 1
a.c = 2
a1 = O()
a1.b = 1
a1.c = 2
b = O()
b.b = 1
b.c = 3
        # an object should match its own attributes
self.assertTrue(iutil.cmp_obj_attrs(a, a, ["b", "c"]))
self.assertTrue(iutil.cmp_obj_attrs(a1, a1, ["b", "c"]))
self.assertTrue(iutil.cmp_obj_attrs(b, b, ["b", "c"]))
# a and a1 should have the same attributes
self.assertTrue(iutil.cmp_obj_attrs(a, a1, ["b", "c"]))
self.assertTrue(iutil.cmp_obj_attrs(a1, a, ["b", "c"]))
self.assertTrue(iutil.cmp_obj_attrs(a1, a, ["c", "b"]))
# missing attributes are considered a mismatch
self.assertFalse(iutil.cmp_obj_attrs(a, a1, ["b", "c", "d"]))
# empty attribute list is not a mismatch
self.assertTrue(iutil.cmp_obj_attrs(a, b, []))
# attributes of a and b differ
self.assertFalse(iutil.cmp_obj_attrs(a, b, ["b", "c"]))
self.assertFalse(iutil.cmp_obj_attrs(b, a, ["b", "c"]))
self.assertFalse(iutil.cmp_obj_attrs(b, a, ["c", "b"]))
def to_ascii_test(self):
"""Test _toASCII."""
# check some conversions
self.assertEqual(iutil._toASCII(""), "")
self.assertEqual(iutil._toASCII(" "), " ")
self.assertEqual(iutil._toASCII("&@`'łŁ!@#$%^&*{}[]$'<>*"),
"&@`'!@#$%^&*{}[]$'<>*")
self.assertEqual(iutil._toASCII("ABC"), "ABC")
self.assertEqual(iutil._toASCII("aBC"), "aBC")
_out = "Heizolruckstoabdampfung"
self.assertEqual(iutil._toASCII("Heizölrückstoßabdämpfung"), _out)
def upper_ascii_test(self):
"""Test upperASCII."""
self.assertEqual(iutil.upperASCII(""),"")
self.assertEqual(iutil.upperASCII("a"),"A")
self.assertEqual(iutil.upperASCII("A"),"A")
self.assertEqual(iutil.upperASCII("aBc"),"ABC")
self.assertEqual(iutil.upperASCII("_&*'@#$%^aBcžčŘ"),
"_&*'@#$%^ABCZCR")
_out = "HEIZOLRUCKSTOABDAMPFUNG"
self.assertEqual(iutil.upperASCII("Heizölrückstoßabdämpfung"), _out)
def lower_ascii_test(self):
"""Test lowerASCII."""
self.assertEqual(iutil.lowerASCII(""),"")
self.assertEqual(iutil.lowerASCII("A"),"a")
self.assertEqual(iutil.lowerASCII("a"),"a")
self.assertEqual(iutil.lowerASCII("aBc"),"abc")
self.assertEqual(iutil.lowerASCII("_&*'@#$%^aBcžčŘ"),
"_&*'@#$%^abczcr")
_out = "heizolruckstoabdampfung"
self.assertEqual(iutil.lowerASCII("Heizölrückstoßabdämpfung"), _out)
def have_word_match_test(self):
"""Test have_word_match."""
self.assertTrue(iutil.have_word_match("word1 word2", "word1 word2 word3"))
self.assertTrue(iutil.have_word_match("word1 word2", "word2 word1 word3"))
self.assertTrue(iutil.have_word_match("word2 word1", "word3 word1 word2"))
self.assertTrue(iutil.have_word_match("word1", "word1 word2"))
self.assertTrue(iutil.have_word_match("word1 word2", "word2word1 word3"))
self.assertTrue(iutil.have_word_match("word2 word1", "word3 word1word2"))
self.assertTrue(iutil.have_word_match("word1", "word1word2"))
self.assertTrue(iutil.have_word_match("", "word1"))
self.assertFalse(iutil.have_word_match("word3 word1", "word1"))
self.assertFalse(iutil.have_word_match("word1 word3", "word1 word2"))
self.assertFalse(iutil.have_word_match("word3 word2", "word1 word2"))
self.assertFalse(iutil.have_word_match("word1word2", "word1 word2 word3"))
self.assertFalse(iutil.have_word_match("word1", ""))
self.assertFalse(iutil.have_word_match("word1", None))
self.assertFalse(iutil.have_word_match(None, "word1"))
self.assertFalse(iutil.have_word_match("", None))
self.assertFalse(iutil.have_word_match(None, ""))
self.assertFalse(iutil.have_word_match(None, None))
        # Compare an explicit unicode string with a "standard" str and make sure nothing crashes
self.assertTrue(iutil.have_word_match("fête", u"fête champêtre"))
self.assertTrue(iutil.have_word_match(u"fête", "fête champêtre"))
def parent_dir_test(self):
"""Test the parent_dir function"""
dirs = [("", ""), ("/", ""), ("/home/", ""), ("/home/bcl", "/home"), ("home/bcl", "home"),
("/home/bcl/", "/home"), ("/home/extra/bcl", "/home/extra"),
("/home/extra/bcl/", "/home/extra"), ("/home/extra/../bcl/", "/home")]
for d, r in dirs:
self.assertEqual(iutil.parent_dir(d), r)
def open_with_perm_test(self):
"""Test the open_with_perm function"""
# Create a directory for test files
test_dir = tempfile.mkdtemp()
try:
# Reset the umask
old_umask = os.umask(0)
try:
# Create a file with mode 0777
iutil.open_with_perm(test_dir + '/test1', 'w', 0o777)
self.assertEqual(os.stat(test_dir + '/test1').st_mode & 0o777, 0o777)
# Create a file with mode 0600
iutil.open_with_perm(test_dir + '/test2', 'w', 0o600)
self.assertEqual(os.stat(test_dir + '/test2').st_mode & 0o777, 0o600)
finally:
os.umask(old_umask)
finally:
shutil.rmtree(test_dir)
def touch_test(self):
"""Test if the touch function correctly creates empty files"""
test_dir = tempfile.mkdtemp()
try:
file_path = os.path.join(test_dir, "EMPTY_FILE")
# try to create an empty file with touch()
iutil.touch(file_path)
# check if it exists & is a file
self.assertTrue(os.path.isfile(file_path))
# check if the file is empty
self.assertEqual(os.stat(file_path).st_size, 0)
finally:
shutil.rmtree(test_dir)
|
wgwoods/anaconda
|
tests/pyanaconda_tests/iutil_test.py
|
Python
|
gpl-2.0
| 30,600
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.openstack.common.log import policy_is
from openstack_dashboard.dashboards.admin import dashboard
class Log(horizon.Panel):
name = _("Log")
slug = 'log'
img = '/static/dashboard/img/nav/log1.png'
def nav(self, context):
username = context['request'].user.username
return policy_is(username, 'auditadmin', 'admin')
dashboard.Admin.register(Log)
|
ChinaMassClouds/copenstack-server
|
openstack/src/horizon-2014.2/openstack_dashboard/dashboards/admin/log/panel.py
|
Python
|
gpl-2.0
| 1,050
|
"""This module handles feeding food to workers at the end of a round."""
from game import food_checker
from game import resource
class FeederError(Exception):
pass
class Feeder(object):
def __init__(self):
self._resource_picker = None
self._player = None
self._food_req = None
self._food_checker = None
self._picked_res = None
def SetResourcePicker(self, picker):
self._resource_picker = picker
def SetPlayer(self, player):
self._player = player
def SetFoodRequirement(self, food_requirement):
self._food_req = food_requirement
self._food_checker = food_checker.FoodChecker(self._food_req)
def GetFoodRequirement(self):
return self._food_req
def GetResourcePicker(self):
return self._resource_picker
def FeedWithPickedResource(self):
self._picked_res = self._resource_picker.GetPicked()
try:
enough = self._food_checker.Check(self._picked_res)
except food_checker.FoodTooMuchError:
raise FeederError('Pick too much')
except food_checker.NotFoodError:
raise FeederError('Pick non food')
if enough:
self._ApplyTransaction()
else:
self._CheckReallyCanNotPayMore()
self._ApplyTransaction()
self._ApplyLoan()
def _ApplyTransaction(self):
self._player.SubtractResource(self._picked_res)
def _CheckReallyCanNotPayMore(self):
test_res = self._player.GetResource().Copy()
test_res.Subtract(self._picked_res)
if test_res.GetFoodValue():
raise FeederError('Can pay more')
@classmethod
def _GetNeededLoanNumber(cls, needed_loan_value):
loan_unit_value = resource.Loan.GetFrancValueWhenGetLoan()
needed_loan_number = (
needed_loan_value / loan_unit_value)
if needed_loan_value % loan_unit_value:
needed_loan_number = needed_loan_number + 1
return needed_loan_number
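    # Worked example (editor's addition): if one loan is worth, say, 4 francs, a
    # shortfall of 7 francs needs 7 / 4 = 1 loan plus one more for the remainder,
    # i.e. 2 loans in total.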
def _ApplyLoan(self):
picked_food_value = self._picked_res.GetFoodValue()
needed_loan_value = self._food_req - picked_food_value
needed_loan_number = self._GetNeededLoanNumber(needed_loan_value)
self._player.GetLoan(needed_loan_number)
self._player.SubtractResource(
resource.Resource(franc=needed_loan_value))
def CreateFeeder(player, food_requirement, picker):
feeder_obj = Feeder()
feeder_obj.SetResourcePicker(picker)
feeder_obj.SetPlayer(player)
feeder_obj.SetFoodRequirement(food_requirement)
return feeder_obj
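# Illustrative usage sketch (editor's addition): `player` and `picker` are
# assumed to be objects supplied elsewhere by the game engine.
#
#   feeder = CreateFeeder(player, food_requirement=5, picker=picker)
#   feeder.FeedWithPickedResource()   # raises FeederError on a bad pick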
|
chiang831/LeHavre
|
src/game/feeder.py
|
Python
|
gpl-2.0
| 2,413
|
from java.util.zip import ZipEntry, ZipOutputStream
from java.io import File, FileInputStream, ByteArrayOutputStream, FileOutputStream, ByteArrayInputStream
import string, os, jarray, sys
class DirectoryOutput:
def __init__(self, dirname):
self.outdir = dirname
def getFile(self, name):
fname = apply(os.path.join, tuple([self.outdir]+string.split(name, '.')))+'.class'
file = File(fname)
File(file.getParent()).mkdirs()
return FileOutputStream(file)
def write(self, name, file):
fp = self.getFile(name)
if isinstance(file, ByteArrayOutputStream):
file.writeTo(fp)
else:
if isinstance(file, type('')):
file = FileInputStream(file)
data = jarray.zeros(1024*4, 'b')
#print 'writing', file,
while 1:
n = file.read(data)
#print n,
if n == -1: break
fp.write(data, 0, n)
#print
def close(self):
pass
class ZipOutput(DirectoryOutput):
def __init__(self, filename):
self.zipfile = ZipOutputStream(FileOutputStream(filename))
def getName(self, name):
return string.join(string.split(name, '.'), '/')+'.class'
def getFile(self, name):
fname = self.getName(name)
self.zipfile.putNextEntry(ZipEntry(fname))
return self.zipfile
def close(self):
self.zipfile.close()
try:
sys.add_package('com.ms.util.cab')
from com.ms.util import cab
import com.ms.util.cab.CabCreator
from java.util import Date
class CabOutput(cab.CabProgressInterface):
def progress(self, ptype, val1, val2, data):
pass
#print 'cab progress made'
def __init__(self, filename):
self.cabfile = cab.CabCreator(self)
self.cabfile.create(FileOutputStream(filename))
folder = cab.CabFolderEntry()
folder.setCompression(cab.CabConstants.COMPRESSION_LZX, 20)
#print folder.compressionToString()
self.cabfile.newFolder(folder)
def getName(self, name):
return string.join(string.split(name, '.'), '\\')+'.class'
def write(self, name, file):
fname = self.getName(name)
entry = cab.CabFileEntry(name=fname, date=Date())
if isinstance(file, ByteArrayOutputStream):
file = ByteArrayInputStream(file.toByteArray())
elif isinstance(file, type('')):
file = FileInputStream(file)
self.cabfile.addStream(file, entry)
def close(self):
self.cabfile.complete()
except AttributeError:
pass
if __name__ == '__main__':
for of in [CabOutput('c:\\jpython\\test.cab')]: #DirectoryOutput('c:\\jpython\\dtest'), ZipOutput('c:\\jpython\\test.jar')]:
of.write('org.python.core.PyInteger', 'c:\\jpython\\JavaCode\\org\\python\\core\\PyInteger.class')
of.write('org.python.core.PyFloat', 'c:\\jpython\\JavaCode\\org\\python\\core\\PyFloat.class')
bytes = ByteArrayOutputStream()
bytes.write(jarray.array([10]*500, 'b'))
of.write('hi.there', bytes)
of.close()
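# For comparison (illustrative only; CPython 3 stdlib rather than the
# java.util.zip classes used above), the same "dotted class name -> .class
# entry" pattern can be written with zipfile:
import io
import zipfile
def write_class_entry(zf, dotted_name, data):
    # org.python.core.PyInteger -> org/python/core/PyInteger.class
    zf.writestr(dotted_name.replace('.', '/') + '.class', data)
buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    write_class_entry(zf, 'hi.there', b'\x0a' * 500)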
|
carvalhomb/tsmells
|
guess/Tools/freeze/Output.py
|
Python
|
gpl-2.0
| 2,877
|
import datetime
from django.db import models
from modelcluster.fields import ParentalKey
from wagtail.admin.edit_handlers import FieldPanel, InlinePanel, PageChooserPanel, MultiFieldPanel, StreamFieldPanel
from wagtail.core.fields import StreamField
from wagtail.search import index
from opentech.public.utils.models import BasePage, RelatedPage
from opentech.public.funds.models import FundPage, LabPage, RFPPage
from .blocks import OurWorkBlock
class PromotedFunds(RelatedPage):
source_page = ParentalKey(
'home.HomePage',
related_name='promoted_funds'
)
class Meta(RelatedPage.Meta):
unique_together = ('page',)
panels = [
PageChooserPanel('page', 'public_funds.FundPage'),
]
class PromotedLabs(RelatedPage):
source_page = ParentalKey(
'home.HomePage',
related_name='promoted_labs'
)
class Meta(RelatedPage.Meta):
unique_together = ('page',)
panels = [
PageChooserPanel('page', 'public_funds.LabPage'),
]
class PromotedRFPs(RelatedPage):
source_page = ParentalKey(
'home.HomePage',
related_name='promoted_rfps'
)
class Meta(RelatedPage.Meta):
unique_together = ('page',)
panels = [
PageChooserPanel('page', 'public_funds.RFPPage'),
]
class HomePage(BasePage):
# Only allow creating HomePages at the root level
parent_page_types = ['wagtailcore.Page']
NUM_RELATED = 6
strapline = models.CharField(blank=True, max_length=255)
strapline_link = models.ForeignKey('wagtailcore.Page', related_name='+', on_delete=models.PROTECT)
strapline_link_text = models.CharField(max_length=255)
our_work_title = models.CharField(max_length=255)
our_work = StreamField([
('work', OurWorkBlock()),
])
our_work_link = models.ForeignKey('wagtailcore.Page', related_name='+', on_delete=models.PROTECT)
our_work_link_text = models.CharField(max_length=255)
funds_title = models.CharField(max_length=255)
funds_intro = models.TextField(blank=True)
funds_link = models.ForeignKey('wagtailcore.Page', related_name='+', on_delete=models.PROTECT)
funds_link_text = models.CharField(max_length=255)
labs_title = models.CharField(max_length=255)
labs_intro = models.TextField(blank=True)
labs_link = models.ForeignKey('wagtailcore.Page', related_name='+', on_delete=models.PROTECT)
labs_link_text = models.CharField(max_length=255)
rfps_title = models.CharField(max_length=255)
rfps_intro = models.TextField(blank=True)
search_fields = BasePage.search_fields + [
index.SearchField('strapline'),
]
content_panels = BasePage.content_panels + [
MultiFieldPanel([
FieldPanel('strapline'),
PageChooserPanel('strapline_link'),
FieldPanel('strapline_link_text'),
], heading='Introduction'),
MultiFieldPanel([
FieldPanel('our_work_title'),
StreamFieldPanel('our_work'),
PageChooserPanel('our_work_link'),
FieldPanel('our_work_link_text'),
], heading='Our Work'),
MultiFieldPanel([
FieldPanel('funds_title'),
FieldPanel('funds_intro'),
InlinePanel('promoted_funds', label='Promoted Funds', max_num=NUM_RELATED),
PageChooserPanel('funds_link'),
FieldPanel('funds_link_text'),
], heading='Funds'),
MultiFieldPanel([
FieldPanel('labs_title'),
FieldPanel('labs_intro'),
InlinePanel('promoted_labs', label='Promoted Labs', max_num=NUM_RELATED),
PageChooserPanel('labs_link'),
FieldPanel('labs_link_text'),
], heading='Labs'),
MultiFieldPanel([
FieldPanel('rfps_title'),
FieldPanel('rfps_intro'),
InlinePanel('promoted_rfps', label='Promoted RFPs', max_num=NUM_RELATED),
        ], heading='RFPs'),
]
    def get_related(self, page_type, base_list):
        # First yield the editor-promoted pages that are live and public.
        related = page_type.objects.filter(id__in=base_list.values_list('page')).live().public()
        yield from related
        # Then top up to NUM_RELATED with other live pages, soonest deadline first.
        selected = list(related.values_list('id', flat=True))
        extra_needed = self.NUM_RELATED - len(selected)
        extra_qs = page_type.objects.public().live().exclude(id__in=selected)[:extra_needed]
        yield from self.sorted_by_deadline(extra_qs)
def sorted_by_deadline(self, qs):
def sort_by_deadline(value):
try:
return value.deadline or datetime.date.max
except AttributeError:
return datetime.date.max
yield from sorted(qs, key=sort_by_deadline)
    def pages_from_related(self, related):
        for related_page in related.all():
            if related_page.page.live and related_page.page.public:
                yield related_page.page.specific
def get_context(self, *args, **kwargs):
context = super().get_context(*args, **kwargs)
context['lab_list'] = list(self.get_related(LabPage, self.promoted_labs))
context['fund_list'] = list(self.get_related(FundPage, self.promoted_funds))
context['rfps_list'] = list(self.get_related(RFPPage, self.promoted_rfps))
return context
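# A standalone sketch (plain lists instead of querysets; names invented) of the
# "promoted first, then top up to the limit" selection that get_related
# implements above:
def pick_related(promoted, pool, limit):
    chosen = list(promoted)[:limit]
    extra = [item for item in pool if item not in chosen]
    return chosen + extra[:limit - len(chosen)]
assert pick_related(['a'], ['a', 'b', 'c'], 2) == ['a', 'b']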
|
OpenTechFund/WebApp
|
opentech/public/home/models.py
|
Python
|
gpl-2.0
| 5,246
|
import mistune
from django.contrib.auth import get_user_model
from django_bleach.templatetags.bleach_tags import bleach_value
from rest_framework import serializers
from opentech.apply.activity.models import Activity
from opentech.apply.determinations.views import DeterminationCreateOrUpdateView
from opentech.apply.review.models import Review, ReviewOpinion
from opentech.apply.review.options import RECOMMENDATION_CHOICES
from .models import ApplicationSubmission, RoundsAndLabs
User = get_user_model()
markdown = mistune.Markdown()
class ActionSerializer(serializers.Field):
def to_representation(self, instance):
actions = instance.get_actions_for_user(self.context['request'].user)
representation = []
for transition, action in actions:
action_dict = {
'value': transition,
'display': action
}
# Sometimes the status does not exist in the
# determination matrix.
try:
redirect = DeterminationCreateOrUpdateView.should_redirect(
None,
instance,
transition,
)
except KeyError:
redirect = None
if redirect:
action_dict['type'] = 'redirect'
action_dict['target'] = redirect.url
else:
action_dict['type'] = 'submit'
representation.append(action_dict)
return representation
class OpinionSerializer(serializers.ModelSerializer):
author_id = serializers.ReadOnlyField(source='author.id')
opinion = serializers.ReadOnlyField(source='get_opinion_display')
class Meta:
model = ReviewOpinion
fields = ('author_id', 'opinion')
class ReviewSerializer(serializers.ModelSerializer):
author_id = serializers.ReadOnlyField(source='author.id')
url = serializers.ReadOnlyField(source='get_absolute_url')
opinions = OpinionSerializer(read_only=True, many=True)
recommendation = serializers.SerializerMethodField()
class Meta:
model = Review
fields = ('id', 'score', 'author_id', 'url', 'opinions', 'recommendation')
def get_recommendation(self, obj):
return {
'value': obj.recommendation,
'display': obj.get_recommendation_display(),
}
class ReviewSummarySerializer(serializers.Serializer):
reviews = ReviewSerializer(many=True, read_only=True)
count = serializers.ReadOnlyField(source='reviews.count')
score = serializers.ReadOnlyField(source='reviews.score')
recommendation = serializers.SerializerMethodField()
assigned = serializers.SerializerMethodField()
def get_recommendation(self, obj):
recommendation = obj.reviews.recommendation()
return {
'value': recommendation,
'display': dict(RECOMMENDATION_CHOICES).get(recommendation),
}
def get_assigned(self, obj):
assigned_reviewers = obj.assigned.select_related('reviewer', 'role')
response = [
{
'id': assigned.reviewer.id,
'name': str(assigned.reviewer),
'role': {
'icon': assigned.role and assigned.role.icon_url('fill-12x12'),
'name': assigned.role and assigned.role.name,
'order': assigned.role and assigned.role.order,
},
'is_staff': assigned.reviewer.is_apply_staff,
'is_partner': assigned.reviewer.is_partner,
} for assigned in assigned_reviewers
]
opinionated_reviewers = ReviewOpinion.objects.filter(review__submission=obj).values('author').distinct()
extra_reviewers = opinionated_reviewers.exclude(author__in=assigned_reviewers.values('reviewer'))
response.extend([
{
'id': user.id,
'name': str(user),
'role': {
'icon': None,
'name': None,
'order': None,
},
'is_staff': user.is_apply_staff,
'is_partner': user.is_partner,
} for user in User.objects.filter(id__in=extra_reviewers)
])
return response
class SubmissionListSerializer(serializers.ModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='funds:api:submissions:detail')
round = serializers.SerializerMethodField()
class Meta:
model = ApplicationSubmission
fields = ('id', 'title', 'status', 'url', 'round')
def get_round(self, obj):
"""
        Return the round ID, falling back to the lab (page) ID.
"""
return obj.round_id or obj.page_id
class SubmissionDetailSerializer(serializers.ModelSerializer):
questions = serializers.SerializerMethodField()
meta_questions = serializers.SerializerMethodField()
stage = serializers.CharField(source='stage.name')
actions = ActionSerializer(source='*')
review = ReviewSummarySerializer(source='*')
phase = serializers.CharField()
screening = serializers.ReadOnlyField(source='screening_status.title')
class Meta:
model = ApplicationSubmission
fields = ('id', 'title', 'stage', 'status', 'phase', 'meta_questions', 'questions', 'actions', 'review', 'screening')
def serialize_questions(self, obj, fields):
for field_id in fields:
yield obj.serialize(field_id)
def get_meta_questions(self, obj):
meta_questions = {
'title': 'Project Name',
'full_name': 'Legal Name',
'email': 'Email',
'value': 'Requested Funding',
'duration': 'Project Duration',
'address': 'Address'
}
data = self.serialize_questions(obj, obj.named_blocks.values())
data = [
{
**response,
'question': meta_questions.get(response['type'], response['question'])
}
for response in data
]
return data
def get_questions(self, obj):
return self.serialize_questions(obj, obj.normal_blocks)
class SubmissionActionSerializer(serializers.ModelSerializer):
actions = ActionSerializer(source='*', read_only=True)
class Meta:
model = ApplicationSubmission
fields = ('id', 'actions')
class RoundLabDetailSerializer(serializers.ModelSerializer):
workflow = serializers.SerializerMethodField()
class Meta:
model = RoundsAndLabs
fields = ('id', 'title', 'workflow')
def get_workflow(self, obj):
return [
{
'value': phase.name,
'display': phase.display_name
}
for phase in obj.workflow.values()
]
class RoundLabSerializer(serializers.ModelSerializer):
class Meta:
model = RoundsAndLabs
fields = ('id', 'title')
class CommentSerializer(serializers.ModelSerializer):
user = serializers.StringRelatedField()
message = serializers.SerializerMethodField()
class Meta:
model = Activity
fields = ('id', 'timestamp', 'user', 'submission', 'message', 'visibility')
def get_message(self, obj):
return bleach_value(markdown(obj.message))
class CommentCreateSerializer(serializers.ModelSerializer):
user = serializers.StringRelatedField()
class Meta:
model = Activity
fields = ('id', 'timestamp', 'user', 'message', 'visibility')
|
OpenTechFund/WebApp
|
opentech/apply/funds/serializers.py
|
Python
|
gpl-2.0
| 7,604
|
# -*- coding: utf-8 -*-
# Placed into the Public Domain by tav <tav@espians.com>
# origin: https://raw.github.com/tav/scripts/master/validate_jsonp.py
"""Validate Javascript Identifiers for use as JSON-P callback parameters."""
from builtins import str
from builtins import chr
import re
from unicodedata import category
# ------------------------------------------------------------------------------
# javascript identifier unicode categories and "exceptional" chars
# ------------------------------------------------------------------------------
valid_jsid_categories_start = frozenset(["Lu", "Ll", "Lt", "Lm", "Lo", "Nl"])
valid_jsid_categories = frozenset(["Lu", "Ll", "Lt", "Lm", "Lo", "Nl", "Mn", "Mc", "Nd", "Pc"])
valid_jsid_chars = ("$", "_")
# ------------------------------------------------------------------------------
# regex to find array[index] patterns
# ------------------------------------------------------------------------------
array_index_regex = re.compile(r"\[[0-9]+\]$")
has_valid_array_index = array_index_regex.search
replace_array_index = array_index_regex.sub
# ------------------------------------------------------------------------------
# javascript reserved words -- including keywords and null/boolean literals
# ------------------------------------------------------------------------------
is_reserved_js_word = frozenset(
[
"abstract",
"boolean",
"break",
"byte",
"case",
"catch",
"char",
"class",
"const",
"continue",
"debugger",
"default",
"delete",
"do",
"double",
"else",
"enum",
"export",
"extends",
"false",
"final",
"finally",
"float",
"for",
"function",
"goto",
"if",
"implements",
"import",
"in",
"instanceof",
"int",
"interface",
"long",
"native",
"new",
"null",
"package",
"private",
"protected",
"public",
"return",
"short",
"static",
"super",
"switch",
"synchronized",
"this",
"throw",
"throws",
"transient",
"true",
"try",
"typeof",
"var",
"void",
"volatile",
"while",
"with",
# potentially reserved in a future version of the ES5 standard
# 'let', 'yield'
]
).__contains__
# ------------------------------------------------------------------------------
# the core validation functions
# ------------------------------------------------------------------------------
def is_valid_javascript_identifier(identifier, escape=r"\u", ucd_cat=category):
"""Return whether the given ``id`` is a valid Javascript identifier."""
if not identifier:
return False
if not isinstance(identifier, str):
try:
identifier = str(identifier, "utf-8")
except UnicodeDecodeError:
return False
    if escape in identifier:
        # decode \uXXXX escape sequences into their characters before
        # validating the identifier as a whole
        new = []
        add_char = new.append
        split_id = identifier.split(escape)
        add_char(split_id.pop(0))
        for segment in split_id:
            if len(segment) < 4:
                # an escape must be followed by at least four hex digits
                return False
            try:
                add_char(chr(int("0x" + segment[:4], 16)))
            except Exception:
                return False
            add_char(segment[4:])
        identifier = "".join(new)
if is_reserved_js_word(identifier):
return False
first_char = identifier[0]
if not (
(first_char in valid_jsid_chars) or (ucd_cat(first_char) in valid_jsid_categories_start)
):
return False
for char in identifier[1:]:
if not ((char in valid_jsid_chars) or (ucd_cat(char) in valid_jsid_categories)):
return False
return True
def is_valid_jsonp_callback_value(value):
"""Return whether the given ``value`` can be used as a JSON-P callback."""
for identifier in value.split("."):
while "[" in identifier:
if not has_valid_array_index(identifier):
return False
identifier = replace_array_index("", identifier)
if not is_valid_javascript_identifier(identifier):
return False
return True
# ------------------------------------------------------------------------------
# test
# ------------------------------------------------------------------------------
def test():
r"""
The function ``is_valid_javascript_identifier`` validates a given identifier
according to the latest draft of the ECMAScript 5 Specification:
>>> is_valid_javascript_identifier('hello')
True
>>> is_valid_javascript_identifier('alert()')
False
>>> is_valid_javascript_identifier('a-b')
False
>>> is_valid_javascript_identifier('23foo')
False
>>> is_valid_javascript_identifier('foo23')
True
>>> is_valid_javascript_identifier('$210')
True
>>> is_valid_javascript_identifier(u'Stra\u00dfe')
True
>>> is_valid_javascript_identifier(r'\u0062') # u'b'
True
>>> is_valid_javascript_identifier(r'\u62')
False
>>> is_valid_javascript_identifier(r'\u0020')
False
>>> is_valid_javascript_identifier('_bar')
True
>>> is_valid_javascript_identifier('some_var')
True
>>> is_valid_javascript_identifier('$')
True
But ``is_valid_jsonp_callback_value`` is the function you want to use for
validating JSON-P callback parameter values:
>>> is_valid_jsonp_callback_value('somevar')
True
>>> is_valid_jsonp_callback_value('function')
False
>>> is_valid_jsonp_callback_value(' somevar')
False
It supports the possibility of '.' being present in the callback name, e.g.
>>> is_valid_jsonp_callback_value('$.ajaxHandler')
True
>>> is_valid_jsonp_callback_value('$.23')
False
As well as the pattern of providing an array index lookup, e.g.
>>> is_valid_jsonp_callback_value('array_of_functions[42]')
True
>>> is_valid_jsonp_callback_value('array_of_functions[42][1]')
True
>>> is_valid_jsonp_callback_value('$.ajaxHandler[42][1].foo')
True
>>> is_valid_jsonp_callback_value('array_of_functions[42]foo[1]')
False
>>> is_valid_jsonp_callback_value('array_of_functions[]')
False
>>> is_valid_jsonp_callback_value('array_of_functions["key"]')
False
Enjoy!
"""
if __name__ == "__main__":
import doctest
doctest.testmod()
|
Kegbot/kegbot-server
|
pykeg/web/api/validate_jsonp.py
|
Python
|
gpl-2.0
| 6,743
|
# -*- coding: utf-8 -*-
'''libnano.padlock
Generation + filtering of padlock probes / MIPs from a target region sequence
Padlock structure reminder, left and right are in terms of the hybridized sequence
LINEAR VERSION:
5' Right Arm Scaffold Seq (aka Loop) Left Arm 3'
+------------------>+-----------~-----------++------------------>
HYBRIDIZED VERSION
Scaffold Seq (aka Loop)
-------------------~--------------------
| |
< Left Arm 3' 5' Right Arm +
3' +------------------>+------------------> 5'
<----------------------------------------------------+
copied RT'd cDNA reverse strand
'''
import logging
import random
import io
from typing import (
List,
Tuple,
Dict,
NamedTuple
)
from pprint import pprint
import os.path
import yaml
from primer3 import (
calcTm,
calcHeterodimerTm
)
from libnano.seqstr import reverseComplement as rc
from libnano.metric.seqmetric import gcContent
from libnano import DATASET_DIR
pjoin = os.path.join
with io.open(pjoin(DATASET_DIR, 'padlock.yaml'), 'r') as fd:
    PADLOCK_SEQS: dict = yaml.safe_load(fd)
T2S_SEQ: str = PADLOCK_SEQS['T2S_SEQ']
SCAFFOLD_SEQ_SOLID: str = ( PADLOCK_SEQS['SCAFFOLD_SEQ_SOLID'][0] +
T2S_SEQ +
PADLOCK_SEQS['SCAFFOLD_SEQ_SOLID'][1] )
SCAFFOLD_SEQ_ILUMINA: str = ( PADLOCK_SEQS['SCAFFOLD_SEQ_ILUMINA'][0] +
T2S_SEQ +
PADLOCK_SEQS['SCAFFOLD_SEQ_ILUMINA'][1] )
ILLLUMINA_SEQ: str = PADLOCK_SEQS['ILLLUMINA_SEQ']
SCAFFOLD_SEQ_HYBRID: str = (PADLOCK_SEQS['SCAFFOLD_SEQ_HYBRID'][0] +
ILLLUMINA_SEQ +
PADLOCK_SEQS['SCAFFOLD_SEQ_HYBRID'][1] +
T2S_SEQ +
PADLOCK_SEQS['SCAFFOLD_SEQ_HYBRID'][2] )
PadHit = NamedTuple("PadHit", [ ('name0', str),       # identifier string
                                ('name1', str),       # identifier string
                                ('strand_dir', str),  # fwd or rev
                                ('genome_idx', int),  # the starting genomic index of this base
                                ('idx', int),         # the index into this sequence string
                                ('gap_size', int),    # MIP gap size between the arms
                                ('padlock_seq', str),
                                ('barcode', str),
                                ('seq_r', str),
                                ('scaffold', str),
                                ('seq_l', str)])
DEFAULT_PADLOCK_CONFIG = lambda: {
'species': 'human', # target species
'padding': 25, # NOTE UNUSED: Padding from 3' end of gene (nt)
'spacing': 20, # Spacing between probe starts (nt)
'arm_length': 20, # Padlock / MIP arm length (nt)
'gap_size': 0, # MIP gap size
'arm_gc_min': 0.4, # Minimum arm GC content
'arm_gc_max': 0.6, # Maximum arm GC content
'scaffold': SCAFFOLD_SEQ_HYBRID, # Adapter sequence (for struct checks)
'exclude_seqs': ['GGGGG'], # Subsequences to avoid
'structure_tm_max': 30, # Max melting temp of secondary struct
'keep_random_n': None, # If not None, keep only `n` probes
'thermo_params': { # Primer3 thermo params
'mv_conc': 50,
'dv_conc': 0,
'dntp_conc': 0.8,
'dna_conc': 50
},
'arm_tm_min': 50
}
P_PARAMS: dict = DEFAULT_PADLOCK_CONFIG()
def createScaffold(barcode: str, scaf_type: str ='solid') -> str:
if scaf_type == 'solid':
scaffold = SCAFFOLD_SEQ_SOLID
elif scaf_type == 'illumina':
scaffold = SCAFFOLD_SEQ_ILUMINA
elif scaf_type == 'hybrid':
scaffold = SCAFFOLD_SEQ_HYBRID
else:
raise ValueError("Unknown scaf_type, %s" % (scaf_type))
return scaffold.format(barcode=barcode,
armr='',
t2s5p='',
t2s3p='',
il5p='',
il3p='',
arml=''
)
# end def
def screenPadlockArms( p_l_seq: str,
p_r_seq: str,
loop_seq: str,
p_params: dict,
do_print: bool = False) -> Tuple[bool, dict]:
is_good = True
tp = p_params['thermo_params']
report = {
'arm_gc_min_l': 0,
'arm_gc_max_l': 0,
'arm_gc_min_r': 0,
'arm_gc_max_r': 0,
'l_clamp': True,
'tm_arm_min_l': 0,
'tm_arm_min_r': 0,
'ex_seq': [],
'tm_hairpin_l': 0,
'tm_hairpin_r': 0,
'tm_hetero_0': 0,
'tm_hetero_1': 0,
'tm_hetero_2': 0
}
"1. GC content checks"
p_l_gc_content = gcContent(p_l_seq)
p_r_gc_content = gcContent(p_r_seq)
if p_l_gc_content < p_params['arm_gc_min']:
if do_print:
print("\tgc content L min fail %0.3f" % p_l_gc_content)
is_good = False
report['arm_gc_min_l'] = p_l_gc_content
if p_r_gc_content < p_params['arm_gc_min']:
if do_print:
print("\tgc content R min fail %0.3f" % p_r_gc_content)
is_good = False
report['arm_gc_min_r'] = p_r_gc_content
if p_l_gc_content > p_params['arm_gc_max']:
if do_print:
print("\tgc content L max fail %0.3f" % p_l_gc_content)
is_good = False
report['arm_gc_max_l'] = p_l_gc_content
if p_r_gc_content > p_params['arm_gc_max']:
if do_print:
print("\tgc content R max fail %0.3f" % p_r_gc_content)
is_good = False
report['arm_gc_max_r'] = p_r_gc_content
"2. GC clamp checks"
l_3p_check = padlockLeftArmGCClamp(p_l_seq)
if l_3p_check > 3:
if do_print:
print("\tl clamp fail")
is_good = False
report['l_clamp'] = False
"3. Arm Tm check"
p_arm_tm_l = calcTm(p_l_seq, **tp)
p_arm_tm_r = calcTm(p_r_seq, **tp)
if p_arm_tm_l < p_params['arm_tm_min']:
if do_print:
print("\tArm L fail %2.3f" % p_arm_tm_l)
is_good = False
report['tm_arm_min_l'] = p_arm_tm_l
if p_arm_tm_r < p_params['arm_tm_min']:
if do_print:
print("\tArm R fail %2.3f" % p_arm_tm_r)
is_good = False
report['tm_arm_min_r'] = p_arm_tm_r
p_seq = (
p_r_seq + loop_seq + p_l_seq
)
"4. Check for excluded seqs"
ex_fail = False
for ex_seq in p_params['exclude_seqs']:
if ex_seq in p_seq:
ex_fail = True
report['ex_seq'].append(ex_seq)
break
if ex_fail:
is_good = False
"5. Secondary structure / primer dimer checks"
p_het_tm_0 = calcHeterodimerTm(p_l_seq, p_r_seq, **tp)
p_het_tm_1 = calcHeterodimerTm(p_l_seq, loop_seq, **tp)
p_het_tm_2 = calcHeterodimerTm(p_r_seq, loop_seq, **tp)
if p_het_tm_0 > p_params['structure_tm_max']:
if do_print:
print("\thetero 0 fail")
is_good = False
report['tm_hetero_0'] = p_het_tm_0
if p_het_tm_1 > p_params['structure_tm_max']:
if do_print:
print("\thetero 1 fail")
is_good = False
report['tm_hetero_1'] = p_het_tm_1
if p_het_tm_2 > p_params['structure_tm_max']:
if do_print:
print("\thetero 2 fail")
is_good = False
report['tm_hetero_2'] = p_het_tm_2
return is_good, report
# end def
def splitHitList( items: List[Tuple[int, dict]],
arm_length,
spacing) -> List[List[Tuple[int, dict]]]:
'''Split hits into groups by a spacing and an arm_length
'''
# split into groups by spacing
if len(items) > 0:
delta: int = items[0][0] + 2*arm_length + spacing
group: List[Tuple[int, dict]] = []
hit_lists: List[List[Tuple[int, dict]]] = [group]
for i, report in items:
if i > delta:
group = []
hit_lists.append(group)
# increment delta for next group
delta = i + 2*arm_length + spacing
group.append((i, report))
return hit_lists
else:
return []
# end def
def sortHitList(items: List[Tuple[int, dict]]) -> List[Tuple[int, dict]]:
    '''Sort hits descending by the weighted sum of arm Tms (the right arm
    is down-weighted by a factor of 0.9)
    '''
max_tm_f = lambda x: x[1]['tm_arm_min_l'] + 0.9*x[1]['tm_arm_min_r']
return sorted(items, key=max_tm_f, reverse=True)
# end def
def writePadlocksToCSV(padlock_results: Dict[str, List[PadHit]], filename: str):
    '''Write padlocks to a CSV file
'''
tp = P_PARAMS['thermo_params']
with io.open(filename, 'w') as fd:
fd.write( 'gene_name, name0, name1, strand_dir, genome_idx, index, '
'gap_size, sequence, barcode, right_arm, scaffold, '
'left_arm, right_tm, left_tm\n')
temp = '%s, %s, %s, %s, %d, %d, %d, %s, %s, %s, %s, %s, %2.3f, %2.3f\n'
for gene, seq_list in padlock_results.items():
for seq_tuple in seq_list:
seq_r, seq_l = seq_tuple.seq_r, seq_tuple.seq_l
tm_tuple = (calcTm(seq_r, **tp), calcTm(seq_l, **tp))
fd.write(temp % ((gene,) + seq_tuple + tm_tuple) )
print('Wrote padlocks to %s' % filename)
# end def
def generatePadlocks(seq: str,
name0: str,
name1: str,
strand_dir: str,
barcodes: List[str],
genome_idx: int = -1,
arm_length: int = 20,
params: dict = None,
do_print: bool = False
) -> List[PadHit]:
    '''Generate candidate padlocks for a target sequence, screening out
    poly G's in the padlock to avoid warnings from IDT synthesis
    Args:
        seq: the target sequence to design padlocks against
        name0: name to identify the padlocks by. For example, you could use
            the Ensembl Transcript ID
        name1: name to identify the padlocks by. For example, you could use
            the Ensembl Exon ID
        strand_dir: strand direction, ``fwd`` or ``rev``
        barcodes: list of one or more barcodes to try
        genome_idx: start index of the sequence in the genome
        arm_length: the length of a padlock arm
        params: default is P_PARAMS. parameters for padlock screening. Add in
            things like ``gap_size`` here
        do_print: debug the design printing
    '''
if params is None:
params = P_PARAMS
else:
default_p = DEFAULT_PADLOCK_CONFIG()
default_p.update(params)
params = default_p
arm_length2: int = 2*arm_length
gap_size: int = params['gap_size']
spacing: int = params['spacing']
scaffold: str = None
if len(barcodes) == 0 or not isinstance(barcodes, (tuple, list)):
raise ValueError("barcodes length must be non-zero")
    for barcode in barcodes:
        candidate_scaffold: str = createScaffold(barcode, scaf_type='hybrid')
        if 'GGGG' not in candidate_scaffold:
            scaffold = candidate_scaffold
            break  # keep `barcode` consistent with the scaffold we picked
    if scaffold is None:
        raise ValueError('polyG in scaffold for all barcodes')
items = []
search_range = range(len(seq) - arm_length2)
for i in search_range:
if 'GGGG' not in seq[i:i + arm_length2 + gap_size]: # including the gap_size
l_primer = seq[i:i + arm_length]
r_primer = seq[i + arm_length + gap_size:i + arm_length2 + gap_size]
is_good, report = screenPadlockArms( l_primer,
r_primer,
scaffold,
params)
if is_good:
                # add the start index of the padlock and the report
                # dictionary to the items list
items.append((i, report))
# elif do_print:
# print("FAILURE")
# pprint(report)
# end for
# print(items)
hit_lists = splitHitList(items, arm_length=arm_length, spacing=spacing)
hit_lists = [sortHitList(x) for x in hit_lists]
if do_print:
print('The number of hits', len(hit_lists))
# pick the first element in each group
sampled_list: List = [x[0] for x in hit_lists]
if do_print:
print('$HIT_COUNT:', [len(x) for x in hit_lists])
sequences_list: List[PadHit] = []
for i, b in sampled_list:
seq_l = seq[i:i + arm_length]
seq_r = seq[i + arm_length + gap_size:i + arm_length2 + gap_size]
if do_print:
print("%d,\t %2.3f, %2.3f" % (i, b['tm_arm_min_l'], b['tm_arm_min_r']))
print("%s, %s" % (seq_l, seq_r))
print("%s" % (seq[i:i + arm_length2]))
sequences_list.append(PadHit(
name0,
name1,
strand_dir,
genome_idx,
i,
gap_size,
seq_r + scaffold + seq_l,
barcode,
seq_r,
scaffold,
seq_l))
# end for
return sequences_list
# end def
def padlockRightArmGCClamp(p: str) -> int:
r_3p = p[-5:]
r_3p_check = r_3p.count('G') + r_3p.count('C')
return r_3p_check
# end def
def padlockLeftArmGCClamp(p: str) -> int:
l_3p = p[0:5]
l_3p_check = l_3p.count('G') + l_3p.count('C')
return l_3p_check
# end def
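# A minimal usage sketch (assumptions: the bundled padlock.yaml dataset and
# primer3-py are installed; the sequence, names and barcode are invented):
if __name__ == '__main__':
    toy_seq = 'ATGC' * 30  # 120 nt toy target, 50% GC
    hits = generatePadlocks(toy_seq,
                            name0='TRANSCRIPT_X',  # hypothetical identifier
                            name1='EXON_X',        # hypothetical identifier
                            strand_dir='fwd',
                            barcodes=['ACGTACGTAC'])
    for hit in hits:
        print(hit.idx, hit.seq_l, hit.seq_r)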
|
libnano/libnano
|
libnano/padlock.py
|
Python
|
gpl-2.0
| 13,787
|
#!/usr/bin/env python
"""
Seeding from GeoJSON string
===========================
"""
from datetime import datetime, timedelta
from opendrift.models.leeway import Leeway
from opendrift.models.openoil import OpenOil
#%%
# Polygon
#--------
o = OpenOil(loglevel=50)
o.seed_from_geojson("""{
"type": "Feature",
"geometry": {
"type": "Polygon",
"coordinates": [
[
[4.0, 60.0], [4.5, 60.0], [4.7, 60.1],
[4.2, 60.1], [4.0, 60.0]
]
]
},
"properties": {
"time": "2020-11-06T12:30:00Z",
"number": 1000,
"oil_type": "DVALIN 2020",
"m3_per_hour": 50
}
}""")
o.plot(fast=True)
#%%
# Point release at seafloor
#--------------------------
o = OpenOil(loglevel=50)
o.set_config('environment:constant:sea_floor_depth_below_sea_level', 200)
o.seed_from_geojson("""{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [ 4.0, 60.0 ]
},
"properties": {
"time": ["2020-11-06T12:30:00Z", "2020-11-06T18:30:00Z"],
"number": 3000,
"z": "seafloor"
}
}""")
for var in ['x_wind', 'y_wind', 'x_sea_water_velocity', 'y_sea_water_velocity']:
o.set_config('environment:constant:' + var, 0)
o.run(duration=timedelta(hours=6), time_step=300)
o.animation_profile()
#%%
# .. image:: /gallery/animations/example_seed_geojson_0.gif
#%%
# Cone
#-----
# from (position1, radius1, time1) to (position2, radius2, time2)
o = Leeway(loglevel=50)
o.seed_from_geojson("""{
"type": "Feature",
"geometry": {
"type": "LineString",
"coordinates": [
[4.0, 60.0], [4.5, 60.1]
]
},
"properties": {
"time": ["2020-11-06T12:30:00Z", "2020-11-06T18:30:00Z"],
"radius": [0, 2000],
"number": 3000
}
}""")
for var in ['x_wind', 'y_wind', 'x_sea_water_velocity', 'y_sea_water_velocity']:
o.set_config('environment:constant:' + var, 0)
o.run(duration=timedelta(hours=6))
o.animation(fast=True)
#%%
# .. image:: /gallery/animations/example_seed_geojson_1.gif
|
OpenDrift/opendrift
|
examples/example_seed_geojson.py
|
Python
|
gpl-2.0
| 2,137
|
import requests
from requests.auth import HTTPDigestAuth, HTTPBasicAuth
import tempfile
from email.utils import formatdate
from artemis.Task import Task, AuthNature, TaskNature
import logging
import pycurl #until requests supports SOCKS5; no accreditation handling, http://tech.michaelaltfield.net/2015/02/22/pycurl-through-tor-without-leaking-dns-lookups/
class HTTPDefaultHandler:
def __init__(self, useragent, contentTypesHeader, accreditationCache,
proxies=None, tor_socket_port=7000):
self.contentTypesHeader = contentTypesHeader
self.accreditationCache = accreditationCache
self.useragent = useragent
self.tor_socket_port = tor_socket_port
self.s = requests.Session()
self.newTasks = [] #redirection ones
if proxies :
self.s.proxies = proxies
def buildHeaders(self, task):
headers = {"User-Agent":self.useragent,
"Accept":self.contentTypesHeader}
if task.lastvisited != -1:
headers["If-Modified-Since"]= formatdate(
timeval = task.lastvisited,
localtime = False,
usegmt = True)
return headers
def checkHeaders(self,task, status_code, headers):
if task.lastvisited != -1:
if( (status_code == 301 or status_code ==302
or status_code ==307 or status_code ==308)
and "Location" in headers): #redirection
task.incr()
self.newTasks.append( Task(headers["Location"] ) )
return False
            elif status_code == 304: # content unchanged
                task.incr()
                return False
            elif status_code >= 400: # ie 4xx or 5xx error
                task.incr()
                logging.debug('%s %s', status_code, task.url)
                return False
return True
def checkHeader(self, task):
r = self.s.head(task.url, headers=self.buildHeaders(task) )
return self.checkHeaders(task, r.status_code, r.headers)
def header_function_pycurl(self, header_line):
header_line = header_line.decode('iso-8859-1')
if ':' not in header_line:
return
name, value = header_line.split(':', 1)
name = name.strip().lower()
value = value.strip()
self.tor_headers[name] = value
def tor_pycurl(self, task):
self.tor_headers = {} #response headers
tmpFile = tempfile.SpooledTemporaryFile(max_size=1048576) #1Mo
query = pycurl.Curl()
query.setopt(pycurl.URL, task.url)
query.setopt(pycurl.PROXY, '127.0.0.1')
query.setopt(pycurl.PROXYPORT, self.tor_socket_port)
query.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME)
query.setopt(pycurl.HTTPHEADER, [
''.join( [str(k), ":", str(v)])
for k,v in self.buildHeaders(task).items()] )
query.setopt(pycurl.WRITEFUNCTION, tmpFile.write)
query.setopt(pycurl.HEADERFUNCTION, self.header_function_pycurl)
query.perform()
status_code = query.getinfo(pycurl.HTTP_CODE)
query.close()
headers=self.tor_headers
if self.checkHeaders(task, status_code, headers) :
return headers["content-type"], tmpFile, self.newTasks
return None, None, self.newTasks
def getAccreditation(self, task):
if task.auth == AuthNature.no :
return None
elif task.auth == AuthNature.form :
self.s.cookies = self.accreditationCache.get(task.auth, task)
return None
elif task.auth == AuthNature.http_basic :
user = self.accreditationCache.get(task.auth, task )
return HTTPBasicAuth(user.login, user.password)
elif task.auth == AuthNature.htto_digest :
user = self.accreditationCache.get(task.auth, task )
return HTTPDigestAuth(user.login, user.password)
def execute(self, task):
if task.nature == TaskNature.web_static_tor:
return self.tor_pycurl( task)
with self.s :
if self.checkHeader(task):
headers = {"User-Agent":self.useragent,
"Accept":self.contentTypesHeader}
r = self.s.get(task.url, headers=headers,
auth=self.getAccreditation(task), stream=True )
                if r.status_code != 200: # anything but 200 counts as a failure here
task.incr()
logging.debug( ' '.join( [
task.url, str(r.status_code) ]) )
return None, None, []
tmpFile = tempfile.SpooledTemporaryFile(max_size=1048576) #1Mo
for block in r.iter_content(4096):
tmpFile.write(block)
return r.headers["Content-Type"], tmpFile, self.newTasks
return None, None, self.newTasks
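# A standalone sketch (illustrative only, not part of Artemis) of the
# conditional-GET convention that buildHeaders() implements: send an
# If-Modified-Since header and treat HTTP 304 as "content unchanged".
def fetch_if_modified(url, last_visited):
    headers = {}
    if last_visited != -1:
        headers['If-Modified-Since'] = formatdate(
            timeval=last_visited, localtime=False, usegmt=True)
    r = requests.get(url, headers=headers)
    if r.status_code == 304:
        return None  # our cached copy is still fresh
    return r.content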
|
athena-project/Artemis
|
src/handlers/HTTPDefaultHandler.py
|
Python
|
gpl-2.0
| 4,167
|
# vim:set et sts=4 sw=4:
#
# ibus - The Input Bus
#
# Copyright (c) 2007-2008 Huang Peng <shawn.p.huang@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307 USA
SHIFT_MASK = 1 << 0
LOCK_MASK = 1 << 1
CONTROL_MASK = 1 << 2
ALT_MASK = 1 << 3
MOD1_MASK = 1 << 3
MOD2_MASK = 1 << 4
MOD3_MASK = 1 << 5
MOD4_MASK = 1 << 6
MOD5_MASK = 1 << 7
#
# lotem 2010-12-03
# modified in Weasel to fit the mask into a UINT16
#
HANDLED_MASK = 1 << 8
IGNORED_MASK = 1 << 9
FORWARD_MASK = 1 << 9
SUPER_MASK = 1 << 10
HYPER_MASK = 1 << 11
META_MASK = 1 << 12
RELEASE_MASK = 1 << 14
MODIFIER_MASK = 0x2fff
MODIFIER_NAME_TABLE = (
("Shift", SHIFT_MASK),
("CapsLock", LOCK_MASK),
("Ctrl", CONTROL_MASK),
("Alt", MOD1_MASK),
("SUPER", SUPER_MASK),
("Hyper", HYPER_MASK),
("Meta", META_MASK),
("Release", RELEASE_MASK),
)
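# A small sketch (not part of the original module) showing how the name table
# above can render a combined modifier state for debugging:
def modifier_names(mask):
    """Return the names of all modifiers set in ``mask``."""
    return [name for name, bit in MODIFIER_NAME_TABLE if mask & bit]
# e.g. modifier_names(SHIFT_MASK | CONTROL_MASK) -> ['Shift', 'Ctrl']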
|
lotem/rime.py
|
weasel/ibus/modifier.py
|
Python
|
gpl-3.0
| 1,512
|
# -*- coding: utf-8 -*-
"""
ORCA Open Remote Control Application
Copyright (C) 2013-2020 Carsten Thielepape
Please contact me by : http://www.orca-remote.org/
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from typing import List
from typing import Dict
from typing import Any
from typing import Union
import os
import json
from copy import deepcopy
from kivy.logger import Logger
from ORCA.vars.Replace import ReplaceVars
import ORCA.vars.Globals
__all__ = ['Round',
'GetVarList',
'UnSplit',
'GetEnvVar',
'CopyDict',
'Find_nth_Character']
def Round(fValue:Union[float,None],iPos:int) -> Union[float,int]:
"""
    Rounds a float value to the given position. If the value is None or 0.0, 0.0 will be returned.
    Example: Round(1.81,0) returns 2. Round(1.81,1) returns 1.8
    :param float fValue: The float value to round
    :param int iPos: the rounding position
    :return: Rounded value (an int if iPos is 0)
"""
if (fValue is None) or (fValue==0.0):
return 0.0
fRet:float=fValue/abs(fValue) * int(abs(fValue) * 10**iPos + 0.5)/ 10**iPos
if iPos==0:
fRet = int(fRet)
return fRet
def GetVarList(uFilter:str=u'') -> Dict[str,Any]:
"""
    Returns a dict of user vars
    :param str uFilter: A filter: only vars whose names CONTAIN the filter string are returned
    :return: A dict of all user vars matching uFilter, or of all user vars if uFilter is empty
"""
dRet:Dict[str,Any] = {}
if uFilter==u'':
for uVarIdx in ORCA.vars.Globals.dUserVars:
dRet[uVarIdx]=ORCA.vars.Globals.dUserVars[uVarIdx]
else:
for uVarIdx in sorted(ORCA.vars.Globals.dUserVars):
if uFilter in uVarIdx:
dRet[uVarIdx]=ORCA.vars.Globals.dUserVars[uVarIdx]
return dRet
def UnSplit(aVars:List) ->None:
    """Re-joins elements of a JSON array that was split apart on embedded commas"""
i=0
while i < len(aVars):
if not aVars[i]=='':
if aVars[i][0:1]=='[':
if not i==len(aVars)-1:
if aVars[i+1][-1]==']':
aVars[i]+=','
aVars[i]+=aVars[i+1]
del aVars[i+1]
i+=1
def GetEnvVar(uVarName:str,uDefault:str="") -> str:
"""
Returns the value of an environment var
:param str uVarName: Name of the Environment variable to return
:param str uDefault: Optional: The default value, if no environment is given
    :return: The value of the variable (with ORCA vars replaced), or uDefault if the variable is not set
"""
uRet:str=os.getenv(uVarName)
if uRet is None:
uRet=uDefault
else:
uRet= ReplaceVars(uRet)
return uRet
def Find_nth_Character(uStr1:str, uSubstr:str, iLevel:int) -> Union[int,None]:
"""
    Find the position of the nth occurrence of a substring within a string.
    Find_nth_Character(u"Test Test Test","Test",1) returns 0
    Find_nth_Character(u"Test Test Test","Test",2) returns 5
    Find_nth_Character(u"Test Test Test","Test",3) returns 10
    Find_nth_Character(u"Test Test Test","Test",6) returns None
    Find_nth_Character(u"Test Test Test","Foo",3) returns None
    :param str uStr1: The string to search in
    :param str uSubstr: The string to search for
    :param int iLevel: The occurrence of the substring (starting with 1)
    :return: Position of the nth occurrence of the substring in the string, or None if not found
"""
iPos:int = -1
for x in range(iLevel):
iPos = uStr1.find(uSubstr, iPos + 1)
if iPos == -1:
return None
return iPos
def CopyDict(dSrc:Dict) -> Dict:
    """Deep-copies a dict, preferring a fast JSON round trip and falling back to deepcopy"""
    try:
        return json.loads(json.dumps(dSrc))
    except Exception:
        Logger.warning("Can't copy dict the preferred way:"+str(dSrc))
        return deepcopy(dSrc)
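# A quick sketch (illustrative only, not part of the original module) of why
# CopyDict keeps the deepcopy fallback: a JSON round trip is fast but
# normalizes types, e.g. tuples become lists.
if __name__ == '__main__':
    dSample = {'point': (1, 2)}
    print(json.loads(json.dumps(dSample)))  # {'point': [1, 2]} - tuple became a list
    print(deepcopy(dSample))                # {'point': (1, 2)} - structure preserved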
|
thica/ORCA-Remote
|
src/ORCA/vars/Helpers.py
|
Python
|
gpl-3.0
| 4,527
|